| repo_name (string, lengths 5-92) | path (string, lengths 4-221) | copies (19 classes) | size (string, lengths 4-6) | content (string, lengths 766-896k) | license (15 classes) | hash (int64, -9,223,277,421,539,062,000 to 9,223,102,107B) | line_mean (float64, 6.51-99.9) | line_max (int64, 32-997) | alpha_frac (float64, 0.25-0.96) | autogenerated (bool, 1 class) | ratio (float64, 1.5-13.6) | config_test (bool, 2 classes) | has_no_keywords (bool, 2 classes) | few_assignments (bool, 1 class) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
git-commit/TardisDiff
|
TardisDiff.py
|
1
|
4981
|
import sys
import os
import inspect
from PyQt5 import QtWidgets, QtCore, QtGui
import plugnplay
from uptime import boottime
from TardisUtil import TardisOptions, TimeSubmitter
class TardisDiff(QtWidgets.QMainWindow):
def __init__(self):
super(TardisDiff, self).__init__()
self.difference = 0
self.clipboard = QtWidgets.QApplication.clipboard()
# Set hot keys
QtWidgets.QShortcut(QtGui.QKeySequence("Ctrl+Shift+C"), self,
self.setClipboard)
QtWidgets.QShortcut(QtGui.QKeySequence("Ctrl+Shift+T"), self,
self.notify_time_submitters)
self.options = TardisOptions()
# Get plugins
plugnplay.plugin_dirs = ['./plugins', ]
plugnplay.load_plugins()
# Get directory path
# From: http://stackoverflow.com/a/22881871/1963958
if getattr(sys, 'frozen', False): # py2exe, PyInstaller, cx_Freeze
script_path = os.path.abspath(sys.executable)
else:
script_path = inspect.getabsfile(TardisDiff)
script_path = os.path.realpath(script_path)
script_path = os.path.dirname(script_path)
# Google for a fancy tardis icon until I've made one
self.setWindowIcon(QtGui.QIcon(
os.path.join(script_path, 'icon', 'tardis-by-camilla-isabell-kasbo.ico')))
self.initUI()
def initUI(self):
# Create and initialize UI elements
self.contentWidget = QtWidgets.QWidget()
self.gridLayout = QtWidgets.QGridLayout(self.contentWidget)
self.formLayout = QtWidgets.QFormLayout()
self.timeEdit1 = QtWidgets.QTimeEdit(self.contentWidget)
self.timeEdit2 = QtWidgets.QTimeEdit(self.contentWidget)
self.timeEditBreakTime = QtWidgets.QTimeEdit(self.contentWidget)
self.timeEditBreakTime.setDisplayFormat("h:mm")
self.timeEditBreakTime.setCurrentSection(
QtWidgets.QDateTimeEdit.MinuteSection)
self.timeEditBreakTime.setTime(QtCore.QTime(0, 30))
self.label_timeDiffOut = QtWidgets.QLabel(self.contentWidget)
self.button_time1_now = QtWidgets.QPushButton(
"Now", self.contentWidget)
self.button_time2_now = QtWidgets.QPushButton(
"Now", self.contentWidget)
self.label_timeDiffOut.setText("")
self.timeEdit1.setTime(self.getStartTime())
self.timeEdit2.setTime(QtCore.QTime.currentTime())
# Add UI elements
row1 = QtWidgets.QHBoxLayout()
row1.addWidget(self.timeEdit1)
row1.addWidget(self.button_time1_now)
row2 = QtWidgets.QHBoxLayout()
row2.addWidget(self.timeEdit2)
row2.addWidget(self.button_time2_now)
self.formLayout.addRow("Time 1:", row1)
self.formLayout.addRow("Time 2:", row2)
self.formLayout.addRow("Break Time:", self.timeEditBreakTime)
self.formLayout.addRow("Difference:", self.label_timeDiffOut)
self.gridLayout.addLayout(self.formLayout, 0, 0, 1, 1)
self.setCentralWidget(self.contentWidget)
self.statusBar()
# connect slots
self.timeEdit1.timeChanged.connect(self.inputChanged)
self.timeEdit2.timeChanged.connect(self.inputChanged)
self.timeEditBreakTime.timeChanged.connect(self.inputChanged)
self.button_time1_now.pressed.connect(self.reset_time1)
self.button_time2_now.pressed.connect(self.reset_time2)
self.setWindowTitle('TardisDiff')
self.inputChanged()
self.show()
def inputChanged(self):
"""
Checks both time inputs and the break time
input to determine the difference.
Then calls the method to update the ui.
"""
time1 = self.timeEdit1.time()
time2 = self.timeEdit2.time()
breakTime = self.timeEditBreakTime.time().secsTo(QtCore.QTime(0, 0))
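        # Note: QTime.secsTo() counts from the break time toward midnight,
        # so the result is negative (e.g. QTime(0, 30) -> -1800); adding it
        # below therefore subtracts the break from the worked interval.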
self.difference = (time1.secsTo(time2) + breakTime) / 3600
self.difference = round(self.difference, 2)
self.label_timeDiffOut.setText(str(self.difference))
def reset_time1(self):
self.timeEdit1.setTime(QtCore.QTime.currentTime())
def reset_time2(self):
self.timeEdit2.setTime(QtCore.QTime.currentTime())
def setClipboard(self):
"""Sets the current diff text to clipboard"""
self.clipboard.setText(str(self.difference))
self.statusBar().showMessage("Copied to clipboard.")
def getStartTime(self):
return TardisDiff.getBootTimeAsQTime()\
if self.options.isStartTimeAuto()\
else QtCore.QTime.fromString(self.options.getStartTime())
def notify_time_submitters(self):
TimeSubmitter.submit_time(self.difference)
@staticmethod
def getBootTimeAsQTime():
return QtCore.QDateTime(boottime()).time()
def main():
app = QtWidgets.QApplication(sys.argv)
ed = TardisDiff()
sys.exit(app.exec_())
if __name__ == '__main__':
main()
|
isc
| 2,382,846,917,862,565,000
| 35.094203
| 86
| 0.656495
| false
| 3.770628
| false
| false
| false
|
PPKE-Bioinf/consensx.itk.ppke.hu
|
consensx/storage/csv.py
|
1
|
1205
|
class CSVBuffer(object):
"""Class which stores data for values.CSV"""
def __init__(self, my_path):
self.working_dir = my_path
self.max_resnum = -1
self.min_resnum = 100000
self.csv_data = []
def add_data(self, data):
self.csv_data.append(data)
def write_csv(self):
filename = self.working_dir + "values.csv"
output_csv = open(filename, 'w')
output_csv.write(',')
for data in self.csv_data:
output_csv.write(data["name"] + " EXP, " + data["name"] + " CALC,")
output_csv.write("\n")
for resnum in range(self.min_resnum, self.max_resnum + 1):
output_csv.write(str(resnum) + ',')
for data in self.csv_data:
exp = {}
for i in data["experimental"]:
exp[i.resnum] = i.value
try:
output_csv.write(
"{0:.2f}".format(exp[resnum]) + ',' +
"{0:.2f}".format(data["calced"][resnum]) + ','
)
except (IndexError, KeyError):
output_csv.write(',,')
output_csv.write("\n")
|
mit
| 8,709,585,037,877,639,000
| 31.567568
| 79
| 0.46971
| false
| 3.753894
| false
| false
| false
|
philipgian/pre-commit
|
pre_commit/make_archives.py
|
1
|
2079
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import os.path
import tarfile
from pre_commit import five
from pre_commit import output
from pre_commit.util import cmd_output
from pre_commit.util import cwd
from pre_commit.util import rmtree
from pre_commit.util import tmpdir
# This is a script for generating the tarred resources for git repo
# dependencies. Currently it's just for "vendoring" ruby support packages.
REPOS = (
('rbenv', 'git://github.com/rbenv/rbenv', 'e60ad4a'),
('ruby-build', 'git://github.com/rbenv/ruby-build', '9bc9971'),
(
'ruby-download',
'git://github.com/garnieretienne/rvm-download',
'09bd7c6',
),
)
RESOURCES_DIR = os.path.abspath(
os.path.join(os.path.dirname(__file__), 'resources')
)
def make_archive(name, repo, ref, destdir):
"""Makes an archive of a repository in the given destdir.
:param text name: Name to give the archive. For instance foo. The file
that is created will be called foo.tar.gz.
:param text repo: Repository to clone.
:param text ref: Tag/SHA/branch to check out.
:param text destdir: Directory to place archives in.
"""
output_path = os.path.join(destdir, name + '.tar.gz')
with tmpdir() as tempdir:
# Clone the repository to the temporary directory
cmd_output('git', 'clone', repo, tempdir)
with cwd(tempdir):
cmd_output('git', 'checkout', ref)
# We don't want the '.git' directory
# It adds a bunch of size to the archive and we don't use it at
# runtime
rmtree(os.path.join(tempdir, '.git'))
with tarfile.open(five.n(output_path), 'w|gz') as tf:
tf.add(tempdir, name)
return output_path
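# Example: the loop in main() below effectively runs calls such as
#   make_archive('rbenv', 'git://github.com/rbenv/rbenv', 'e60ad4a', RESOURCES_DIR)
# producing RESOURCES_DIR/rbenv.tar.gz with the checked-out tree inside.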
def main():
for archive_name, repo, ref in REPOS:
output.write_line('Making {}.tar.gz for {}@{}'.format(
archive_name, repo, ref,
))
make_archive(archive_name, repo, ref, RESOURCES_DIR)
if __name__ == '__main__':
exit(main())
|
mit
| -5,814,434,410,959,443,000
| 27.875
| 76
| 0.644541
| false
| 3.511824
| false
| false
| false
|
coddingtonbear/d-rats
|
d_rats/gps.py
|
1
|
33132
|
import re
import time
import tempfile
import platform
import datetime
import subst
import threading
import serial
import socket
from math import pi,cos,acos,sin,atan2
import utils
if __name__ == "__main__":
import gettext
gettext.install("D-RATS")
TEST = "$GPGGA,180718.02,4531.3740,N,12255.4599,W,1,07,1.4,50.6,M,-21.4,M,,*63 KE7JSS ,440.350+ PL127.3"
EARTH_RADIUS = 3963.1
EARTH_UNITS = "mi"
DEGREE = u"\u00b0"
DPRS_TO_APRS = {}
# The DPRS to APRS mapping is pretty horrific, but the following
# attempts to create a mapping based on looking at the javascript
# for DPRSCalc and a list of regular APRS symbols
#
# http://ham-shack.com/aprs_pri_symbols.html
# http://www.aprs-is.net/DPRSCalc.aspx
for i in range(0, 26):
asciival = ord("A") + i
char = chr(asciival)
pri = "/"
sec = "\\"
DPRS_TO_APRS["P%s" % char] = pri + char
DPRS_TO_APRS["L%s" % char] = pri + char.lower()
DPRS_TO_APRS["A%s" % char] = sec + char
DPRS_TO_APRS["S%s" % char] = sec + char.lower()
if i <= 15:
pchar = chr(ord(" ") + i)
DPRS_TO_APRS["B%s" % char] = pri + pchar
DPRS_TO_APRS["O%s" % char] = sec + pchar
elif i >= 17:
pchar = chr(ord(" ") + i + 9)
DPRS_TO_APRS["M%s" % char] = pri + pchar
DPRS_TO_APRS["N%s" % char] = sec + pchar
if i <= 5:
char = chr(ord("S") + i)
pchar = chr(ord("[") + i)
DPRS_TO_APRS["H%s" % char] = pri + pchar
DPRS_TO_APRS["D%s" % char] = sec + pchar
#for k in sorted(DPRS_TO_APRS.keys()):
# print "%s => %s" % (k, DPRS_TO_APRS[k])
APRS_TO_DPRS = {}
for k,v in DPRS_TO_APRS.items():
APRS_TO_DPRS[v] = k
def dprs_to_aprs(symbol):
if len(symbol) < 2:
print "Invalid DPRS symbol: `%s'" % symbol
return None
else:
return DPRS_TO_APRS.get(symbol[0:2], None)
def parse_dms(string):
string = string.replace(u"\u00b0", " ")
string = string.replace('"', ' ')
string = string.replace("'", ' ')
string = string.replace(' ', ' ')
string = string.strip()
try:
(d, m, s) = string.split(' ', 3)
deg = int(d)
min = int(m)
sec = float(s)
except Exception, e:
deg = min = sec = 0
if deg < 0:
mul = -1
else:
mul = 1
deg = abs(deg)
return (deg + (min / 60.0) + (sec / 3600.0)) * mul
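# Example: parse_dms(u"45\u00b0 31' 22.40\"") returns
# 45 + 31/60.0 + 22.4/3600.0, roughly 45.5229 decimal degrees.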
def set_units(units):
global EARTH_RADIUS
global EARTH_UNITS
if units == _("Imperial"):
EARTH_RADIUS = 3963.1
EARTH_UNITS = "mi"
elif units == _("Metric"):
EARTH_RADIUS = 6380.0
EARTH_UNITS = "km"
print "Set GPS units to %s" % units
def value_with_units(value):
if value < 0.5:
if EARTH_UNITS == "km":
scale = 1000
units = "m"
elif EARTH_UNITS == "mi":
scale = 5280
units = "ft"
else:
scale = 1
units = EARTH_UNITS
else:
scale = 1
units = EARTH_UNITS
return "%.2f %s" % (value * scale, units)
def NMEA_checksum(string):
checksum = 0
for i in string:
checksum ^= ord(i)
return "*%02x" % checksum
def GPSA_checksum(string):
def calc(buf):
icomcrc = 0xffff
for _char in buf:
char = ord(_char)
for i in range(0, 8):
xorflag = (((icomcrc ^ char) & 0x01) == 0x01)
icomcrc = (icomcrc >> 1) & 0x7fff
if xorflag:
icomcrc ^= 0x8408
char = (char >> 1) & 0x7f
return (~icomcrc) & 0xffff
return calc(string)
def DPRS_checksum(callsign, msg):
csum = 0
string = "%-8s,%s" % (callsign, msg)
for i in string:
csum ^= ord(i)
return "*%02X" % csum
def deg2rad(deg):
return deg * (pi / 180)
def rad2deg(rad):
return rad / (pi / 180)
def dm2deg(deg, min):
return deg + (min / 60.0)
def deg2dm(decdeg):
deg = int(decdeg)
min = (decdeg - deg) * 60.0
return deg, min
def nmea2deg(nmea, dir="N"):
deg = int(nmea) / 100
try:
min = nmea % (deg * 100)
except ZeroDivisionError, e:
min = int(nmea)
if dir == "S" or dir == "W":
m = -1
else:
m = 1
return dm2deg(deg, min) * m
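# Example: nmea2deg(4531.3740, "N") -> 45 + 31.3740/60, about 45.5229,
# since NMEA packs coordinates as ddmm.mmmm (degrees * 100 + minutes).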
def deg2nmea(deg):
deg, min = deg2dm(deg)
return (deg * 100) + min
def meters2feet(meters):
return meters * 3.2808399
def feet2meters(feet):
return feet * 0.3048
def distance(lat_a, lon_a, lat_b, lon_b):
lat_a = deg2rad(lat_a)
lon_a = deg2rad(lon_a)
lat_b = deg2rad(lat_b)
lon_b = deg2rad(lon_b)
earth_radius = EARTH_RADIUS
#print "cos(La)=%f cos(la)=%f" % (cos(lat_a), cos(lon_a))
#print "cos(Lb)=%f cos(lb)=%f" % (cos(lat_b), cos(lon_b))
#print "sin(la)=%f" % sin(lon_a)
#print "sin(lb)=%f" % sin(lon_b)
#print "sin(La)=%f sin(Lb)=%f" % (sin(lat_a), sin(lat_b))
#print "cos(lat_a) * cos(lon_a) * cos(lat_b) * cos(lon_b) = %f" % (\
# cos(lat_a) * cos(lon_a) * cos(lat_b) * cos(lon_b))
#print "cos(lat_a) * sin(lon_a) * cos(lat_b) * sin(lon_b) = %f" % (\
# cos(lat_a) * sin(lon_a) * cos(lat_b) * sin(lon_b))
#print "sin(lat_a) * sin(lat_b) = %f" % (sin(lat_a) * sin(lat_b))
tmp = (cos(lat_a) * cos(lon_a) * \
cos(lat_b) * cos(lon_b)) + \
(cos(lat_a) * sin(lon_a) * \
cos(lat_b) * sin(lon_b)) + \
(sin(lat_a) * sin(lat_b))
# Correct round-off error (which is just *silly*)
if tmp > 1:
tmp = 1
elif tmp < -1:
tmp = -1
distance = acos(tmp)
return distance * earth_radius
def parse_date(string, fmt):
try:
return datetime.datetime.strptime(string, fmt)
except AttributeError, e:
print "Enabling strptime() workaround for Python <= 2.4.x"
vals = {}
for c in "mdyHMS":
i = fmt.index(c)
vals[c] = int(string[i-1:i+1])
if len(vals.keys()) != (len(fmt) / 2):
raise Exception("Not all date bits converted")
return datetime.datetime(vals["y"] + 2000,
vals["m"],
vals["d"],
vals["H"],
vals["M"],
vals["S"])
class GPSPosition(object):
"""Represents a position on the globe, either from GPS data or a static
positition"""
def _from_coords(self, lat, lon, alt=0):
try:
self.latitude = float(lat)
except ValueError:
self.latitude = parse_dms(lat)
try:
self.longitude = float(lon)
except ValueError:
self.longitude = parse_dms(lon)
self.altitude = float(alt)
self.satellites = 3
self.valid = True
def _parse_dprs_comment(self):
symbol = self.comment[0:4].strip()
astidx = self.comment.rindex("*")
checksum = self.comment[astidx:]
_checksum = DPRS_checksum(self.station, self.comment[:astidx])
if int(_checksum[1:], 16) != int(checksum[1:], 16):
print "CHECKSUM(%s): %s != %s" % (self.station,
int(_checksum[1:], 16),
int(checksum[1:], 16))
#print "Failed to parse DPRS comment:"
#print " Comment: |%s|" % self.comment
#print " Check: %s %s (%i)" % (checksum, _checksum, astidx)
raise Exception("DPRS checksum failed")
self.APRSIcon = dprs_to_aprs(symbol)
self.comment = self.comment[4:astidx].strip()
def __init__(self, lat=0, lon=0, station="UNKNOWN"):
self.valid = False
self.altitude = 0
self.satellites = 0
self.station = station
self.comment = ""
self.current = None
self.date = datetime.datetime.now()
self.speed = None
self.direction = None
self.APRSIcon = None
self._original_comment = ""
self._from_coords(lat, lon)
def __iadd__(self, update):
self.station = update.station
if not update.valid:
return self
if update.satellites:
self.satellites = update.satellites
if update.altitude:
self.altitude = update.altitude
self.latitude = update.latitude
self.longitude = update.longitude
self.date = update.date
if update.speed:
self.speed = update.speed
if update.direction:
self.direction = update.direction
if update.comment:
self.comment = update.comment
self._original_comment = update._original_comment
if update.APRSIcon:
self.APRSIcon = update.APRSIcon
return self
def __str__(self):
if self.valid:
if self.current:
dist = self.distance_from(self.current)
bear = self.current.bearing_to(self)
distance = " - %.1f %s " % (dist, EARTH_UNITS) + \
_("away") + \
" @ %.1f " % bear + \
_("degrees")
else:
distance = ""
if self.comment:
comment = " (%s)" % self.comment
else:
comment = ""
if self.speed and self.direction:
if EARTH_UNITS == "mi":
speed = "%.1f mph" % (float(self.speed) * 1.15077945)
elif EARTH_UNITS == "m":
speed = "%.1f km/h" % (float(self.speed) * 1.852)
else:
speed = "%.2f knots" % float(self.speed)
dir = " (" + _("Heading") +" %.0f at %s)" % (self.direction,
speed)
else:
dir = ""
if EARTH_UNITS == "mi":
alt = "%i ft" % meters2feet(self.altitude)
else:
alt = "%i m" % self.altitude
return "%s " % self.station + \
_("reporting") + \
" %.4f,%.4f@%s at %s%s%s%s" % ( \
self.latitude,
self.longitude,
alt,
self.date.strftime("%H:%M:%S"),
subst.subst_string(comment),
distance,
dir)
else:
return "(" + _("Invalid GPS data") + ")"
def _NMEA_format(self, val, latitude):
if latitude:
if val > 0:
d = "N"
else:
d = "S"
else:
if val > 0:
d = "E"
else:
d = "W"
return "%.3f,%s" % (deg2nmea(abs(val)), d)
def station_format(self):
if " " in self.station:
call, extra = self.station.split(" ", 1)
sta = "%-7.7s%1.1s" % (call.strip(),
extra.strip())
else:
sta = self.station
return sta
def to_NMEA_GGA(self, ssid=" "):
"""Returns an NMEA-compliant GPGGA sentence"""
date = time.strftime("%H%M%S")
lat = self._NMEA_format(self.latitude, True)
lon = self._NMEA_format(self.longitude, False)
data = "GPGGA,%s,%s,%s,1,%i,0,%i,M,0,M,," % ( \
date,
lat,
lon,
self.satellites,
self.altitude)
sta = self.station_format()
# If we had an original comment (with some encoding), use that instead
if self._original_comment:
com = self._original_comment
else:
com = self.comment
return "$%s%s\r\n%-8.8s,%-20.20s\r\n" % (data,
NMEA_checksum(data),
sta,
com)
def to_NMEA_RMC(self):
"""Returns an NMEA-compliant GPRMC sentence"""
tstamp = time.strftime("%H%M%S")
dstamp = time.strftime("%d%m%y")
lat = self._NMEA_format(self.latitude, True)
lon = self._NMEA_format(self.longitude, False)
if self.speed:
speed = "%03.1f" % self.speed
else:
speed = "000.0"
if self.direction:
dir = "%03.1f" % self.direction
else:
dir = "000.0"
data = "GPRMC,%s,A,%s,%s,%s,%s,%s,000.0,W" % ( \
tstamp,
lat,
lon,
speed,
dir,
dstamp)
sta = self.station_format()
return "$%s%s\r\n%-8.8s,%-20.20s\r\n" % (data,
NMEA_checksum(data),
sta,
self.comment)
def to_APRS(self, dest="APRATS", symtab="/", symbol=">"):
"""Returns a GPS-A (APRS-compliant) string"""
stamp = time.strftime("%H%M%S", time.gmtime())
if " " in self.station:
sta = self.station.replace(" ", "-")
else:
sta = self.station
s = "%s>%s,DSTAR*:/%sh" % (sta, dest, stamp)
if self.latitude > 0:
ns = "N"
Lm = 1
else:
ns = "S"
Lm = -1
if self.longitude > 0:
ew = "E"
lm = 1
else:
ew = "W"
lm = -1
s += "%07.2f%s%s%08.2f%s%s" % (deg2nmea(self.latitude * Lm), ns,
symtab,
deg2nmea(self.longitude * lm), ew,
symbol)
if self.speed and self.direction:
s += "%03.0f/%03.0f" % (float(self.direction), float(self.speed))
if self.altitude:
s += "/A=%06i" % meters2feet(float(self.altitude))
else:
s += "/"
if self.comment:
l = 43
if self.altitude:
l -= len("/A=xxxxxx")
s += "%s" % self.comment[:l]
s += "\r"
return "$$CRC%04X,%s\n" % (GPSA_checksum(s), s)
def set_station(self, station, comment="D-RATS"):
self.station = station
self.comment = comment
self._original_comment = comment
if len(self.comment) >=7 and "*" in self.comment[-3:-1]:
self._parse_dprs_comment()
def distance_from(self, pos):
return distance(self.latitude, self.longitude,
pos.latitude, pos.longitude)
def bearing_to(self, pos):
lat_me = deg2rad(self.latitude)
lon_me = deg2rad(self.longitude)
lat_u = deg2rad(pos.latitude)
lon_u = deg2rad(pos.longitude)
lat_d = deg2rad(pos.latitude - self.latitude)
lon_d = deg2rad(pos.longitude - self.longitude)
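        # Standard initial great-circle bearing:
        #   atan2(sin(dLon)*cos(lat2), cos(lat1)*sin(lat2) - sin(lat1)*cos(lat2)*cos(dLon))
        # normalized below from (-180, 180] to the [0, 360) compass range.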
y = sin(lon_d) * cos(lat_u)
x = cos(lat_me) * sin(lat_u) - \
sin(lat_me) * cos(lat_u) * cos(lon_d)
bearing = rad2deg(atan2(y, x))
return (bearing + 360) % 360
def set_relative_to_current(self, current):
self.current = current
def coordinates(self):
return "%.4f,%.4f" % (self.latitude, self.longitude)
def fuzzy_to(self, pos):
dir = self.bearing_to(pos)
dirs = ["N", "NNE", "NE", "ENE", "E",
"ESE", "SE", "SSE", "S",
"SSW", "SW", "WSW", "W",
"WNW", "NW", "NNW"]
delta = 22.5
angle = 0
direction = "?"
for i in dirs:
if dir > angle and dir < (angle + delta):
direction = i
angle += delta
return "%.1f %s %s" % (self.distance_from(pos),
EARTH_UNITS,
direction)
class NMEAGPSPosition(GPSPosition):
"""A GPSPosition initialized from a NMEA sentence"""
def _test_checksum(self, string, csum):
try:
idx = string.index("*")
except:
print "String does not contain '*XY' checksum"
return False
segment = string[1:idx]
csum = csum.upper()
_csum = NMEA_checksum(segment).upper()
if csum != _csum:
print "Failed checksum: %s != %s" % (csum, _csum)
return csum == _csum
def _parse_GPGGA(self, string):
elements = string.split(",", 14)
if len(elements) < 15:
raise Exception("Unable to split GPGGA" % len(elements))
t = time.strftime("%m%d%y") + elements[1]
if "." in t:
t = t.split(".")[0]
self.date = parse_date(t, "%m%d%y%H%M%S")
self.latitude = nmea2deg(float(elements[2]), elements[3])
self.longitude = nmea2deg(float(elements[4]), elements[5])
print "%f,%f" % (self.latitude, self.longitude)
self.satellites = int(elements[7])
self.altitude = float(elements[9])
m = re.match("^([0-9]*)(\*[A-z0-9]{2})\r?\n?(.*)$", elements[14])
if not m:
raise Exception("No checksum (%s)" % elements[14])
csum = m.group(2)
if "," in m.group(3):
sta, com = m.group(3).split(",", 1)
if not sta.strip().startswith("$"):
self.station = utils.filter_to_ascii(sta.strip()[0:8])
self.comment = utils.filter_to_ascii(com.strip()[0:20])
self._original_comment = self.comment
if len(self.comment) >=7 and "*" in self.comment[-3:-1]:
self._parse_dprs_comment()
self.valid = self._test_checksum(string, csum)
def _parse_GPRMC(self, string):
if "\r\n" in string:
nmea, station = string.split("\r\n", 1)
else:
nmea = string
station = ""
elements = nmea.split(",", 12)
if len(elements) < 12:
raise Exception("Unable to split GPRMC (%i)" % len(elements))
t = elements[1]
d = elements[9]
if "." in t:
t = t.split(".", 2)[0]
self.date = parse_date(d+t, "%d%m%y%H%M%S")
self.latitude = nmea2deg(float(elements[3]), elements[4])
self.longitude = nmea2deg(float(elements[5]), elements[6])
self.speed = float(elements[7])
self.direction = float(elements[8])
if "*" in elements[11]:
end = 11 # NMEA <=2.0
elif "*" in elements[12]:
end = 12 # NMEA 2.3
else:
raise Exception("GPRMC has no checksum in 12 or 13")
m = re.match("^.?(\*[A-z0-9]{2})", elements[end])
if not m:
print "Invalid end: %s" % elements[end]
return
csum = m.group(1)
if "," in station:
sta, com = station.split(",", 1)
self.station = utils.filter_to_ascii(sta.strip())
self.comment = utils.filter_to_ascii(com.strip())
self._original_comment = self.comment
if len(self.comment) >= 7 and "*" in self.comment[-3:-1]:
self._parse_dprs_comment()
if elements[2] != "A":
self.valid = False
print "GPRMC marked invalid by GPS (%s)" % elements[2]
else:
print "GPRMC is valid"
self.valid = self._test_checksum(string, csum)
def _from_NMEA_GPGGA(self, string):
string = string.replace('\r', ' ')
string = string.replace('\n', ' ')
try:
self._parse_GPGGA(string)
except Exception, e:
import traceback
import sys
traceback.print_exc(file=sys.stdout)
print "Invalid GPS data: %s" % e
self.valid = False
def _from_NMEA_GPRMC(self, string):
try:
self._parse_GPRMC(string)
except Exception, e:
import traceback
import sys
traceback.print_exc(file=sys.stdout)
print "Invalid GPS data: %s" % e
self.valid = False
def __init__(self, sentence, station=_("UNKNOWN")):
GPSPosition.__init__(self)
if sentence.startswith("$GPGGA"):
self._from_NMEA_GPGGA(sentence)
elif sentence.startswith("$GPRMC"):
self._from_NMEA_GPRMC(sentence)
else:
print "Unsupported GPS sentence type: %s" % sentence
class APRSGPSPosition(GPSPosition):
def _parse_date(self, string):
prefix = string[0]
suffix = string[-1]
digits = string[1:-1]
if suffix == "z":
ds = digits[0:2] + \
time.strftime("%m%y", time.gmtime()) + \
digits[2:] + "00"
elif suffix == "/":
ds = digits[0:2] + time.strftime("%m%y") + digits[2:] + "00"
elif suffix == "h":
ds = time.strftime("%d%m%y", time.gmtime()) + digits
else:
print "Unknown APRS date suffix: `%s'" % suffix
return datetime.datetime.now()
d = parse_date(ds, "%d%m%y%H%M%S")
if suffix in "zh":
delta = datetime.datetime.utcnow() - datetime.datetime.now()
else:
delta = datetime.timedelta(0)
return d - delta
def _parse_GPSA(self, string):
m = re.match("^\$\$CRC([A-Z0-9]{4}),(.*)$", string)
if not m:
return
crc = m.group(1)
_crc = "%04X" % GPSA_checksum(m.group(2))
if crc != _crc:
print "APRS CRC mismatch: %s != %s (%s)" % (crc, _crc, m.group(2))
return
elements = string.split(",")
if not elements[0].startswith("$$CRC"):
print "Missing $$CRC..."
return
self.station, dst = elements[1].split(">")
path, data = elements[2].split(":")
# 1 = Entire stamp or ! or =
# 2 = stamp prefix
# 3 = stamp suffix
# 4 = latitude
# 5 = N/S
# 6 = symbol table
# 7 = longitude
# 8 = E/W
# 9 = symbol
#10 = comment
#11 = altitude string
expr = "^(([@/])[0-9]{6}([/hz])|!|=)" + \
"([0-9]{1,4}\.[0-9]{2})([NS])(.)?" + \
"([0-9]{5}\.[0-9]{2})([EW])(.)" + \
"([^/]*)(/A=[0-9]{6})?"
m = re.search(expr, data)
if not m:
print "Did not match GPS-A: `%s'" % data
return
if m.group(1) in "!=":
self.date = datetime.datetime.now()
elif m.group(2) in "@/":
self.date = self._parse_date(m.group(1))
else:
print "Unknown timestamp prefix: %s" % m.group(1)
self.date = datetime.datetime.now()
self.latitude = nmea2deg(float(m.group(4)), m.group(5))
self.longitude = nmea2deg(float(m.group(7)), m.group(8))
self.comment = m.group(10).strip()
self._original_comment = self.comment
self.APRSIcon = m.group(6) + m.group(9)
if len(m.groups()) == 11 and m.group(11):
_, alt = m.group(11).split("=")
self.altitude = feet2meters(int(alt))
self.valid = True
def _from_APRS(self, string):
self.valid = False
try:
self._parse_GPSA(string)
except Exception, e:
print "Invalid APRS: %s" % e
return False
return self.valid
def __init__(self, message):
GPSPosition.__init__(self)
self._from_APRS(message)
class MapImage(object):
def __init__(self, center):
self.key = "ABQIAAAAWot3KuWpenfCAGfQ65FdzRTaP0xjRaMPpcw6bBbU2QUEXQBgHBR5Rr2HTGXYVWkcBFNkPvxtqV4VLg"
self.center = center
self.markers = [center]
def add_markers(self, markers):
self.markers += markers
def get_image_url(self):
el = [ "key=%s" % self.key,
"center=%s" % self.center.coordinates(),
"size=400x400"]
mstr = "markers="
index = ord("a")
for m in self.markers:
mstr += "%s,blue%s|" % (m.coordinates(), chr(index))
index += 1
el.append(mstr)
return "http://maps.google.com/staticmap?%s" % ("&".join(el))
def station_table(self):
table = ""
index = ord('A')
for m in self.markers:
table += "<tr><td>%s</td><td>%s</td><td>%s</td>\n" % (\
chr(index),
m.station,
m.coordinates())
index += 1
return table
def make_html(self):
return """
<html>
<head>
<title>Known stations</title>
</head>
<body>
<h1> Known Stations </h1>
<img src="%s"/><br/><br/>
<table border="1">
%s
</table>
</body>
</html>
""" % (self.get_image_url(), self.station_table())
def display_in_browser(self):
f = tempfile.NamedTemporaryFile(suffix=".html")
name = f.name
f.close()
f = file(name, "w")
f.write(self.make_html())
f.flush()
f.close()
p = platform.get_platform()
p.open_html_file(f.name)
class GPSSource(object):
def __init__(self, port, rate=4800):
self.port = port
self.enabled = False
self.broken = None
try:
self.serial = serial.Serial(port=port, baudrate=rate, timeout=1)
except Exception, e:
print "Unable to open port `%s': %s" % (port, e)
self.broken = _("Unable to open GPS port")
self.thread = None
self.last_valid = False
self.position = GPSPosition()
def start(self):
if self.broken:
print "Not starting broken GPSSource"
return
self.invalid = 100
self.enabled = True
self.thread = threading.Thread(target=self.gpsthread)
self.thread.setDaemon(True)
self.thread.start()
def stop(self):
if self.thread and self.enabled:
self.enabled = False
self.thread.join()
self.serial.close()
def gpsthread(self):
while self.enabled:
data = self.serial.read(1024)
lines = data.split("\r\n")
for line in lines:
if line.startswith("$GPGGA") or \
line.startswith("$GPRMC"):
position = NMEAGPSPosition(line)
if position.valid and line.startswith("$GPRMC"):
self.invalid = 0
elif self.invalid < 10:
self.invalid += 1
if position.valid and self.position.valid:
self.position += position
print _("ME") + ": %s" % self.position
elif position.valid:
self.position = position
else:
print "Could not parse: %s" % line
def get_position(self):
return self.position
def status_string(self):
if self.broken:
return self.broken
elif self.invalid < 10 and self.position.satellites >= 3:
return _("GPS Locked") + " (%i sats)" % self.position.satellites
else:
return _("GPS Not Locked")
class NetworkGPSSource(GPSSource):
def __init__(self, port):
self.port = port
self.enabled = False
self.thread = None
self.position = GPSPosition()
self.last_valid = False
self.sock = None
self.broken = None
def start(self):
self.enabled = True
self.thread = threading.Thread(target=self.gpsthread)
self.thread.setDaemon(True)
self.thread.start()
def stop(self):
if self.thread and self.enabled:
self.enabled = False
self.thread.join()
def connect(self):
try:
_, host, port = self.port.split(":", 3)
port = int(port)
except ValueError, e:
print "Unable to parse %s (%s)" % (self.port, e)
self.broken = _("Unable to parse address")
return False
print "Connecting to %s:%i" % (host, port)
try:
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.sock.connect((host, port))
self.sock.settimeout(10)
except Exception, e:
print "Unable to connect: %s" % e
self.broken = _("Unable to connect") + ": %s" % e
self.sock = None
return False
self.sock.send("r\n")
return True
def gpsthread(self):
while self.enabled:
if not self.sock:
if not self.connect():
time.sleep(1)
continue
try:
data = self.sock.recv(1024)
except Exception, e:
self.sock.close()
self.sock = None
print _("GPSd Socket closed")
continue
line = data.strip()
if not (line.startswith("$GPGGA") or \
line.startswith("$GPRMC")):
continue
pos = NMEAGPSPosition(line)
self.last_valid = pos.valid
if pos.valid and self.position.valid:
self.position += pos
elif pos.valid:
self.position = pos
else:
print "Could not parse: %s" % line
def get_position(self):
return self.position
def status_string(self):
if self.broken:
return self.broken
elif self.last_valid and self.position.satellites >= 3:
return _("GPSd Locked") + " (%i sats)" % self.position.satellites
else:
return _("GPSd Not Locked")
class StaticGPSSource(GPSSource):
def __init__(self, lat, lon, alt=0):
self.lat = lat
self.lon = lon
self.alt = alt
self.position = GPSPosition(self.lat, self.lon)
self.position.altitude = int(float(alt))
if EARTH_UNITS == "mi":
# This is kinda ugly, but assume we're given altitude in the same
# type of units as we've been asked to display
self.position.altitude = feet2meters(self.position.altitude)
def start(self):
pass
def stop(self):
pass
def get_position(self):
return self.position
def status_string(self):
return _("Static position")
def parse_GPS(string):
fixes = []
while "$" in string:
try:
if "$GPGGA" in string:
fixes.append(NMEAGPSPosition(string[string.index("$GPGGA"):]))
string = string[string.index("$GPGGA")+6:]
elif "$GPRMC" in string:
fixes.append(NMEAGPSPosition(string[string.index("$GPRMC"):]))
string = string[string.index("$GPRMC")+6:]
elif "$$CRC" in string:
return APRSGPSPosition(string[string.index("$$CRC"):])
else:
string = string[string.index("$")+1:]
except Exception, e:
print "Exception during GPS parse: %s" % e
string = string[string.index("$")+1:]
if not fixes:
return None
fix = fixes[0]
fixes = fixes[1:]
for extra in fixes:
print "Appending fix: %s" % extra
fix += extra
return fix
if __name__ == "__main__":
nmea_strings = [
"$GPRMC,010922,A,4603.6695,N,07307.3033,W,0.6,66.8,060508,16.1,W,A*1D\r\nVE2SE 9,MV VE2SE@RAC.CA*32",
"$GPGGA,203008.78,4524.9729,N,12246.9580,W,1,03,3.8,00133,M,,,,*39",
"$GPGGA,183324.518,4533.0875,N,12254.5939,W,2,04,3.4,48.6,M,-19.6,M,1.2,0000*74",
"$GPRMC,215348,A,4529.3672,N,12253.2060,W,0.0,353.8,030508,17.5,E,D*3C",
"$GPGGA,075519,4531.254,N,12259.400,W,1,3,0,0.0,M,0,M,,*55\r\nK7HIO ,GPS Info",
"$GPRMC,074919.04,A,4524.9698,N,12246.9520,W,00.0,000.0,260508,19.,E*79",
"$GPRMC,123449.089,A,3405.1123,N,08436.4301,W,000.0,000.0,021208,,,A*71",
"$GPRMC,123449.089,A,3405.1123,N,08436.4301,W,000.0,000.0,021208,,,A*71\r\nKK7DS M,LJ DAN*C",
"$GPRMC,230710,A,2748.1414,N,08238.5556,W,000.0,033.1,111208,004.3,W*77",
]
print "-- NMEA --"
for s in nmea_strings:
p = NMEAGPSPosition(s)
if p.valid:
print "Pass: %s" % str(p)
else:
print "** FAIL: %s" % s
aprs_strings = [
"$$CRCCE3E,AE5PL-T>API282,DSTAR*:!3302.39N/09644.66W>/\r",
"$$CRC1F72,KI4IFW-1>APRATS,DSTAR*:@291930/4531.50N/12254.98W>APRS test beacon /A=000022",
"$$CRC80C3,VA2PBI>APU25N,DSTAR*:=4539.33N/07330.28W-73 de Pierre D-Star Montreal {UIV32N}",
"$$CRCA31F,VA2PBI>API282,DSTAR*:/221812z4526.56N07302.34W/\r",
'$$CRCF471,AB9FT-ML>APRATS,DSTAR*:@214235h0.00S/00000.00W>ON D-RATS at Work\r',
]
print "\n-- GPS-A --"
for s in aprs_strings:
p = APRSGPSPosition(s)
if p.valid:
print "Pass: %s" % str(p)
else:
print "** FAIL: %s" % s
|
gpl-3.0
| 4,689,796,423,073,490,000
| 28.063158
| 111
| 0.489165
| false
| 3.367758
| false
| false
| false
|
kirti3192/spoken-website
|
mdldjango/views.py
|
1
|
15036
|
from django.shortcuts import render
from django.views.decorators.csrf import csrf_exempt
from django.core.context_processors import csrf
from models import MdlUser
from events.models import TrainingAttendance
from django.http import HttpResponse, HttpResponseRedirect
from django.contrib.auth.decorators import login_required
from forms import *
from django.contrib import messages
import xml.etree.cElementTree as etree
from xml.etree.ElementTree import ElementTree
# Create your views here.
import hashlib
import csv, os, time
from django.core.exceptions import PermissionDenied
from events.views import *
from events.models import *
from django.conf import settings
from events.forms import OrganiserForm
from django.core.mail import EmailMultiAlternatives
from validate_email import validate_email
from get_or_create_participant import get_or_create_participant, encript_password, check_csvfile
def authenticate(username = None, password = None):
try:
#print " i am in moodle auth"
user = MdlUser.objects.get(username=username)
#print user
pwd = user.password
p = encript_password(password)
pwd_valid = (pwd == p)
#print pwd
#print "------------"
if user and pwd_valid:
return user
except Exception, e:
#print e
#print "except ---"
return None
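# Note: authentication compares hashed values only; encript_password()
# (imported above) is assumed to apply the same hashing scheme Moodle used
# when the stored password was created.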
def mdl_logout(request):
if 'mdluserid' in request.session:
del request.session['mdluserid']
request.session.save()
#print "logout !!"
return HttpResponseRedirect('/participant/login')
def mdl_login(request):
messages = {}
if request.POST:
username = request.POST["username"]
password = request.POST["password"]
if not username or not password:
messages['error'] = "Please enter valide Username and Password!"
#return HttpResponseRedirect('/participant/login')
user = authenticate(username = username, password = password)
if user:
request.session['mdluserid'] = user.id
request.session['mdluseremail'] = user.email
request.session['mdlusername'] = user.username
request.session['mdluserinstitution'] = user.institution
request.session.save()
request.session.modified = True
else:
messages['error'] = "Username or Password Doesn't match!"
if request.session.get('mdluserid'):
#print "Current user is ", request.session.get('mdluserid')
return HttpResponseRedirect('/participant/index')
context = {'message':messages}
context.update(csrf(request))
return render(request, 'mdl/templates/mdluser_login.html', context)
def index(request):
mdluserid = request.session.get('mdluserid')
mdlusername = request.session.get('mdlusername')
if not mdluserid:
return HttpResponseRedirect('/participant/login')
try:
mdluser = MdlUser.objects.get(id=mdluserid)
except:
return HttpResponseRedirect('/participant/login')
if str(mdluser.institution).isdigit():
academic = None
try:
academic = AcademicCenter.objects.get(id = mdluser.institution)
except:
pass
if academic:
category = int(request.GET.get('category', 4))
if not (category > 0 and category < 6):
return HttpResponseRedirect('/participant/index/?category=4')
upcoming_workshop = None
upcoming_test = None
past_workshop = None
past_test = None
ongoing_test = None
if category == 3:
upcoming_workshop = Training.objects.filter((Q(status = 0) | Q(status = 1) | Q(status = 2) | Q(status = 3)), academic_id=mdluser.institution, tdate__gte=datetime.date.today()).order_by('-tdate')
if category == 5:
upcoming_test = Test.objects.filter(status=2, academic_id=mdluser.institution, tdate__gt=datetime.date.today()).order_by('-tdate')
if category == 1:
past_workshop = Training.objects.filter(id__in = TrainingAttendance.objects.filter(mdluser_id = mdluser.id).values_list('training_id'), status = 4).order_by('-tdate')
if category == 2:
past_test = Test.objects.filter(id__in = TestAttendance.objects.filter(mdluser_id = mdluser.id).values_list('test_id'), status = 4).order_by('-tdate')
if category == 4:
ongoing_test = Test.objects.filter(status=3, academic_id=mdluser.institution, tdate = datetime.date.today()).order_by('-tdate')
print past_workshop, "******************8"
context = {
'mdluserid' : mdluserid,
'mdlusername' : mdlusername,
'upcoming_workshop' : upcoming_workshop,
'upcoming_test' : upcoming_test,
'past_workshop' : past_workshop,
'past_test' : past_test,
'ongoing_test' : ongoing_test,
'category' : category,
'ONLINE_TEST_URL' : settings.ONLINE_TEST_URL
}
context.update(csrf(request))
return render(request, 'mdl/templates/mdluser_index.html', context)
form = OrganiserForm()
if request.method == 'POST':
form = OrganiserForm(request.POST)
if form.is_valid():
mdluser.institution = form.cleaned_data['college']
mdluser.save()
return HttpResponseRedirect('/participant/index')
context = {
'form' : form
}
context.update(csrf(request))
return render(request, 'mdl/templates/academic.html', context)
@login_required
def offline_details(request, wid, category):
user = request.user
wid = int(wid)
category = int(category)
#print category
user = request.user
form = OfflineDataForm()
try:
if category == 1:
Training.objects.get(pk=wid, status__lt=4)
elif category == 2:
Training.objects.get(pk=wid, status__lt=4)
else:
raise PermissionDenied('You are not allowed to view this page!')
except Exception, e:
raise PermissionDenied('You are not allowed to view this page!')
if request.method == 'POST':
form = OfflineDataForm(request.POST, request.FILES)
try:
if category == 1:
w = Training.objects.get(id = wid)
elif category == 2:
w = Training.objects.get(id = wid)
else:
raise PermissionDenied('You are not allowed to view this page!')
except:
raise PermissionDenied('You are not allowed to view this page!')
if form.is_valid():
file_path = settings.MEDIA_ROOT + 'training/' + str(wid) + str(time.time())
f = request.FILES['xml_file']
fout = open(file_path, 'wb+')
for chunk in f.chunks():
fout.write(chunk)
fout.close()
error_line_no = ''
csv_file_error = 0
csv_file_error, error_line_no = check_csvfile(user, file_path, w, flag=1)
os.unlink(file_path)
#update participant count
update_participants_count(w)
if error_line_no:
messages.error(request, error_line_no)
#update logs
if category == 1:
message = w.academic.institution_name+" has submited Offline "+w.foss.foss+" workshop attendance dated "+w.tdate.strftime("%Y-%m-%d")
update_events_log(user_id = user.id, role = 2, category = 0, category_id = w.id, academic = w.academic_id, status = 5)
update_events_notification(user_id = user.id, role = 2, category = 0, category_id = w.id, academic = w.academic_id, status = 5, message = message)
if not error_line_no:
messages.success(request, "Thank you for uploading the Attendance. Now make sure that you cross check and verify the details before submiting.")
return HttpResponseRedirect('/software-training/workshop/'+str(wid)+'/attendance/')
else:
message = w.academic.institution_name+" has submited Offline training attendance."
update_events_log(user_id = user.id, role = 2, category = 2, category_id = w.id, academic = w.academic_id, status = 5)
update_events_notification(user_id = user.id, role = 2, category = 2, category_id = w.id, academic = w.academic_id, status = 5, message = message)
if not error_line_no:
messages.success(request, "Thank you for uploading the Attendance. Now make sure that you cross check and verify the details before submiting.")
return HttpResponseRedirect('/software-training/training/'+str(wid)+'/attendance/')
messages.error(request, "Please Upload CSV file !")
context = {
'form': form,
}
messages.info(request, """
Please upload the CSV file which you have generated.
To know more <a href="http://process.spoken-tutorial.org/images/9/96/Upload_Attendance.pdf" target="_blank">Click here</a>.
""")
context.update(csrf(request))
return render(request, 'mdl/templates/offline_details.html', context)
def mdl_register(request):
form = RegisterForm()
if request.method == "POST":
form = RegisterForm(request.POST)
#Email exits
try:
user = MdlUser.objects.filter(email=request.POST['email']).first()
if user:
messages.success(request, "Email : "+request.POST['email']+" already registered on this website. Please click <a href='http://www.spoken-tutorial.org/participant/login/'>here </a>to login")
except Exception, e:
#print e
pass
if form.is_valid():
mdluser = MdlUser()
mdluser.auth = 'manual'
mdluser.institution = form.cleaned_data['college']
mdluser.gender = form.cleaned_data['gender']
mdluser.firstname = form.cleaned_data['firstname']
mdluser.lastname = form.cleaned_data['lastname']
mdluser.email = form.cleaned_data['email']
mdluser.username = form.cleaned_data['username']
mdluser.password = encript_password(form.cleaned_data['password'])
mdluser.confirmed = 1
mdluser.mnethostid = 1
mdluser.save()
messages.success(request, "User " + form.cleaned_data['firstname'] +" "+form.cleaned_data['lastname']+" Created!")
return HttpResponseRedirect('/participant/register/')
context = {}
context['form'] = form
context.update(csrf(request))
return render(request, 'mdl/templates/register.html', context)
def feedback(request, wid):
mdluserid = request.session.get('mdluserid')
mdlusername = request.session.get('mdlusername')
if not mdluserid:
return HttpResponseRedirect('/participant/login')
form = FeedbackForm()
mdluserid = request.session.get('mdluserid')
if not mdluserid:
return HttpResponseRedirect('/participant/login')
w = None
try:
w = Training.objects.select_related().get(pk=wid)
#check if feedback already exits
TrainingFeedback.objects.get(training_id = wid, mdluser_id = mdluserid)
messages.success(request, "We have already received your feedback. ")
return HttpResponseRedirect('/participant/index/?category=1')
except Exception, e:
#print e
pass
if request.method == 'POST':
form = FeedbackForm(request.POST)
if form.is_valid():
try:
form_data = form.save(commit=False)
form_data.training_id = wid
form_data.mdluser_id = mdluserid
form_data.save()
try:
wa = TrainingAttendance.objects.get(mdluser_id=mdluserid, training_id = wid)
wa.status = 2
wa.save()
except:
wa = TrainingAttendance()
wa.training_id = wid
wa.mdluser_id = mdluserid
wa.status = 1
wa.save()
messages.success(request, "Thank you for your valuable feedback.")
return HttpResponseRedirect('/participant/index/?category=1')
except Exception, e:
print e
pass
#return HttpResponseRedirect('/participant/index/')
context = {
'form' : form,
'w' : w,
'mdluserid' : mdluserid,
'mdlusername' : mdlusername,
}
context.update(csrf(request))
return render(request, 'mdl/templates/feedback.html', context)
def forget_password(request):
context = {}
form = PasswordResetForm()
if request.method == "POST":
form = PasswordResetForm(request.POST)
if form.is_valid():
password_string = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(8))
user = MdlUser.objects.filter(email=request.POST['email']).first()
password_encript = encript_password(password_string)
user.password = password_encript
user.save()
subject = "Spoken Tutorial Online Test password reset"
to = [user.email]
message = '''Hi {0},
Your account password at 'Spoken Tutorials Online Test Center' has been reset
and you have been issued with a new temporary password.
Your current login information is now:
username: {1}
password: {2}
Please go to this page to change your password:
{3}
In most mail programs, this should appear as a blue link
which you can just click on. If that doesn't work,
then cut and paste the address into the address
line at the top of your web browser window.
Cheers from the 'Spoken Tutorials Online Test Center' administrator,
Admin Spoken Tutorials
'''.format(user.firstname, user.username, password_string, 'http://onlinetest.spoken-tutorial.org/login/change_password.php')
# send email
email = EmailMultiAlternatives(
subject, message, 'administrator@spoken-tutorial.org',
to = to, bcc = [], cc = [],
headers={'Reply-To': 'no-replay@spoken-tutorial.org', "Content-type":"text/html;charset=iso-8859-1"}
)
result = email.send(fail_silently=False)
messages.success(request, "New password sent to your email "+user.email)
return HttpResponseRedirect('/participant/login/')
context = {
'form': form
}
context.update(csrf(request))
return render(request, 'mdl/templates/password_reset.html', context)
|
gpl-3.0
| 2,444,503,224,367,037,000
| 41.235955
| 210
| 0.60415
| false
| 4.061588
| true
| false
| false
|
IngenuityEngine/arkMath
|
test/test_helpers.py
|
1
|
2374
|
# Standard modules
from expects import *
# Our modules
import arkInit
arkInit.init()
import tryout
import arkMath
from arkMath import Mat44
class test(tryout.TestSuite):
title = 'test/test_helpers.py'
def is_vector(self):
vec = arkMath.Vec(1,2,3,4)
self.assertEqual(arkMath.isVector(vec), True)
self.assertEqual(arkMath.isVector(12), False)
def ensure_vector(self):
vec = arkMath.Vec(1,2,3,4)
ensured = arkMath.ensureVector(vec)
self.assertEqual(ensured.x, vec.x)
ensured = arkMath.ensureVector(12)
self.assertEqual(ensured.x, 12)
self.assertEqual(ensured.y, 0)
ensured = arkMath.ensureVector(12, 5, 4, 9)
self.assertEqual(ensured.x, 12)
self.assertEqual(ensured.y, 5)
self.assertEqual(ensured.z, 4)
self.assertEqual(ensured.w, 9)
ensured = arkMath.ensureVector([15, 25, 7, 2])
self.assertEqual(ensured.x, 15)
self.assertEqual(ensured.y, 25)
self.assertEqual(ensured.z, 7)
self.assertEqual(ensured.w, 2)
def is_matrix(self):
matList = [1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0]
matFromList = Mat44(matList)
vec1 = arkMath.Vec(1.0, 0.0, 0.0, 0.0)
vec2 = arkMath.Vec(0.0, 1.0, 0.0, 0.0)
vec3 = arkMath.Vec(0.0, 0.0, 1.0, 0.0)
vec4 = arkMath.Vec(0.0, 0.0, 0.0, 1.0)
matFromVecs = Mat44(vec1, vec2, vec3, vec4)
justVec = arkMath.Vec(1, 2, 3, 4)
self.assertEqual(arkMath.isMatrix(matFromList), True)
self.assertEqual(arkMath.isMatrix(matFromVecs), True)
self.assertEqual(arkMath.isMatrix(justVec), False)
# Should work if input already a matrix, 4 vectors, or 16 matrix values
def ensure_matrix(self):
matList = [1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0]
goalMat = Mat44(matList)
sixteenMat = arkMath.ensureMatrix(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0)
self.assertEqual(type(sixteenMat), type(goalMat))
vec1 = arkMath.Vec(1.0, 0.0, 0.0, 0.0)
vec2 = arkMath.Vec(0.0, 1.0, 0.0, 0.0)
vec3 = arkMath.Vec(0.0, 0.0, 1.0, 0.0)
vec4 = arkMath.Vec(0.0, 0.0, 0.0, 1.0)
vecsMat = arkMath.ensureMatrix(vec1, vec2, vec3, vec4)
self.assertEqual(type(vecsMat), type(goalMat))
# Ensure_matrix of already matrix should just return itself
selfMat = arkMath.ensureMatrix(goalMat)
self.assertEqual(type(selfMat), type(goalMat))
if __name__ == '__main__':
tryout.run(test)
|
mit
| 8,659,748,860,809,815,000
| 29.831169
| 115
| 0.667228
| false
| 2.212488
| false
| false
| false
|
leingang/plg
|
plg/utils/decorators.py
|
1
|
1819
|
#!/usr/bin/env python
import logging
def debug_entry(f):
"""
debug the entry into a function
>>> import sys
>>> import logging
The stream configuration is just to make doctests work.
In practice, you'd probably want the default stream sys.stderr.
>>> logging.basicConfig(level=logging.DEBUG,stream=sys.stdout)
>>> @debug_entry
... def f(x):
... return x*x
...
>>> f(2)
DEBUG:f:Entering: arguments=(2,), keyword arguments={}
4
"""
def new_f(*args,**kwargs):
logger=logging.getLogger(f.__name__)
logger.debug("Entering: arguments=%s, keyword arguments=%s",args,kwargs)
return f(*args,**kwargs)
new_f.__name__ = f.__name__
return new_f
def debug_result(f):
"""
Debug the result of a function
>>> import sys
>>> import logging
>>> logging.basicConfig(level=logging.DEBUG,stream=sys.stdout)
>>> @debug_result
... def f(x):
... return x*x
...
>>> f(2)+10
DEBUG:f:Result: 4
14
Decorators can be chained (that's kind of the point!).
>>> @debug_entry
... @debug_result
... def g(x):
... return 2*x
...
>>> g(3)+17
DEBUG:g:Entering: arguments=(3,), keyword arguments={}
DEBUG:g:Result: 6
23
"""
def new_f(*args,**kwargs):
logger=logging.getLogger(f.__name__)
result=f(*args,**kwargs)
logger.debug("Result: %s",repr(result))
return result
new_f.__name__ = f.__name__
return new_f
if __name__ == "__main__":
import doctest
doctest.testmod()
# from decorators import *
# import logging
# logging.basicConfig(level=logging.DEBUG)
# @debug_result
# @debug_entry
# def f(x):
# return x*x
#
#f(2)
|
gpl-3.0
| 2,352,242,054,812,354,000
| 22.333333
| 80
| 0.548103
| false
| 3.674747
| false
| false
| false
|
pitrou/numba
|
numba/targets/arrayobj.py
|
1
|
109012
|
"""
Implementation of operations on Array objects and objects supporting
the buffer protocol.
"""
from __future__ import print_function, absolute_import, division
import math
import llvmlite.llvmpy.core as lc
from llvmlite.llvmpy.core import Constant
import numpy
from numba import types, cgutils, typing
from numba.numpy_support import as_dtype
from numba.numpy_support import version as numpy_version
from numba.targets.imputils import (builtin, builtin_attr, implement,
impl_attribute, impl_attribute_generic,
iternext_impl, impl_ret_borrowed,
impl_ret_new_ref, impl_ret_untracked)
from numba.typing import signature
from . import quicksort, slicing
def increment_index(builder, val):
"""
Increment an index *val*.
"""
one = Constant.int(val.type, 1)
# We pass the "nsw" flag in the hope that LLVM understands the index
# never changes sign. Unfortunately this doesn't always work
# (e.g. ndindex()).
return builder.add(val, one, flags=['nsw'])
def set_range_metadata(builder, load, lower_bound, upper_bound):
"""
Set the "range" metadata on a load instruction.
Note the interval is in the form [lower_bound, upper_bound).
"""
range_operands = [Constant.int(load.type, lower_bound),
Constant.int(load.type, upper_bound)]
md = builder.module.add_metadata(range_operands)
load.set_metadata("range", md)
def mark_positive(builder, load):
"""
Mark the result of a load instruction as positive (or zero).
"""
upper_bound = (1 << (load.type.width - 1)) - 1
set_range_metadata(builder, load, 0, upper_bound)
def make_array(array_type):
"""
Return the Structure representation of the given *array_type*
(an instance of types.Array).
"""
base = cgutils.create_struct_proxy(array_type)
ndim = array_type.ndim
class ArrayStruct(base):
@property
def shape(self):
"""
Override .shape to inform LLVM that its elements are all positive.
"""
builder = self._builder
if ndim == 0:
return base.__getattr__(self, "shape")
# Unfortunately, we can't use llvm.assume as its presence can
# seriously pessimize performance,
# *and* the range metadata currently isn't improving anything here,
# see https://llvm.org/bugs/show_bug.cgi?id=23848 !
ptr = self._get_ptr_by_name("shape")
dims = []
for i in range(ndim):
dimptr = cgutils.gep_inbounds(builder, ptr, 0, i)
load = builder.load(dimptr)
dims.append(load)
mark_positive(builder, load)
return cgutils.pack_array(builder, dims)
return ArrayStruct
def get_itemsize(context, array_type):
"""
Return the item size for the given array or buffer type.
"""
llty = context.get_data_type(array_type.dtype)
return context.get_abi_sizeof(llty)
def load_item(context, builder, arrayty, ptr):
"""
Load the item at the given array pointer.
"""
align = None if arrayty.aligned else 1
return context.unpack_value(builder, arrayty.dtype, ptr,
align=align)
def store_item(context, builder, arrayty, val, ptr):
"""
Store the item at the given array pointer.
"""
align = None if arrayty.aligned else 1
return context.pack_value(builder, arrayty.dtype, val, ptr, align=align)
def fix_integer_index(context, builder, idxty, idx, size):
"""
Fix the integer index' type and value for the given dimension size.
"""
if idxty.signed:
ind = context.cast(builder, idx, idxty, types.intp)
ind = slicing.fix_index(builder, ind, size)
else:
ind = context.cast(builder, idx, idxty, types.uintp)
return ind
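# For signed indices, slicing.fix_index wraps negative values so they count
# from the end of the dimension; unsigned indices are only cast to uintp.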
def populate_array(array, data, shape, strides, itemsize, meminfo,
parent=None):
"""
Helper function for populating array structures.
This avoids forgetting to set fields.
*shape* and *strides* can be Python tuples or LLVM arrays.
"""
context = array._context
builder = array._builder
datamodel = array._datamodel
required_fields = set(datamodel._fields)
if meminfo is None:
meminfo = Constant.null(context.get_value_type(
datamodel.get_type('meminfo')))
intp_t = context.get_value_type(types.intp)
if isinstance(shape, (tuple, list)):
shape = cgutils.pack_array(builder, shape, intp_t)
if isinstance(strides, (tuple, list)):
strides = cgutils.pack_array(builder, strides, intp_t)
attrs = dict(shape=shape,
strides=strides,
data=data,
itemsize=itemsize,
meminfo=meminfo,)
# Set `parent` attribute
if parent is None:
attrs['parent'] = Constant.null(context.get_value_type(
datamodel.get_type('parent')))
else:
attrs['parent'] = parent
# Calc num of items from shape
nitems = context.get_constant(types.intp, 1)
unpacked_shape = cgutils.unpack_tuple(builder, shape, shape.type.count)
if unpacked_shape:
# Shape is not empty
for axlen in unpacked_shape:
nitems = builder.mul(nitems, axlen)
else:
# Shape is empty
nitems = context.get_constant(types.intp, 1)
attrs['nitems'] = nitems
# Make sure that we have all the fields
got_fields = set(attrs.keys())
if got_fields != required_fields:
raise ValueError("missing {0}".format(required_fields - got_fields))
# Set field value
for k, v in attrs.items():
setattr(array, k, v)
return array
def update_array_info(aryty, array):
"""
Update some auxiliary information in *array* after some of its fields
were changed. `itemsize` and `nitems` are updated.
"""
context = array._context
builder = array._builder
# Calc num of items from shape
nitems = context.get_constant(types.intp, 1)
unpacked_shape = cgutils.unpack_tuple(builder, array.shape, aryty.ndim)
for axlen in unpacked_shape:
nitems = builder.mul(nitems, axlen)
array.nitems = nitems
array.itemsize = context.get_constant(types.intp,
get_itemsize(context, aryty))
def make_arrayiter_cls(iterator_type):
"""
Return the Structure representation of the given *iterator_type* (an
instance of types.ArrayIteratorType).
"""
return cgutils.create_struct_proxy(iterator_type)
@builtin
@implement('getiter', types.Kind(types.Buffer))
def getiter_array(context, builder, sig, args):
[arrayty] = sig.args
[array] = args
iterobj = make_arrayiter_cls(sig.return_type)(context, builder)
zero = context.get_constant(types.intp, 0)
indexptr = cgutils.alloca_once_value(builder, zero)
iterobj.index = indexptr
iterobj.array = array
# Incref array
if context.enable_nrt:
context.nrt_incref(builder, arrayty, array)
res = iterobj._getvalue()
# Note: a decref on the iterator will dereference all internal MemInfo*
out = impl_ret_new_ref(context, builder, sig.return_type, res)
return out
def _getitem_array1d(context, builder, arrayty, array, idx, wraparound):
"""
Look up and return an element from a 1D array.
"""
ptr = cgutils.get_item_pointer(builder, arrayty, array, [idx],
wraparound=wraparound)
return load_item(context, builder, arrayty, ptr)
@builtin
@implement('iternext', types.Kind(types.ArrayIterator))
@iternext_impl
def iternext_array(context, builder, sig, args, result):
[iterty] = sig.args
[iter] = args
arrayty = iterty.array_type
if arrayty.ndim != 1:
# TODO
raise NotImplementedError("iterating over %dD array" % arrayty.ndim)
iterobj = make_arrayiter_cls(iterty)(context, builder, value=iter)
ary = make_array(arrayty)(context, builder, value=iterobj.array)
nitems, = cgutils.unpack_tuple(builder, ary.shape, count=1)
index = builder.load(iterobj.index)
is_valid = builder.icmp(lc.ICMP_SLT, index, nitems)
result.set_valid(is_valid)
with builder.if_then(is_valid):
value = _getitem_array1d(context, builder, arrayty, ary, index,
wraparound=False)
result.yield_(value)
nindex = builder.add(index, context.get_constant(types.intp, 1))
builder.store(nindex, iterobj.index)
#-------------------------------------------------------------------------------
# Basic indexing (with integers and slices only)
def basic_indexing(context, builder, aryty, ary, index_types, indices):
"""
Perform basic indexing on the given array.
A (data pointer, shapes, strides) tuple is returned describing
the corresponding view.
"""
zero = context.get_constant(types.intp, 0)
shapes = cgutils.unpack_tuple(builder, ary.shape, aryty.ndim)
strides = cgutils.unpack_tuple(builder, ary.strides, aryty.ndim)
output_indices = []
output_shapes = []
output_strides = []
ax = 0
for indexval, idxty in zip(indices, index_types):
if idxty is types.ellipsis:
# Fill up missing dimensions at the middle
n_missing = aryty.ndim - len(indices) + 1
for i in range(n_missing):
output_indices.append(zero)
output_shapes.append(shapes[ax])
output_strides.append(strides[ax])
ax += 1
continue
# Regular index value
if idxty == types.slice3_type:
slice = slicing.Slice(context, builder, value=indexval)
cgutils.guard_invalid_slice(context, builder, slice)
slicing.fix_slice(builder, slice, shapes[ax])
output_indices.append(slice.start)
sh = slicing.get_slice_length(builder, slice)
st = slicing.fix_stride(builder, slice, strides[ax])
output_shapes.append(sh)
output_strides.append(st)
elif isinstance(idxty, types.Integer):
ind = fix_integer_index(context, builder, idxty, indexval,
shapes[ax])
output_indices.append(ind)
else:
raise NotImplementedError("unexpected index type: %s" % (idxty,))
ax += 1
# Fill up missing dimensions at the end
assert ax <= aryty.ndim
while ax < aryty.ndim:
output_shapes.append(shapes[ax])
output_strides.append(strides[ax])
ax += 1
# No need to check wraparound, as negative indices were already
# fixed in the loop above.
dataptr = cgutils.get_item_pointer(builder, aryty, ary,
output_indices,
wraparound=False)
return (dataptr, output_shapes, output_strides)
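# Illustrative sketch: at the Python level, the view arithmetic performed by
# basic_indexing() matches NumPy's own slicing rules.  In plain NumPy terms
# (shown here only to document the intended semantics):
#
#   import numpy as np
#   a = np.arange(10)
#   v = a[2:8:3]                        # start=2, length=2, step=3
#   assert v.shape == (2,)
#   assert v.strides == (3 * a.itemsize,)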
def make_view(context, builder, aryty, ary, return_type,
data, shapes, strides):
"""
Build a view over the given array with the given parameters.
"""
retary = make_array(return_type)(context, builder)
populate_array(retary,
data=data,
shape=shapes,
strides=strides,
itemsize=ary.itemsize,
meminfo=ary.meminfo,
parent=ary.parent)
return retary
def _getitem_array_generic(context, builder, return_type, aryty, ary,
index_types, indices):
"""
Return the result of indexing *ary* with the given *indices*.
"""
assert isinstance(return_type, types.Buffer)
dataptr, view_shapes, view_strides = \
basic_indexing(context, builder, aryty, ary, index_types, indices)
# Build array view
retary = make_view(context, builder, aryty, ary, return_type,
dataptr, view_shapes, view_strides)
return retary._getvalue()
@builtin
@implement('getitem', types.Kind(types.Buffer), types.Kind(types.Integer))
def getitem_arraynd_intp(context, builder, sig, args):
aryty, idxty = sig.args
ary, idx = args
ary = make_array(aryty)(context, builder, ary)
dataptr, shapes, strides = \
basic_indexing(context, builder, aryty, ary, (idxty,), (idx,))
ndim = aryty.ndim
if ndim == 1:
# Return a value
assert not shapes
result = load_item(context, builder, aryty, dataptr)
elif ndim > 1:
# Return a subview over the array
out_ary = make_view(context, builder, aryty, ary, sig.return_type,
dataptr, shapes, strides)
result = out_ary._getvalue()
else:
raise NotImplementedError("1D indexing into %dD array" % aryty.ndim)
return impl_ret_borrowed(context, builder, sig.return_type, result)
@builtin
@implement('getitem', types.Kind(types.Buffer), types.slice3_type)
def getitem_array1d_slice(context, builder, sig, args):
aryty, idxty = sig.args
ary, idx = args
ary = make_array(aryty)(context, builder, value=ary)
res = _getitem_array_generic(context, builder, sig.return_type,
aryty, ary, (idxty,), (idx,))
return impl_ret_borrowed(context, builder, sig.return_type, res)
@builtin
@implement('getitem', types.Kind(types.Buffer), types.Kind(types.BaseTuple))
def getitem_array_tuple(context, builder, sig, args):
aryty, tupty = sig.args
ary, tup = args
ary = make_array(aryty)(context, builder, ary)
index_types = tupty.types
indices = cgutils.unpack_tuple(builder, tup, count=len(tupty))
if any(isinstance(ty, types.Array) for ty in index_types):
return fancy_getitem(context, builder, sig, args,
aryty, ary, index_types, indices)
dataptr, shapes, strides = \
basic_indexing(context, builder, aryty, ary, index_types, indices)
ndim = aryty.ndim
if isinstance(sig.return_type, types.Array):
# Generic array slicing
res = make_view(context, builder, aryty, ary, sig.return_type,
dataptr, shapes, strides)
res = res._getvalue()
else:
# Plain indexing (returning a scalar)
assert not shapes
res = load_item(context, builder, aryty, dataptr)
    return impl_ret_borrowed(context, builder, sig.return_type, res)
@builtin
@implement('setitem', types.Kind(types.Buffer), types.Any, types.Any)
def setitem_array(context, builder, sig, args):
"""
array[a] = scalar_or_array
array[a,..,b] = scalar_or_array
"""
aryty, idxty, valty = sig.args
ary, idx, val = args
if isinstance(idxty, types.BaseTuple):
index_types = idxty.types
indices = cgutils.unpack_tuple(builder, idx, count=len(idxty))
else:
index_types = (idxty,)
indices = (idx,)
ary = make_array(aryty)(context, builder, ary)
# First try basic indexing to see if a single array location is denoted.
try:
dataptr, shapes, strides = \
basic_indexing(context, builder, aryty, ary, index_types, indices)
except NotImplementedError:
use_fancy_indexing = True
else:
use_fancy_indexing = bool(shapes)
if use_fancy_indexing:
# Index describes a non-trivial view => use generic slice assignment
# (NOTE: this also handles scalar broadcasting)
return fancy_setslice(context, builder, sig, args,
index_types, indices)
    # Store the source value at the given location
val = context.cast(builder, val, valty, aryty.dtype)
store_item(context, builder, aryty, val, dataptr)
@builtin
@implement(types.len_type, types.Kind(types.Buffer))
def array_len(context, builder, sig, args):
(aryty,) = sig.args
(ary,) = args
arystty = make_array(aryty)
ary = arystty(context, builder, ary)
shapeary = ary.shape
res = builder.extract_value(shapeary, 0)
return impl_ret_untracked(context, builder, sig.return_type, res)
#-------------------------------------------------------------------------------
# Advanced / fancy indexing
class Indexer(object):
"""
Generic indexer interface, for generating indices over a fancy indexed
array on a single dimension.
"""
def prepare(self):
"""
Prepare the indexer by initializing any required variables, basic
blocks...
"""
raise NotImplementedError
def get_size(self):
"""
Return this dimension's size as an integer.
"""
raise NotImplementedError
def get_shape(self):
"""
Return this dimension's shape as a tuple.
"""
raise NotImplementedError
def loop_head(self):
"""
        Start the indexing loop. Return an (index, count) tuple.
        *index* is an integer LLVM value representing the index over this
        dimension.
        *count* is either an integer LLVM value representing the current
        iteration count, or None if this dimension should be omitted from
        the indexing result.
"""
raise NotImplementedError
def loop_tail(self):
"""
        Finish the indexing loop.
"""
raise NotImplementedError
class EntireIndexer(Indexer):
"""
Compute indices along an entire array dimension.
"""
def __init__(self, context, builder, aryty, ary, dim):
self.context = context
self.builder = builder
self.aryty = aryty
self.ary = ary
self.dim = dim
self.ll_intp = self.context.get_value_type(types.intp)
def prepare(self):
builder = self.builder
self.size = builder.extract_value(self.ary.shape, self.dim)
self.index = cgutils.alloca_once(builder, self.ll_intp)
self.bb_start = builder.append_basic_block()
self.bb_end = builder.append_basic_block()
def get_size(self):
return self.size
def get_shape(self):
return (self.size,)
def loop_head(self):
builder = self.builder
# Initialize loop variable
self.builder.store(Constant.int(self.ll_intp, 0), self.index)
builder.branch(self.bb_start)
builder.position_at_end(self.bb_start)
cur_index = builder.load(self.index)
with builder.if_then(builder.icmp_signed('>=', cur_index, self.size),
likely=False):
builder.branch(self.bb_end)
return cur_index, cur_index
def loop_tail(self):
builder = self.builder
next_index = builder.add(builder.load(self.index),
self.context.get_constant(types.intp, 1))
builder.store(next_index, self.index)
builder.branch(self.bb_start)
builder.position_at_end(self.bb_end)
class IntegerIndexer(Indexer):
"""
Compute indices from a single integer.
"""
def __init__(self, context, builder, idx):
self.context = context
self.builder = builder
self.idx = idx
self.ll_intp = self.context.get_value_type(types.intp)
def prepare(self):
pass
def get_size(self):
return Constant.int(self.ll_intp, 1)
def get_shape(self):
return ()
def loop_head(self):
return self.idx, None
def loop_tail(self):
pass
class IntegerArrayIndexer(Indexer):
"""
Compute indices from an array of integer indices.
"""
def __init__(self, context, builder, idxty, idxary, size):
self.context = context
self.builder = builder
self.idxty = idxty
self.idxary = idxary
self.size = size
assert idxty.ndim == 1
self.ll_intp = self.context.get_value_type(types.intp)
def prepare(self):
builder = self.builder
self.idx_size = cgutils.unpack_tuple(builder, self.idxary.shape)[0]
self.idx_index = cgutils.alloca_once(builder, self.ll_intp)
self.bb_start = builder.append_basic_block()
self.bb_end = builder.append_basic_block()
def get_size(self):
return self.idx_size
def get_shape(self):
return (self.idx_size,)
def loop_head(self):
builder = self.builder
# Initialize loop variable
self.builder.store(Constant.int(self.ll_intp, 0), self.idx_index)
builder.branch(self.bb_start)
builder.position_at_end(self.bb_start)
cur_index = builder.load(self.idx_index)
with builder.if_then(builder.icmp_signed('>=', cur_index, self.idx_size),
likely=False):
builder.branch(self.bb_end)
# Load the actual index from the array of indices
index = _getitem_array1d(self.context, builder,
self.idxty, self.idxary,
cur_index, wraparound=False)
index = fix_integer_index(self.context, builder,
self.idxty.dtype, index, self.size)
return index, cur_index
def loop_tail(self):
builder = self.builder
next_index = builder.add(builder.load(self.idx_index),
Constant.int(self.ll_intp, 1))
builder.store(next_index, self.idx_index)
builder.branch(self.bb_start)
builder.position_at_end(self.bb_end)
class BooleanArrayIndexer(Indexer):
"""
Compute indices from an array of boolean predicates.
"""
def __init__(self, context, builder, idxty, idxary):
self.context = context
self.builder = builder
self.idxty = idxty
self.idxary = idxary
assert idxty.ndim == 1
self.ll_intp = self.context.get_value_type(types.intp)
self.zero = Constant.int(self.ll_intp, 0)
self.one = Constant.int(self.ll_intp, 1)
def prepare(self):
builder = self.builder
self.size = cgutils.unpack_tuple(builder, self.idxary.shape)[0]
self.idx_index = cgutils.alloca_once(builder, self.ll_intp)
self.count = cgutils.alloca_once(builder, self.ll_intp)
self.bb_start = builder.append_basic_block()
self.bb_tail = builder.append_basic_block()
self.bb_end = builder.append_basic_block()
def get_size(self):
builder = self.builder
count = cgutils.alloca_once_value(builder, self.zero)
# Sum all true values
with cgutils.for_range(builder, self.size) as loop:
c = builder.load(count)
pred = _getitem_array1d(self.context, builder,
self.idxty, self.idxary,
loop.index, wraparound=False)
c = builder.add(c, builder.zext(pred, c.type))
builder.store(c, count)
return builder.load(count)
def get_shape(self):
return (self.get_size(),)
def loop_head(self):
builder = self.builder
# Initialize loop variable
self.builder.store(self.zero, self.idx_index)
self.builder.store(self.zero, self.count)
builder.branch(self.bb_start)
builder.position_at_end(self.bb_start)
cur_index = builder.load(self.idx_index)
cur_count = builder.load(self.count)
with builder.if_then(builder.icmp_signed('>=', cur_index, self.size),
likely=False):
builder.branch(self.bb_end)
# Load the predicate and branch if false
pred = _getitem_array1d(self.context, builder,
self.idxty, self.idxary,
cur_index, wraparound=False)
with builder.if_then(builder.not_(pred)):
builder.branch(self.bb_tail)
# Increment the count for next iteration
next_count = builder.add(cur_count, self.one)
builder.store(next_count, self.count)
return cur_index, cur_count
def loop_tail(self):
builder = self.builder
builder.branch(self.bb_tail)
builder.position_at_end(self.bb_tail)
next_index = builder.add(builder.load(self.idx_index), self.one)
builder.store(next_index, self.idx_index)
builder.branch(self.bb_start)
builder.position_at_end(self.bb_end)
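# Illustrative sketch: a boolean mask selects as many elements as it has
# true values, which is exactly what get_size() computes above.  In plain
# NumPy terms (for documentation only):
#
#   import numpy as np
#   a = np.array([10, 20, 30])
#   m = np.array([True, False, True])
#   assert a[m].shape == (int(m.sum()),)   # -> (2,)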
class SliceIndexer(Indexer):
"""
Compute indices along a slice.
"""
def __init__(self, context, builder, aryty, ary, dim, slice):
self.context = context
self.builder = builder
self.aryty = aryty
self.ary = ary
self.dim = dim
self.slice = slice
self.ll_intp = self.context.get_value_type(types.intp)
self.zero = Constant.int(self.ll_intp, 0)
self.one = Constant.int(self.ll_intp, 1)
def prepare(self):
builder = self.builder
# Fix slice for the dimension's size
self.dim_size = builder.extract_value(self.ary.shape, self.dim)
cgutils.guard_invalid_slice(self.context, builder, self.slice)
slicing.fix_slice(builder, self.slice, self.dim_size)
self.is_step_negative = cgutils.is_neg_int(builder, self.slice.step)
# Create loop entities
self.index = cgutils.alloca_once(builder, self.ll_intp)
self.count = cgutils.alloca_once(builder, self.ll_intp)
self.bb_start = builder.append_basic_block()
self.bb_end = builder.append_basic_block()
def get_size(self):
return slicing.get_slice_length(self.builder, self.slice)
def get_shape(self):
return (self.get_size(),)
def loop_head(self):
builder = self.builder
# Initialize loop variable
self.builder.store(self.slice.start, self.index)
self.builder.store(self.zero, self.count)
builder.branch(self.bb_start)
builder.position_at_end(self.bb_start)
cur_index = builder.load(self.index)
cur_count = builder.load(self.count)
is_finished = builder.select(self.is_step_negative,
builder.icmp_signed('<=', cur_index,
self.slice.stop),
builder.icmp_signed('>=', cur_index,
self.slice.stop))
with builder.if_then(is_finished, likely=False):
builder.branch(self.bb_end)
return cur_index, cur_count
def loop_tail(self):
builder = self.builder
next_index = builder.add(builder.load(self.index), self.slice.step)
builder.store(next_index, self.index)
next_count = builder.add(builder.load(self.count), self.one)
builder.store(next_count, self.count)
builder.branch(self.bb_start)
builder.position_at_end(self.bb_end)
class FancyIndexer(object):
"""
Perform fancy indexing on the given array.
"""
def __init__(self, context, builder, aryty, ary, index_types, indices):
self.context = context
self.builder = builder
        self.aryty = aryty
self.shapes = cgutils.unpack_tuple(builder, ary.shape, aryty.ndim)
self.strides = cgutils.unpack_tuple(builder, ary.strides, aryty.ndim)
indexers = []
ax = 0
for indexval, idxty in zip(indices, index_types):
if idxty is types.ellipsis:
                # Fill in missing dimensions in the middle
n_missing = aryty.ndim - len(indices) + 1
for i in range(n_missing):
indexer = EntireIndexer(context, builder, aryty, ary, ax)
indexers.append(indexer)
ax += 1
continue
# Regular index value
if idxty == types.slice3_type:
slice = slicing.Slice(context, builder, value=indexval)
indexer = SliceIndexer(context, builder, aryty, ary, ax, slice)
indexers.append(indexer)
elif isinstance(idxty, types.Integer):
ind = fix_integer_index(context, builder, idxty, indexval,
self.shapes[ax])
indexer = IntegerIndexer(context, builder, ind)
indexers.append(indexer)
elif isinstance(idxty, types.Array):
idxary = make_array(idxty)(context, builder, indexval)
if isinstance(idxty.dtype, types.Integer):
indexer = IntegerArrayIndexer(context, builder,
idxty, idxary,
self.shapes[ax])
elif isinstance(idxty.dtype, types.Boolean):
indexer = BooleanArrayIndexer(context, builder,
idxty, idxary)
else:
assert 0
indexers.append(indexer)
else:
raise AssertionError("unexpected index type: %s" % (idxty,))
ax += 1
# Fill up missing dimensions at the end
assert ax <= aryty.ndim, (ax, aryty.ndim)
while ax < aryty.ndim:
indexer = EntireIndexer(context, builder, aryty, ary, ax)
indexers.append(indexer)
ax += 1
assert len(indexers) == aryty.ndim, (len(indexers), aryty.ndim)
self.indexers = indexers
def prepare(self):
for i in self.indexers:
i.prepare()
def get_shape(self):
"""
Get the resulting shape as Python tuple.
"""
return sum([i.get_shape() for i in self.indexers], ())
def begin_loops(self):
indices, counts = zip(*(i.loop_head() for i in self.indexers))
return indices, counts
def end_loops(self):
for i in reversed(self.indexers):
i.loop_tail()
def fancy_getitem(context, builder, sig, args,
aryty, ary, index_types, indices):
shapes = cgutils.unpack_tuple(builder, ary.shape)
strides = cgutils.unpack_tuple(builder, ary.strides)
data = ary.data
indexer = FancyIndexer(context, builder, aryty, ary,
index_types, indices)
indexer.prepare()
# Construct output array
out_ty = sig.return_type
out_shapes = indexer.get_shape()
out = _empty_nd_impl(context, builder, out_ty, out_shapes)
out_data = out.data
out_idx = cgutils.alloca_once_value(builder,
context.get_constant(types.intp, 0))
# Loop on source and copy to destination
indices, _ = indexer.begin_loops()
# No need to check for wraparound, as the indexers all ensure
# a positive index is returned.
ptr = cgutils.get_item_pointer2(builder, data, shapes, strides,
aryty.layout, indices, wraparound=False)
val = load_item(context, builder, aryty, ptr)
# Since the destination is C-contiguous, no need for multi-dimensional
# indexing.
cur = builder.load(out_idx)
ptr = builder.gep(out_data, [cur])
store_item(context, builder, out_ty, val, ptr)
next_idx = builder.add(cur, context.get_constant(types.intp, 1))
builder.store(next_idx, out_idx)
indexer.end_loops()
return impl_ret_new_ref(context, builder, out_ty, out._getvalue())
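# Illustrative sketch: unlike basic slicing, fancy indexing materializes a
# fresh C-contiguous array (allocated via _empty_nd_impl above) rather than
# a view.  In plain NumPy terms (for documentation only):
#
#   import numpy as np
#   a = np.arange(5) * 10
#   b = a[np.array([3, 0])]             # -> array([30, 0]), a copy
#   assert b.flags.c_contiguous and b.base is None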
@builtin
@implement('getitem', types.Kind(types.Buffer), types.Kind(types.Array))
def fancy_getitem_array(context, builder, sig, args):
aryty, idxty = sig.args
ary, idx = args
ary = make_array(aryty)(context, builder, ary)
out_ty = sig.return_type
return fancy_getitem(context, builder, sig, args,
aryty, ary, (idxty,), (idx,))
def fancy_setslice(context, builder, sig, args, index_types, indices):
"""
Implement slice assignment for arrays. This implementation works for
basic as well as fancy indexing, since there's no functional difference
between the two for indexed assignment.
"""
aryty, _, srcty = sig.args
ary, _, src = args
ary = make_array(aryty)(context, builder, ary)
dest_shapes = cgutils.unpack_tuple(builder, ary.shape)
dest_strides = cgutils.unpack_tuple(builder, ary.strides)
dest_data = ary.data
indexer = FancyIndexer(context, builder, aryty, ary,
index_types, indices)
indexer.prepare()
if isinstance(srcty, types.Buffer):
# Source is an array
src = make_array(srcty)(context, builder, src)
src_shapes = cgutils.unpack_tuple(builder, src.shape)
src_strides = cgutils.unpack_tuple(builder, src.strides)
src_data = src.data
src_dtype = srcty.dtype
# Check shapes are equal
index_shape = indexer.get_shape()
shape_error = cgutils.false_bit
assert len(index_shape) == len(src_shapes)
for u, v in zip(src_shapes, index_shape):
shape_error = builder.or_(shape_error,
builder.icmp_signed('!=', u, v))
with builder.if_then(shape_error, likely=False):
msg = "cannot assign slice from input of different size"
context.call_conv.return_user_exc(builder, ValueError, (msg,))
def src_getitem(source_indices):
assert len(source_indices) == srcty.ndim
src_ptr = cgutils.get_item_pointer2(builder, src_data,
src_shapes, src_strides,
srcty.layout, source_indices,
wraparound=False)
return load_item(context, builder, srcty, src_ptr)
else:
# Source is a scalar (broadcast or not, depending on destination
# shape).
src_dtype = srcty
def src_getitem(source_indices):
return src
# Loop on destination and copy from source to destination
dest_indices, counts = indexer.begin_loops()
# Source is iterated in natural order
source_indices = tuple(c for c in counts if c is not None)
val = src_getitem(source_indices)
    # Cast to the destination dtype (cross-dtype slice assignment is allowed)
val = context.cast(builder, val, src_dtype, aryty.dtype)
# No need to check for wraparound, as the indexers all ensure
# a positive index is returned.
dest_ptr = cgutils.get_item_pointer2(builder, dest_data,
dest_shapes, dest_strides,
aryty.layout, dest_indices,
wraparound=False)
store_item(context, builder, aryty, val, dest_ptr)
indexer.end_loops()
return context.get_dummy_value()
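# Illustrative sketch of the semantics implemented above (plain NumPy, for
# documentation only): an array source must match the indexed shape, while
# a scalar source is broadcast to every selected location:
#
#   import numpy as np
#   a = np.zeros(6)
#   a[1:3] = np.array([7.0, 8.0])       # shapes match -> element-wise copy
#   a[::2] = 1.0                        # scalar -> broadcast
#   # a[1:3] = np.array([1.0, 2.0, 3.0]) would raise ValueError:
#   # "cannot assign slice from input of different size"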
#-------------------------------------------------------------------------------
# Shape / layout altering
@builtin
@implement('array.transpose', types.Kind(types.Array))
def array_transpose(context, builder, sig, args):
return array_T(context, builder, sig.args[0], args[0])
def array_T(context, builder, typ, value):
if typ.ndim <= 1:
res = value
else:
ary = make_array(typ)(context, builder, value)
ret = make_array(typ)(context, builder)
shapes = cgutils.unpack_tuple(builder, ary.shape, typ.ndim)
strides = cgutils.unpack_tuple(builder, ary.strides, typ.ndim)
populate_array(ret,
data=ary.data,
shape=cgutils.pack_array(builder, shapes[::-1]),
strides=cgutils.pack_array(builder, strides[::-1]),
itemsize=ary.itemsize,
meminfo=ary.meminfo,
parent=ary.parent)
res = ret._getvalue()
return impl_ret_borrowed(context, builder, typ, res)
builtin_attr(impl_attribute(types.Kind(types.Array), 'T')(array_T))
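# Illustrative sketch: transposition only reverses the shape and strides
# tuples; the data buffer is shared with the source array.  In plain NumPy
# terms (for documentation only):
#
#   import numpy as np
#   a = np.ones((2, 3))
#   assert a.T.shape == (3, 2)
#   assert a.T.strides == a.strides[::-1]
#   assert a.T.base is a                # same buffer, no copy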
def _attempt_nocopy_reshape(context, builder, aryty, ary, newnd, newshape,
newstrides):
"""
Call into Numba_attempt_nocopy_reshape() for the given array type
and instance, and the specified new shape. The array pointed to
by *newstrides* will be filled up if successful.
"""
ll_intp = context.get_value_type(types.intp)
ll_intp_star = ll_intp.as_pointer()
ll_intc = context.get_value_type(types.intc)
fnty = lc.Type.function(ll_intc, [ll_intp, ll_intp_star, ll_intp_star,
ll_intp, ll_intp_star, ll_intp_star,
ll_intp, ll_intc])
fn = builder.module.get_or_insert_function(
fnty, name="numba_attempt_nocopy_reshape")
nd = lc.Constant.int(ll_intp, aryty.ndim)
shape = cgutils.gep_inbounds(builder, ary._get_ptr_by_name('shape'), 0, 0)
strides = cgutils.gep_inbounds(builder, ary._get_ptr_by_name('strides'), 0, 0)
newnd = lc.Constant.int(ll_intp, newnd)
newshape = cgutils.gep_inbounds(builder, newshape, 0, 0)
newstrides = cgutils.gep_inbounds(builder, newstrides, 0, 0)
is_f_order = lc.Constant.int(ll_intc, 0)
res = builder.call(fn, [nd, shape, strides,
newnd, newshape, newstrides,
ary.itemsize, is_f_order])
return res
@builtin
@implement('array.reshape', types.Kind(types.Array), types.Kind(types.BaseTuple))
def array_reshape(context, builder, sig, args):
aryty = sig.args[0]
retty = sig.return_type
shapety = sig.args[1]
shape = args[1]
ll_intp = context.get_value_type(types.intp)
ll_shape = lc.Type.array(ll_intp, shapety.count)
ary = make_array(aryty)(context, builder, args[0])
# XXX unknown dimension (-1) is unhandled
# Check requested size
newsize = lc.Constant.int(ll_intp, 1)
for s in cgutils.unpack_tuple(builder, shape):
newsize = builder.mul(newsize, s)
size = lc.Constant.int(ll_intp, 1)
for s in cgutils.unpack_tuple(builder, ary.shape):
size = builder.mul(size, s)
fail = builder.icmp_unsigned('!=', size, newsize)
with builder.if_then(fail):
msg = "total size of new array must be unchanged"
context.call_conv.return_user_exc(builder, ValueError, (msg,))
newnd = shapety.count
newshape = cgutils.alloca_once(builder, ll_shape)
builder.store(shape, newshape)
newstrides = cgutils.alloca_once(builder, ll_shape)
ok = _attempt_nocopy_reshape(context, builder, aryty, ary, newnd,
newshape, newstrides)
fail = builder.icmp_unsigned('==', ok, lc.Constant.int(ok.type, 0))
with builder.if_then(fail):
msg = "incompatible shape for array"
context.call_conv.return_user_exc(builder, NotImplementedError, (msg,))
ret = make_array(retty)(context, builder)
populate_array(ret,
data=ary.data,
shape=builder.load(newshape),
strides=builder.load(newstrides),
itemsize=ary.itemsize,
meminfo=ary.meminfo,
parent=ary.parent)
res = ret._getvalue()
return impl_ret_borrowed(context, builder, sig.return_type, res)
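# Illustrative sketch: reshape first checks that the total element count is
# unchanged, then asks numba_attempt_nocopy_reshape() for new strides.  Note
# that where NumPy silently falls back to copying (e.g. when reshaping a
# transposed, non-contiguous array), the implementation above raises
# NotImplementedError instead:
#
#   import numpy as np
#   a = np.arange(6).reshape(2, 3)      # no copy for C-contiguous input
#   a.T.reshape(6)                      # NumPy copies; compiled code raises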
def _change_dtype(context, builder, oldty, newty, ary):
"""
Attempt to fix up *ary* for switching from *oldty* to *newty*.
See Numpy's array_descr_set()
(np/core/src/multiarray/getset.c).
Attempt to fix the array's shape and strides for a new dtype.
False is returned on failure, True on success.
"""
assert oldty.ndim == newty.ndim
assert oldty.layout == newty.layout
new_layout = ord(newty.layout)
any_layout = ord('A')
c_layout = ord('C')
f_layout = ord('F')
int8 = types.int8
def imp(nd, dims, strides, old_itemsize, new_itemsize, layout):
        # Attempt to update the layout due to a limitation of the numba
        # type system.
if layout == any_layout:
# Test rightmost stride to be contiguous
if strides[-1] == old_itemsize:
# Process this as if it is C contiguous
layout = int8(c_layout)
# Test leftmost stride to be F contiguous
elif strides[0] == old_itemsize:
# Process this as if it is F contiguous
layout = int8(f_layout)
if old_itemsize != new_itemsize and (layout == any_layout or nd == 0):
return False
if layout == c_layout:
i = nd - 1
else:
i = 0
if new_itemsize < old_itemsize:
# If it is compatible, increase the size of the dimension
# at the end (or at the front if F-contiguous)
if (old_itemsize % new_itemsize) != 0:
return False
newdim = old_itemsize // new_itemsize
dims[i] *= newdim
strides[i] = new_itemsize
elif new_itemsize > old_itemsize:
# Determine if last (or first if F-contiguous) dimension
# is compatible
bytelength = dims[i] * old_itemsize
if (bytelength % new_itemsize) != 0:
return False
dims[i] = bytelength // new_itemsize
strides[i] = new_itemsize
else:
# Same item size: nothing to do (this also works for
# non-contiguous arrays).
pass
return True
old_itemsize = context.get_constant(types.intp,
get_itemsize(context, oldty))
new_itemsize = context.get_constant(types.intp,
get_itemsize(context, newty))
nd = context.get_constant(types.intp, newty.ndim)
shape_data = cgutils.gep_inbounds(builder, ary._get_ptr_by_name('shape'),
0, 0)
strides_data = cgutils.gep_inbounds(builder,
ary._get_ptr_by_name('strides'), 0, 0)
shape_strides_array_type = types.Array(dtype=types.intp, ndim=1, layout='C')
arycls = context.make_array(shape_strides_array_type)
shape_constant = cgutils.pack_array(builder,
[context.get_constant(types.intp,
newty.ndim)])
sizeof_intp = context.get_abi_sizeof(context.get_data_type(types.intp))
sizeof_intp = context.get_constant(types.intp, sizeof_intp)
strides_constant = cgutils.pack_array(builder, [sizeof_intp])
shape_ary = arycls(context, builder)
populate_array(shape_ary,
data=shape_data,
shape=shape_constant,
strides=strides_constant,
itemsize=sizeof_intp,
meminfo=None)
strides_ary = arycls(context, builder)
populate_array(strides_ary,
data=strides_data,
shape=shape_constant,
strides=strides_constant,
itemsize=sizeof_intp,
meminfo=None)
shape = shape_ary._getvalue()
strides = strides_ary._getvalue()
args = [nd, shape, strides, old_itemsize, new_itemsize,
context.get_constant(types.int8, new_layout)]
sig = signature(types.boolean,
types.intp, # nd
shape_strides_array_type, # dims
shape_strides_array_type, # strides
types.intp, # old_itemsize
types.intp, # new_itemsize
types.int8, # layout
)
res = context.compile_internal(builder, imp, sig, args)
update_array_info(newty, ary)
res = impl_ret_borrowed(context, builder, sig.return_type, res)
return res
@builtin
@implement('array.view', types.Kind(types.Array), types.Kind(types.DTypeSpec))
def array_view(context, builder, sig, args):
aryty = sig.args[0]
retty = sig.return_type
ary = make_array(aryty)(context, builder, args[0])
ret = make_array(retty)(context, builder)
# Copy all fields, casting the "data" pointer appropriately
fields = set(ret._datamodel._fields)
for k in sorted(fields):
val = getattr(ary, k)
if k == 'data':
ptrty = ret.data.type
ret.data = builder.bitcast(val, ptrty)
else:
setattr(ret, k, val)
ok = _change_dtype(context, builder, aryty, retty, ret)
fail = builder.icmp_unsigned('==', ok, lc.Constant.int(ok.type, 0))
with builder.if_then(fail):
msg = "new type not compatible with array"
context.call_conv.return_user_exc(builder, ValueError, (msg,))
res = ret._getvalue()
return impl_ret_borrowed(context, builder, sig.return_type, res)
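# Illustrative sketch: a dtype-changing view keeps the data pointer and
# rescales the innermost dimension, as computed by _change_dtype() above.
# In plain NumPy terms (for documentation only):
#
#   import numpy as np
#   a = np.zeros(4, dtype=np.int32)
#   v = a.view(np.int16)
#   assert v.shape == (8,)              # 4 * (4 bytes // 2 bytes)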
#-------------------------------------------------------------------------------
# Computations
@builtin
@implement(numpy.sum, types.Kind(types.Array))
@implement("array.sum", types.Kind(types.Array))
def array_sum(context, builder, sig, args):
zero = sig.return_type(0)
def array_sum_impl(arr):
c = zero
for v in arr.flat:
c += v
return c
res = context.compile_internal(builder, array_sum_impl, sig, args,
locals=dict(c=sig.return_type))
return impl_ret_borrowed(context, builder, sig.return_type, res)
@builtin
@implement(numpy.prod, types.Kind(types.Array))
@implement("array.prod", types.Kind(types.Array))
def array_prod(context, builder, sig, args):
def array_prod_impl(arr):
c = 1
for v in arr.flat:
c *= v
return c
res = context.compile_internal(builder, array_prod_impl, sig, args,
locals=dict(c=sig.return_type))
return impl_ret_borrowed(context, builder, sig.return_type, res)
@builtin
@implement(numpy.cumsum, types.Kind(types.Array))
@implement("array.cumsum", types.Kind(types.Array))
def array_cumsum(context, builder, sig, args):
scalar_dtype = sig.return_type.dtype
dtype = as_dtype(scalar_dtype)
zero = scalar_dtype(0)
def array_cumsum_impl(arr):
size = 1
for i in arr.shape:
size = size * i
out = numpy.empty(size, dtype)
c = zero
for idx, v in enumerate(arr.flat):
c += v
out[idx] = c
return out
res = context.compile_internal(builder, array_cumsum_impl, sig, args,
locals=dict(c=scalar_dtype))
return impl_ret_new_ref(context, builder, sig.return_type, res)
@builtin
@implement(numpy.cumprod, types.Kind(types.Array))
@implement("array.cumprod", types.Kind(types.Array))
def array_cumprod(context, builder, sig, args):
scalar_dtype = sig.return_type.dtype
dtype = as_dtype(scalar_dtype)
def array_cumprod_impl(arr):
size = 1
for i in arr.shape:
size = size * i
out = numpy.empty(size, dtype)
c = 1
for idx, v in enumerate(arr.flat):
c *= v
out[idx] = c
return out
res = context.compile_internal(builder, array_cumprod_impl, sig, args,
locals=dict(c=scalar_dtype))
return impl_ret_new_ref(context, builder, sig.return_type, res)
@builtin
@implement(numpy.mean, types.Kind(types.Array))
@implement("array.mean", types.Kind(types.Array))
def array_mean(context, builder, sig, args):
zero = sig.return_type(0)
def array_mean_impl(arr):
# Can't use the naive `arr.sum() / arr.size`, as it would return
# a wrong result on integer sum overflow.
c = zero
for v in arr.flat:
c += v
return c / arr.size
res = context.compile_internal(builder, array_mean_impl, sig, args,
locals=dict(c=sig.return_type))
return impl_ret_untracked(context, builder, sig.return_type, res)
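# Example of the overflow issue avoided above: summing int8 values in an
# int8 accumulator would wrap around, so `c` is locked to the (wider)
# return type via the `locals` mapping.  In plain NumPy terms:
#
#   import numpy as np
#   a = np.full(4, 100, dtype=np.int8)  # naive int8 sum of 400 would wrap;
#   a.mean()                            # widened accumulation gives 100.0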
@builtin
@implement(numpy.var, types.Kind(types.Array))
@implement("array.var", types.Kind(types.Array))
def array_var(context, builder, sig, args):
def array_var_impl(arry):
# Compute the mean
m = arry.mean()
# Compute the sum of square diffs
ssd = 0
for v in arry.flat:
ssd += (v - m) ** 2
return ssd / arry.size
res = context.compile_internal(builder, array_var_impl, sig, args)
return impl_ret_untracked(context, builder, sig.return_type, res)
@builtin
@implement(numpy.std, types.Kind(types.Array))
@implement("array.std", types.Kind(types.Array))
def array_std(context, builder, sig, args):
def array_std_impl(arry):
return arry.var() ** 0.5
res = context.compile_internal(builder, array_std_impl, sig, args)
return impl_ret_untracked(context, builder, sig.return_type, res)
@builtin
@implement(numpy.min, types.Kind(types.Array))
@implement("array.min", types.Kind(types.Array))
def array_min(context, builder, sig, args):
ty = sig.args[0].dtype
if isinstance(ty, (types.NPDatetime, types.NPTimedelta)):
# NaT is smaller than every other value, but it is
# ignored as far as min() is concerned.
nat = ty('NaT')
def array_min_impl(arry):
min_value = nat
it = arry.flat
for v in it:
if v != nat:
min_value = v
break
for v in it:
if v != nat and v < min_value:
min_value = v
return min_value
else:
def array_min_impl(arry):
for v in arry.flat:
min_value = v
break
for v in arry.flat:
if v < min_value:
min_value = v
return min_value
res = context.compile_internal(builder, array_min_impl, sig, args)
return impl_ret_borrowed(context, builder, sig.return_type, res)
@builtin
@implement(numpy.max, types.Kind(types.Array))
@implement("array.max", types.Kind(types.Array))
def array_max(context, builder, sig, args):
def array_max_impl(arry):
for v in arry.flat:
max_value = v
break
for v in arry.flat:
if v > max_value:
max_value = v
return max_value
res = context.compile_internal(builder, array_max_impl, sig, args)
return impl_ret_borrowed(context, builder, sig.return_type, res)
@builtin
@implement(numpy.argmin, types.Kind(types.Array))
@implement("array.argmin", types.Kind(types.Array))
def array_argmin(context, builder, sig, args):
ty = sig.args[0].dtype
# NOTE: Under Numpy < 1.10, argmin() is inconsistent with min() on NaT values:
# https://github.com/numpy/numpy/issues/6030
if (numpy_version >= (1, 10) and
isinstance(ty, (types.NPDatetime, types.NPTimedelta))):
# NaT is smaller than every other value, but it is
# ignored as far as argmin() is concerned.
nat = ty('NaT')
def array_argmin_impl(arry):
min_value = nat
min_idx = 0
it = arry.flat
idx = 0
for v in it:
if v != nat:
min_value = v
min_idx = idx
idx += 1
break
idx += 1
for v in it:
if v != nat and v < min_value:
min_value = v
min_idx = idx
idx += 1
return min_idx
else:
def array_argmin_impl(arry):
for v in arry.flat:
min_value = v
min_idx = 0
break
idx = 0
for v in arry.flat:
if v < min_value:
min_value = v
min_idx = idx
idx += 1
return min_idx
res = context.compile_internal(builder, array_argmin_impl, sig, args)
return impl_ret_untracked(context, builder, sig.return_type, res)
@builtin
@implement(numpy.argmax, types.Kind(types.Array))
@implement("array.argmax", types.Kind(types.Array))
def array_argmax(context, builder, sig, args):
def array_argmax_impl(arry):
for v in arry.flat:
max_value = v
max_idx = 0
break
idx = 0
for v in arry.flat:
if v > max_value:
max_value = v
max_idx = idx
idx += 1
return max_idx
res = context.compile_internal(builder, array_argmax_impl, sig, args)
return impl_ret_untracked(context, builder, sig.return_type, res)
@builtin
@implement(numpy.median, types.Kind(types.Array))
def array_median(context, builder, sig, args):
def partition(A, low, high):
mid = (low+high) // 2
# median of three {low, middle, high}
LM = A[low] <= A[mid]
MH = A[mid] <= A[high]
LH = A[low] <= A[high]
if LM == MH:
median3 = mid
elif LH != LM:
median3 = low
else:
median3 = high
# choose median3 as the pivot
A[high], A[median3] = A[median3], A[high]
x = A[high]
i = low
for j in range(low, high):
if A[j] <= x:
A[i], A[j] = A[j], A[i]
i += 1
A[i], A[high] = A[high], A[i]
return i
sig_partition = typing.signature(types.intp, *(sig.args[0], types.intp, types.intp))
_partition = context.compile_subroutine(builder, partition, sig_partition)
def select(arry, k):
n = arry.shape[0]
        # XXX: assuming a flat array until array.flatten is implemented
# temp_arry = arry.flatten()
temp_arry = arry.copy()
high = n-1
low = 0
# NOTE: high is inclusive
i = _partition(temp_arry, low, high)
while i != k:
if i < k:
low = i+1
i = _partition(temp_arry, low, high)
else:
high = i-1
i = _partition(temp_arry, low, high)
return temp_arry[k]
sig_select = typing.signature(sig.args[0].dtype, *(sig.args[0], types.intp))
_select = context.compile_subroutine(builder, select, sig_select)
def median(arry):
n = arry.shape[0]
if n % 2 == 0:
return (_select(arry, n//2 - 1) + _select(arry, n//2))/2
else:
return _select(arry, n//2)
res = context.compile_internal(builder, median, sig, args)
return impl_ret_untracked(context, builder, sig.return_type, res)
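# Illustrative sketch: the median is computed with quickselect (the
# partition/select pair above) rather than a full sort; an odd-length array
# yields the middle order statistic, an even-length one the mean of the two
# middle values:
#
#   import numpy as np
#   assert np.median(np.array([3.0, 1.0, 2.0])) == 2.0
#   assert np.median(np.array([1.0, 2.0, 3.0, 4.0])) == 2.5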
def _np_round_intrinsic(tp):
# np.round() always rounds half to even
return "llvm.rint.f%d" % (tp.bitwidth,)
def _np_round_float(context, builder, tp, val):
llty = context.get_value_type(tp)
module = builder.module
fnty = lc.Type.function(llty, [llty])
fn = module.get_or_insert_function(fnty, name=_np_round_intrinsic(tp))
return builder.call(fn, (val,))
@builtin
@implement(numpy.round, types.Kind(types.Float))
def scalar_round_unary(context, builder, sig, args):
res = _np_round_float(context, builder, sig.args[0], args[0])
return impl_ret_untracked(context, builder, sig.return_type, res)
@builtin
@implement(numpy.round, types.Kind(types.Integer))
def scalar_round_unary_integer(context, builder, sig, args):
res = args[0]
return impl_ret_untracked(context, builder, sig.return_type, res)
@builtin
@implement(numpy.round, types.Kind(types.Complex))
def scalar_round_unary_complex(context, builder, sig, args):
fltty = sig.args[0].underlying_float
cplx_cls = context.make_complex(sig.args[0])
z = cplx_cls(context, builder, args[0])
z.real = _np_round_float(context, builder, fltty, z.real)
z.imag = _np_round_float(context, builder, fltty, z.imag)
res = z._getvalue()
return impl_ret_untracked(context, builder, sig.return_type, res)
@builtin
@implement(numpy.round, types.Kind(types.Float), types.Kind(types.Integer))
@implement(numpy.round, types.Kind(types.Integer), types.Kind(types.Integer))
def scalar_round_binary_float(context, builder, sig, args):
def round_ndigits(x, ndigits):
if math.isinf(x) or math.isnan(x):
return x
# NOTE: this is CPython's algorithm, but perhaps this is overkill
# when emulating Numpy's behaviour.
if ndigits >= 0:
if ndigits > 22:
# pow1 and pow2 are each safe from overflow, but
# pow1*pow2 ~= pow(10.0, ndigits) might overflow.
pow1 = 10.0 ** (ndigits - 22)
pow2 = 1e22
else:
pow1 = 10.0 ** ndigits
pow2 = 1.0
y = (x * pow1) * pow2
if math.isinf(y):
return x
return (numpy.round(y) / pow2) / pow1
else:
pow1 = 10.0 ** (-ndigits)
y = x / pow1
return numpy.round(y) * pow1
res = context.compile_internal(builder, round_ndigits, sig, args)
return impl_ret_untracked(context, builder, sig.return_type, res)
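# Examples of the scaling algorithm above (values follow NumPy semantics):
#
#   import numpy as np
#   np.round(0.5)        # -> 0.0   (llvm.rint: round half to even)
#   np.round(2.5)        # -> 2.0
#   np.round(2.675, 2)   # -> 2.67  (2.675 is not exactly representable)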
@builtin
@implement(numpy.round, types.Kind(types.Complex), types.Kind(types.Integer))
def scalar_round_binary_complex(context, builder, sig, args):
def round_ndigits(z, ndigits):
return complex(numpy.round(z.real, ndigits),
numpy.round(z.imag, ndigits))
res = context.compile_internal(builder, round_ndigits, sig, args)
return impl_ret_untracked(context, builder, sig.return_type, res)
@builtin
@implement(numpy.round, types.Kind(types.Array), types.Kind(types.Integer),
types.Kind(types.Array))
def array_round(context, builder, sig, args):
def array_round_impl(arr, decimals, out):
if arr.shape != out.shape:
raise ValueError("invalid output shape")
for index, val in numpy.ndenumerate(arr):
out[index] = numpy.round(val, decimals)
return out
res = context.compile_internal(builder, array_round_impl, sig, args)
return impl_ret_new_ref(context, builder, sig.return_type, res)
@builtin
@implement(numpy.sinc, types.Kind(types.Array))
def array_sinc(context, builder, sig, args):
def array_sinc_impl(arr):
out = numpy.zeros_like(arr)
for index, val in numpy.ndenumerate(arr):
out[index] = numpy.sinc(val)
return out
res = context.compile_internal(builder, array_sinc_impl, sig, args)
return impl_ret_new_ref(context, builder, sig.return_type, res)
@builtin
@implement(numpy.sinc, types.Kind(types.Number))
def scalar_sinc(context, builder, sig, args):
scalar_dtype = sig.return_type
def scalar_sinc_impl(val):
if numpy.fabs(val) == 0.e0: # to match np impl
val = 1e-20
val *= numpy.pi # np sinc is the normalised variant
return numpy.sin(val)/val
res = context.compile_internal(builder, scalar_sinc_impl, sig, args,
locals=dict(c=scalar_dtype))
return impl_ret_untracked(context, builder, sig.return_type, res)
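# Illustrative sketch: numpy.sinc is the normalized variant
# sin(pi*x) / (pi*x), with the removable singularity at 0 defined as 1:
#
#   import numpy as np
#   assert np.sinc(0.0) == 1.0
#   assert abs(np.sinc(0.5) - 2.0 / np.pi) < 1e-12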
@builtin
@implement(numpy.nonzero, types.Kind(types.Array))
@implement("array.nonzero", types.Kind(types.Array))
@implement(numpy.where, types.Kind(types.Array))
def array_nonzero(context, builder, sig, args):
aryty = sig.args[0]
# Return type is a N-tuple of 1D C-contiguous arrays
retty = sig.return_type
outaryty = retty.dtype
ndim = aryty.ndim
nouts = retty.count
ary = make_array(aryty)(context, builder, args[0])
shape = cgutils.unpack_tuple(builder, ary.shape)
strides = cgutils.unpack_tuple(builder, ary.strides)
data = ary.data
layout = aryty.layout
# First count the number of non-zero elements
zero = context.get_constant(types.intp, 0)
one = context.get_constant(types.intp, 1)
count = cgutils.alloca_once_value(builder, zero)
with cgutils.loop_nest(builder, shape, zero.type) as indices:
ptr = cgutils.get_item_pointer2(builder, data, shape, strides,
layout, indices)
val = load_item(context, builder, aryty, ptr)
nz = context.is_true(builder, aryty.dtype, val)
with builder.if_then(nz):
builder.store(builder.add(builder.load(count), one), count)
# Then allocate output arrays of the right size
out_shape = (builder.load(count),)
outs = [_empty_nd_impl(context, builder, outaryty, out_shape)._getvalue()
for i in range(nouts)]
outarys = [make_array(outaryty)(context, builder, out) for out in outs]
out_datas = [out.data for out in outarys]
# And fill them up
index = cgutils.alloca_once_value(builder, zero)
with cgutils.loop_nest(builder, shape, zero.type) as indices:
ptr = cgutils.get_item_pointer2(builder, data, shape, strides,
layout, indices)
val = load_item(context, builder, aryty, ptr)
nz = context.is_true(builder, aryty.dtype, val)
with builder.if_then(nz):
# Store element indices in output arrays
if not indices:
# For a 0-d array, store 0 in the unique output array
indices = (zero,)
cur = builder.load(index)
for i in range(nouts):
ptr = cgutils.get_item_pointer2(builder, out_datas[i],
out_shape, (),
'C', [cur])
store_item(context, builder, outaryty, indices[i], ptr)
builder.store(builder.add(cur, one), index)
tup = context.make_tuple(builder, sig.return_type, outs)
return impl_ret_new_ref(context, builder, sig.return_type, tup)
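# Illustrative sketch: nonzero() makes two passes -- one to count hits and
# size the outputs, one to record coordinates -- and returns one index array
# per dimension.  In plain NumPy terms (for documentation only):
#
#   import numpy as np
#   rows, cols = np.nonzero(np.eye(2))
#   assert rows.tolist() == [0, 1] and cols.tolist() == [0, 1]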
@builtin
@implement(numpy.where, types.Kind(types.Array),
types.Kind(types.Array), types.Kind(types.Array))
def array_where(context, builder, sig, args):
layouts = set(a.layout for a in sig.args)
if layouts == set('C'):
# Faster implementation for C-contiguous arrays
def where_impl(cond, x, y):
shape = cond.shape
if x.shape != shape or y.shape != shape:
raise ValueError("all inputs should have the same shape")
res = numpy.empty_like(x)
cf = cond.flat
xf = x.flat
yf = y.flat
rf = res.flat
for i in range(cond.size):
rf[i] = xf[i] if cf[i] else yf[i]
return res
else:
def where_impl(cond, x, y):
shape = cond.shape
if x.shape != shape or y.shape != shape:
raise ValueError("all inputs should have the same shape")
res = numpy.empty_like(x)
for idx, c in numpy.ndenumerate(cond):
res[idx] = x[idx] if c else y[idx]
return res
res = context.compile_internal(builder, where_impl, sig, args)
return impl_ret_untracked(context, builder, sig.return_type, res)
#-------------------------------------------------------------------------------
# Array attributes
@builtin_attr
@impl_attribute(types.Kind(types.Array), "dtype", types.Kind(types.DType))
def array_dtype(context, builder, typ, value):
res = context.get_dummy_value()
return impl_ret_untracked(context, builder, typ, res)
@builtin_attr
@impl_attribute(types.Kind(types.Array), "shape", types.Kind(types.UniTuple))
@impl_attribute(types.Kind(types.MemoryView), "shape", types.Kind(types.UniTuple))
def array_shape(context, builder, typ, value):
arrayty = make_array(typ)
array = arrayty(context, builder, value)
res = array.shape
return impl_ret_untracked(context, builder, typ, res)
@builtin_attr
@impl_attribute(types.Kind(types.Array), "strides", types.Kind(types.UniTuple))
@impl_attribute(types.Kind(types.MemoryView), "strides", types.Kind(types.UniTuple))
def array_strides(context, builder, typ, value):
arrayty = make_array(typ)
array = arrayty(context, builder, value)
res = array.strides
return impl_ret_untracked(context, builder, typ, res)
@builtin_attr
@impl_attribute(types.Kind(types.Array), "ndim", types.intp)
@impl_attribute(types.Kind(types.MemoryView), "ndim", types.intp)
def array_ndim(context, builder, typ, value):
res = context.get_constant(types.intp, typ.ndim)
return impl_ret_untracked(context, builder, typ, res)
@builtin_attr
@impl_attribute(types.Kind(types.Array), "size", types.intp)
def array_size(context, builder, typ, value):
arrayty = make_array(typ)
array = arrayty(context, builder, value)
res = array.nitems
return impl_ret_untracked(context, builder, typ, res)
@builtin_attr
@impl_attribute(types.Kind(types.Array), "itemsize", types.intp)
@impl_attribute(types.Kind(types.MemoryView), "itemsize", types.intp)
def array_itemsize(context, builder, typ, value):
arrayty = make_array(typ)
array = arrayty(context, builder, value)
res = array.itemsize
return impl_ret_untracked(context, builder, typ, res)
@builtin_attr
@impl_attribute(types.Kind(types.MemoryView), "nbytes", types.intp)
def array_nbytes(context, builder, typ, value):
"""
nbytes = size * itemsize
"""
arrayty = make_array(typ)
array = arrayty(context, builder, value)
res = builder.mul(array.nitems, array.itemsize)
return impl_ret_untracked(context, builder, typ, res)
@builtin_attr
@impl_attribute(types.Kind(types.MemoryView), "contiguous", types.boolean)
def array_contiguous(context, builder, typ, value):
res = context.get_constant(types.boolean, typ.is_contig)
return impl_ret_untracked(context, builder, typ, res)
@builtin_attr
@impl_attribute(types.Kind(types.MemoryView), "c_contiguous", types.boolean)
def array_c_contiguous(context, builder, typ, value):
res = context.get_constant(types.boolean, typ.is_c_contig)
return impl_ret_untracked(context, builder, typ, res)
@builtin_attr
@impl_attribute(types.Kind(types.MemoryView), "f_contiguous", types.boolean)
def array_f_contiguous(context, builder, typ, value):
res = context.get_constant(types.boolean, typ.is_f_contig)
return impl_ret_untracked(context, builder, typ, res)
@builtin_attr
@impl_attribute(types.Kind(types.MemoryView), "readonly", types.boolean)
def array_readonly(context, builder, typ, value):
res = context.get_constant(types.boolean, not typ.mutable)
return impl_ret_untracked(context, builder, typ, res)
@builtin_attr
@impl_attribute(types.Kind(types.Array), "ctypes",
types.Kind(types.ArrayCTypes))
def array_ctypes(context, builder, typ, value):
arrayty = make_array(typ)
array = arrayty(context, builder, value)
# Cast void* data to uintp
addr = builder.ptrtoint(array.data, context.get_value_type(types.uintp))
    # Create a new ArrayCTypes structure
ctinfo_type = cgutils.create_struct_proxy(types.ArrayCTypes(typ))
ctinfo = ctinfo_type(context, builder)
ctinfo.data = addr
res = ctinfo._getvalue()
return impl_ret_untracked(context, builder, typ, res)
@builtin_attr
@impl_attribute(types.Kind(types.Array), "flags", types.Kind(types.ArrayFlags))
def array_flags(context, builder, typ, value):
res = context.get_dummy_value()
return impl_ret_untracked(context, builder, typ, res)
@builtin_attr
@impl_attribute(types.Kind(types.ArrayCTypes), "data", types.uintp)
def array_ctypes_data(context, builder, typ, value):
ctinfo_type = cgutils.create_struct_proxy(typ)
ctinfo = ctinfo_type(context, builder, value=value)
res = ctinfo.data
return impl_ret_untracked(context, builder, typ, res)
@builtin_attr
@impl_attribute(types.Kind(types.ArrayFlags), "contiguous", types.boolean)
@impl_attribute(types.Kind(types.ArrayFlags), "c_contiguous", types.boolean)
def array_flags_contiguous(context, builder, typ, value):
val = typ.array_type.layout == 'C'
res = context.get_constant(types.boolean, val)
return impl_ret_untracked(context, builder, typ, res)
@builtin_attr
@impl_attribute(types.Kind(types.ArrayFlags), "f_contiguous", types.boolean)
def array_flags_f_contiguous(context, builder, typ, value):
layout = typ.array_type.layout
val = layout == 'F' if typ.array_type.ndim > 1 else layout in 'CF'
res = context.get_constant(types.boolean, val)
return impl_ret_untracked(context, builder, typ, res)
@builtin_attr
@impl_attribute_generic(types.Kind(types.Array))
def array_record_getattr(context, builder, typ, value, attr):
"""
Generic getattr() implementation for record arrays: fetch the given
record member, i.e. a subarray.
"""
arrayty = make_array(typ)
array = arrayty(context, builder, value)
rectype = typ.dtype
if not isinstance(rectype, types.Record):
raise AttributeError("attribute %r of %s not defined" % (attr, typ))
dtype = rectype.typeof(attr)
offset = rectype.offset(attr)
resty = typ.copy(dtype=dtype, layout='A')
raryty = make_array(resty)
rary = raryty(context, builder)
constoffset = context.get_constant(types.intp, offset)
llintp = context.get_value_type(types.intp)
newdata = builder.add(builder.ptrtoint(array.data, llintp), constoffset)
newdataptr = builder.inttoptr(newdata, rary.data.type)
datasize = context.get_abi_sizeof(context.get_data_type(dtype))
populate_array(rary,
data=newdataptr,
shape=array.shape,
strides=array.strides,
itemsize=context.get_constant(types.intp, datasize),
meminfo=array.meminfo,
parent=array.parent)
res = rary._getvalue()
return impl_ret_borrowed(context, builder, typ, res)
#-------------------------------------------------------------------------------
# Comparisons
@builtin
@implement('is', types.Kind(types.Array), types.Kind(types.Array))
def array_is(context, builder, sig, args):
aty, bty = sig.args
if aty != bty:
return cgutils.false_bit
def array_is_impl(a, b):
return (a.shape == b.shape and
a.strides == b.strides and
a.ctypes.data == b.ctypes.data)
return context.compile_internal(builder, array_is_impl, sig, args)
#-------------------------------------------------------------------------------
# builtin `numpy.flat` implementation
def make_array_flat_cls(flatiterty):
"""
Return the Structure representation of the given *flatiterty* (an
instance of types.NumpyFlatType).
"""
return _make_flattening_iter_cls(flatiterty, 'flat')
def make_array_ndenumerate_cls(nditerty):
"""
Return the Structure representation of the given *nditerty* (an
instance of types.NumpyNdEnumerateType).
"""
return _make_flattening_iter_cls(nditerty, 'ndenumerate')
def _increment_indices(context, builder, ndim, shape, indices, end_flag=None):
zero = context.get_constant(types.intp, 0)
one = context.get_constant(types.intp, 1)
bbend = builder.append_basic_block('end_increment')
if end_flag is not None:
builder.store(cgutils.false_byte, end_flag)
for dim in reversed(range(ndim)):
idxptr = cgutils.gep_inbounds(builder, indices, dim)
idx = increment_index(builder, builder.load(idxptr))
count = shape[dim]
in_bounds = builder.icmp(lc.ICMP_SLT, idx, count)
with cgutils.if_likely(builder, in_bounds):
builder.store(idx, idxptr)
builder.branch(bbend)
builder.store(zero, idxptr)
if end_flag is not None:
builder.store(cgutils.true_byte, end_flag)
builder.branch(bbend)
builder.position_at_end(bbend)
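# Illustrative pure-Python model of the odometer-style increment emitted
# above (hypothetical helper, for documentation only):
#
#   def _incr(indices, shape):
#       """Increment *indices* in-place; return True when iteration ends."""
#       for dim in reversed(range(len(shape))):
#           indices[dim] += 1
#           if indices[dim] < shape[dim]:
#               return False            # no carry needed
#           indices[dim] = 0            # wrap and carry into the outer dim
#       return True                     # every dimension wrapped -> exhausted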
def _increment_indices_array(context, builder, arrty, arr, indices, end_flag=None):
shape = cgutils.unpack_tuple(builder, arr.shape, arrty.ndim)
_increment_indices(context, builder, arrty.ndim, shape, indices, end_flag)
def make_ndindex_cls(nditerty):
"""
Return the Structure representation of the given *nditerty* (an
instance of types.NumpyNdIndexType).
"""
ndim = nditerty.ndim
class NdIndexIter(cgutils.create_struct_proxy(nditerty)):
"""
.ndindex() implementation.
"""
def init_specific(self, context, builder, shapes):
zero = context.get_constant(types.intp, 0)
indices = cgutils.alloca_once(builder, zero.type,
size=context.get_constant(types.intp,
ndim))
exhausted = cgutils.alloca_once_value(builder, cgutils.false_byte)
for dim in range(ndim):
idxptr = cgutils.gep_inbounds(builder, indices, dim)
builder.store(zero, idxptr)
# 0-sized dimensions really indicate an empty array,
# but we have to catch that condition early to avoid
# a bug inside the iteration logic.
dim_size = shapes[dim]
dim_is_empty = builder.icmp(lc.ICMP_EQ, dim_size, zero)
with cgutils.if_unlikely(builder, dim_is_empty):
builder.store(cgutils.true_byte, exhausted)
self.indices = indices
self.exhausted = exhausted
self.shape = cgutils.pack_array(builder, shapes, zero.type)
def iternext_specific(self, context, builder, result):
zero = context.get_constant(types.intp, 0)
one = context.get_constant(types.intp, 1)
bbend = builder.append_basic_block('end')
exhausted = cgutils.as_bool_bit(builder, builder.load(self.exhausted))
with cgutils.if_unlikely(builder, exhausted):
result.set_valid(False)
builder.branch(bbend)
indices = [builder.load(cgutils.gep_inbounds(builder, self.indices, dim))
for dim in range(ndim)]
for load in indices:
mark_positive(builder, load)
result.yield_(cgutils.pack_array(builder, indices, zero.type))
result.set_valid(True)
shape = cgutils.unpack_tuple(builder, self.shape, ndim)
_increment_indices(context, builder, ndim, shape,
self.indices, self.exhausted)
builder.branch(bbend)
builder.position_at_end(bbend)
return NdIndexIter
def _make_flattening_iter_cls(flatiterty, kind):
assert kind in ('flat', 'ndenumerate')
array_type = flatiterty.array_type
dtype = array_type.dtype
if array_type.layout == 'C':
class CContiguousFlatIter(cgutils.create_struct_proxy(flatiterty)):
"""
.flat() / .ndenumerate() implementation for C-contiguous arrays.
"""
def init_specific(self, context, builder, arrty, arr):
zero = context.get_constant(types.intp, 0)
self.index = cgutils.alloca_once_value(builder, zero)
# We can't trust strides[-1] to always contain the right
# step value, see
# http://docs.scipy.org/doc/numpy-dev/release.html#npy-relaxed-strides-checking
self.stride = arr.itemsize
if kind == 'ndenumerate':
# Zero-initialize the indices array.
indices = cgutils.alloca_once(
builder, zero.type,
size=context.get_constant(types.intp, arrty.ndim))
for dim in range(arrty.ndim):
idxptr = cgutils.gep_inbounds(builder, indices, dim)
builder.store(zero, idxptr)
self.indices = indices
# NOTE: Using gep() instead of explicit pointer addition helps
# LLVM vectorize the loop (since the stride is known and
# constant). This is not possible in the non-contiguous case,
# where the strides are unknown at compile-time.
def iternext_specific(self, context, builder, arrty, arr, result):
zero = context.get_constant(types.intp, 0)
one = context.get_constant(types.intp, 1)
ndim = arrty.ndim
nitems = arr.nitems
index = builder.load(self.index)
is_valid = builder.icmp(lc.ICMP_SLT, index, nitems)
result.set_valid(is_valid)
with cgutils.if_likely(builder, is_valid):
ptr = builder.gep(arr.data, [index])
value = load_item(context, builder, arrty, ptr)
if kind == 'flat':
result.yield_(value)
else:
# ndenumerate(): fetch and increment indices
indices = self.indices
idxvals = [builder.load(cgutils.gep_inbounds(builder, indices, dim))
for dim in range(ndim)]
idxtuple = cgutils.pack_array(builder, idxvals)
result.yield_(
cgutils.make_anonymous_struct(builder, [idxtuple, value]))
_increment_indices_array(context, builder, arrty, arr, indices)
index = builder.add(index, one)
builder.store(index, self.index)
def getitem(self, context, builder, arrty, arr, index):
ptr = builder.gep(arr.data, [index])
return load_item(context, builder, arrty, ptr)
def setitem(self, context, builder, arrty, arr, index, value):
ptr = builder.gep(arr.data, [index])
store_item(context, builder, arrty, value, ptr)
return CContiguousFlatIter
else:
class FlatIter(cgutils.create_struct_proxy(flatiterty)):
"""
Generic .flat() / .ndenumerate() implementation for
non-contiguous arrays.
It keeps track of pointers along each dimension in order to
minimize computations.
"""
def init_specific(self, context, builder, arrty, arr):
zero = context.get_constant(types.intp, 0)
data = arr.data
ndim = arrty.ndim
shapes = cgutils.unpack_tuple(builder, arr.shape, ndim)
indices = cgutils.alloca_once(builder, zero.type,
size=context.get_constant(types.intp,
arrty.ndim))
pointers = cgutils.alloca_once(builder, data.type,
size=context.get_constant(types.intp,
arrty.ndim))
strides = cgutils.unpack_tuple(builder, arr.strides, ndim)
exhausted = cgutils.alloca_once_value(builder, cgutils.false_byte)
# Initialize indices and pointers with their start values.
for dim in range(ndim):
idxptr = cgutils.gep_inbounds(builder, indices, dim)
ptrptr = cgutils.gep_inbounds(builder, pointers, dim)
builder.store(data, ptrptr)
builder.store(zero, idxptr)
# 0-sized dimensions really indicate an empty array,
# but we have to catch that condition early to avoid
# a bug inside the iteration logic (see issue #846).
dim_size = shapes[dim]
dim_is_empty = builder.icmp(lc.ICMP_EQ, dim_size, zero)
with cgutils.if_unlikely(builder, dim_is_empty):
builder.store(cgutils.true_byte, exhausted)
self.indices = indices
self.pointers = pointers
self.exhausted = exhausted
def iternext_specific(self, context, builder, arrty, arr, result):
ndim = arrty.ndim
data = arr.data
shapes = cgutils.unpack_tuple(builder, arr.shape, ndim)
strides = cgutils.unpack_tuple(builder, arr.strides, ndim)
indices = self.indices
pointers = self.pointers
zero = context.get_constant(types.intp, 0)
one = context.get_constant(types.intp, 1)
bbend = builder.append_basic_block('end')
# Catch already computed iterator exhaustion
is_exhausted = cgutils.as_bool_bit(
builder, builder.load(self.exhausted))
with cgutils.if_unlikely(builder, is_exhausted):
result.set_valid(False)
builder.branch(bbend)
result.set_valid(True)
# Current pointer inside last dimension
last_ptr = cgutils.gep_inbounds(builder, pointers, ndim - 1)
ptr = builder.load(last_ptr)
value = load_item(context, builder, arrty, ptr)
if kind == 'flat':
result.yield_(value)
else:
# ndenumerate() => yield (indices, value)
idxvals = [builder.load(cgutils.gep_inbounds(builder, indices, dim))
for dim in range(ndim)]
idxtuple = cgutils.pack_array(builder, idxvals)
result.yield_(
cgutils.make_anonymous_struct(builder, [idxtuple, value]))
# Update indices and pointers by walking from inner
# dimension to outer.
for dim in reversed(range(ndim)):
idxptr = cgutils.gep_inbounds(builder, indices, dim)
idx = builder.add(builder.load(idxptr), one)
count = shapes[dim]
stride = strides[dim]
in_bounds = builder.icmp(lc.ICMP_SLT, idx, count)
with cgutils.if_likely(builder, in_bounds):
# Index is valid => pointer can simply be incremented.
builder.store(idx, idxptr)
ptrptr = cgutils.gep_inbounds(builder, pointers, dim)
ptr = builder.load(ptrptr)
ptr = cgutils.pointer_add(builder, ptr, stride)
builder.store(ptr, ptrptr)
# Reset pointers in inner dimensions
for inner_dim in range(dim + 1, ndim):
ptrptr = cgutils.gep_inbounds(builder, pointers, inner_dim)
builder.store(ptr, ptrptr)
builder.branch(bbend)
# Reset index and continue with next dimension
builder.store(zero, idxptr)
# End of array
builder.store(cgutils.true_byte, self.exhausted)
builder.branch(bbend)
builder.position_at_end(bbend)
def _ptr_for_index(self, context, builder, arrty, arr, index):
ndim = arrty.ndim
shapes = cgutils.unpack_tuple(builder, arr.shape, count=ndim)
strides = cgutils.unpack_tuple(builder, arr.strides, count=ndim)
# First convert the flattened index into a regular n-dim index
indices = []
for dim in reversed(range(ndim)):
indices.append(builder.urem(index, shapes[dim]))
index = builder.udiv(index, shapes[dim])
indices.reverse()
ptr = cgutils.get_item_pointer2(builder, arr.data, shapes,
strides, arrty.layout, indices)
return ptr
def getitem(self, context, builder, arrty, arr, index):
ptr = self._ptr_for_index(context, builder, arrty, arr, index)
return load_item(context, builder, arrty, ptr)
def setitem(self, context, builder, arrty, arr, index, value):
ptr = self._ptr_for_index(context, builder, arrty, arr, index)
store_item(context, builder, arrty, value, ptr)
return FlatIter
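# Illustrative sketch (not part of the lowering code above): a pure-Python
# model of the pointer-walking iteration that FlatIter implements at the
# LLVM level for non-contiguous arrays. The helper name and the byte-offset
# bookkeeping are assumptions chosen for clarity.
def _py_flat_iter_model(shape, strides):
    """Yield item byte offsets in C order, mirroring iternext_specific."""
    ndim = len(shape)
    if any(s == 0 for s in shape):
        return  # a 0-sized dimension means an empty array (cf. issue #846)
    indices = [0] * ndim
    offsets = [0] * ndim  # per-dimension "pointers" (byte offsets)
    while True:
        yield offsets[-1]
        for dim in reversed(range(ndim)):
            indices[dim] += 1
            if indices[dim] < shape[dim]:
                offsets[dim] += strides[dim]
                # reset the pointers of all inner dimensions
                for inner in range(dim + 1, ndim):
                    offsets[inner] = offsets[dim]
                break
            indices[dim] = 0
        else:
            return  # every dimension wrapped around: iterator exhausted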
@builtin_attr
@impl_attribute(types.Kind(types.Array), "flat", types.Kind(types.NumpyFlatType))
def make_array_flatiter(context, builder, arrty, arr):
flatitercls = make_array_flat_cls(types.NumpyFlatType(arrty))
flatiter = flatitercls(context, builder)
arrayptr = cgutils.alloca_once_value(builder, arr)
flatiter.array = arrayptr
arrcls = context.make_array(arrty)
arr = arrcls(context, builder, ref=arrayptr)
flatiter.init_specific(context, builder, arrty, arr)
res = flatiter._getvalue()
return impl_ret_borrowed(context, builder, types.NumpyFlatType(arrty), res)
@builtin
@implement('iternext', types.Kind(types.NumpyFlatType))
@iternext_impl
def iternext_numpy_flatiter(context, builder, sig, args, result):
[flatiterty] = sig.args
[flatiter] = args
flatitercls = make_array_flat_cls(flatiterty)
flatiter = flatitercls(context, builder, value=flatiter)
arrty = flatiterty.array_type
arrcls = context.make_array(arrty)
arr = arrcls(context, builder, value=builder.load(flatiter.array))
flatiter.iternext_specific(context, builder, arrty, arr, result)
@builtin
@implement('getitem', types.Kind(types.NumpyFlatType), types.Kind(types.Integer))
def iternext_numpy_getitem(context, builder, sig, args):
flatiterty = sig.args[0]
flatiter, index = args
flatitercls = make_array_flat_cls(flatiterty)
flatiter = flatitercls(context, builder, value=flatiter)
arrty = flatiterty.array_type
arrcls = context.make_array(arrty)
arr = arrcls(context, builder, value=builder.load(flatiter.array))
res = flatiter.getitem(context, builder, arrty, arr, index)
return impl_ret_borrowed(context, builder, sig.return_type, res)
@builtin
@implement('setitem', types.Kind(types.NumpyFlatType), types.Kind(types.Integer),
types.Any)
def iternext_numpy_setitem(context, builder, sig, args):
flatiterty = sig.args[0]
flatiter, index, value = args
flatitercls = make_array_flat_cls(flatiterty)
flatiter = flatitercls(context, builder, value=flatiter)
arrty = flatiterty.array_type
arrcls = context.make_array(arrty)
arr = arrcls(context, builder, value=builder.load(flatiter.array))
flatiter.setitem(context, builder, arrty, arr, index, value)
return context.get_dummy_value()
@builtin
@implement(numpy.ndenumerate, types.Kind(types.Array))
def make_array_ndenumerate(context, builder, sig, args):
arrty, = sig.args
arr, = args
nditercls = make_array_ndenumerate_cls(types.NumpyNdEnumerateType(arrty))
nditer = nditercls(context, builder)
arrayptr = cgutils.alloca_once_value(builder, arr)
nditer.array = arrayptr
arrcls = context.make_array(arrty)
arr = arrcls(context, builder, ref=arrayptr)
nditer.init_specific(context, builder, arrty, arr)
res = nditer._getvalue()
return impl_ret_borrowed(context, builder, sig.return_type, res)
@builtin
@implement('iternext', types.Kind(types.NumpyNdEnumerateType))
@iternext_impl
def iternext_numpy_nditer(context, builder, sig, args, result):
[nditerty] = sig.args
[nditer] = args
nditercls = make_array_ndenumerate_cls(nditerty)
nditer = nditercls(context, builder, value=nditer)
arrty = nditerty.array_type
arrcls = context.make_array(arrty)
arr = arrcls(context, builder, value=builder.load(nditer.array))
nditer.iternext_specific(context, builder, arrty, arr, result)
@builtin
@implement(numpy.ndindex, types.VarArg(types.Kind(types.Integer)))
def make_array_ndindex(context, builder, sig, args):
"""ndindex(*shape)"""
shape = [context.cast(builder, arg, argty, types.intp)
for argty, arg in zip(sig.args, args)]
nditercls = make_ndindex_cls(types.NumpyNdIndexType(len(shape)))
nditer = nditercls(context, builder)
nditer.init_specific(context, builder, shape)
res = nditer._getvalue()
return impl_ret_borrowed(context, builder, sig.return_type, res)
@builtin
@implement(numpy.ndindex, types.Kind(types.BaseTuple))
def make_array_ndindex(context, builder, sig, args):
"""ndindex(shape)"""
ndim = sig.return_type.ndim
if ndim > 0:
idxty = sig.args[0].dtype
tup = args[0]
shape = cgutils.unpack_tuple(builder, tup, ndim)
shape = [context.cast(builder, idx, idxty, types.intp)
for idx in shape]
else:
shape = []
nditercls = make_ndindex_cls(types.NumpyNdIndexType(len(shape)))
nditer = nditercls(context, builder)
nditer.init_specific(context, builder, shape)
res = nditer._getvalue()
return impl_ret_borrowed(context, builder, sig.return_type, res)
@builtin
@implement('iternext', types.Kind(types.NumpyNdIndexType))
@iternext_impl
def iternext_numpy_ndindex(context, builder, sig, args, result):
[nditerty] = sig.args
[nditer] = args
nditercls = make_ndindex_cls(nditerty)
nditer = nditercls(context, builder, value=nditer)
nditer.iternext_specific(context, builder, result)
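# For orientation (hedged illustration of the semantics implemented above):
# numpy.ndindex yields C-ordered index tuples for a shape, e.g.
#     list(numpy.ndindex(2, 3)) == [(0, 0), (0, 1), (0, 2),
#                                   (1, 0), (1, 1), (1, 2)]
# The iternext implementations above produce the same sequence of tuples
# at the LLVM level.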
# -----------------------------------------------------------------------------
# Numpy array constructors
def _empty_nd_impl(context, builder, arrtype, shapes):
"""Utility function used for allocating a new array during LLVM code
generation (lowering). Given a target context, builder, array
type, and a tuple or list of lowered dimension sizes, returns a
LLVM value pointing at a Numba runtime allocated array.
"""
arycls = make_array(arrtype)
ary = arycls(context, builder)
datatype = context.get_data_type(arrtype.dtype)
itemsize = context.get_constant(types.intp,
context.get_abi_sizeof(datatype))
# compute array length
arrlen = context.get_constant(types.intp, 1)
for s in shapes:
arrlen = builder.mul(arrlen, s)
if arrtype.ndim == 0:
strides = ()
elif arrtype.layout == 'C':
strides = [itemsize]
for dimension_size in reversed(shapes[1:]):
strides.append(builder.mul(strides[-1], dimension_size))
strides = tuple(reversed(strides))
elif arrtype.layout == 'F':
strides = [itemsize]
for dimension_size in shapes[:-1]:
strides.append(builder.mul(strides[-1], dimension_size))
strides = tuple(strides)
else:
raise NotImplementedError(
"Don't know how to allocate array with layout '{0}'.".format(
arrtype.layout))
allocsize = builder.mul(itemsize, arrlen)
# NOTE: AVX prefers 32-byte alignment
meminfo = context.nrt_meminfo_alloc_aligned(builder, size=allocsize,
align=32)
data = context.nrt_meminfo_data(builder, meminfo)
intp_t = context.get_value_type(types.intp)
shape_array = cgutils.pack_array(builder, shapes, ty=intp_t)
strides_array = cgutils.pack_array(builder, strides, ty=intp_t)
populate_array(ary,
data=builder.bitcast(data, datatype.as_pointer()),
shape=shape_array,
strides=strides_array,
itemsize=itemsize,
meminfo=meminfo)
return ary
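# Worked example of the stride computation above (illustrative only): for
# shape (2, 3, 4) and an 8-byte dtype, C layout walks the last axis fastest,
# giving strides (96, 32, 8), while F layout walks the first axis fastest,
# giving strides (8, 16, 48). A small pure-Python mirror of that logic,
# under the same assumptions (the helper name is made up):
def _py_strides(shape, itemsize, layout):
    if layout == 'C':
        strides = [itemsize]
        for dim_size in reversed(shape[1:]):
            strides.append(strides[-1] * dim_size)
        return tuple(reversed(strides))
    elif layout == 'F':
        strides = [itemsize]
        for dim_size in shape[:-1]:
            strides.append(strides[-1] * dim_size)
        return tuple(strides)
    raise NotImplementedError(layout)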
def _zero_fill_array(context, builder, ary):
"""
Zero-fill an array. The array must be contiguous.
"""
cgutils.memset(builder, ary.data, builder.mul(ary.itemsize, ary.nitems), 0)
def _parse_empty_args(context, builder, sig, args):
"""
Parse the arguments of a np.empty(), np.zeros() or np.ones() call.
"""
arrshapetype = sig.args[0]
arrshape = args[0]
arrtype = sig.return_type
if isinstance(arrshapetype, types.Integer):
ndim = 1
shapes = [context.cast(builder, arrshape, arrshapetype, types.intp)]
else:
ndim = arrshapetype.count
arrshape = context.cast(builder, arrshape, arrshapetype,
types.UniTuple(types.intp, ndim))
shapes = cgutils.unpack_tuple(builder, arrshape, count=ndim)
zero = context.get_constant_generic(builder, types.intp, 0)
for dim in range(ndim):
is_neg = builder.icmp_signed('<', shapes[dim], zero)
with cgutils.if_unlikely(builder, is_neg):
context.call_conv.return_user_exc(builder, ValueError,
("negative dimensions not allowed",))
return arrtype, shapes
def _parse_empty_like_args(context, builder, sig, args):
"""
Parse the arguments of a np.empty_like(), np.zeros_like() or
np.ones_like() call.
"""
arytype = sig.args[0]
ary = make_array(arytype)(context, builder, value=args[0])
shapes = cgutils.unpack_tuple(builder, ary.shape, count=arytype.ndim)
return sig.return_type, shapes
@builtin
@implement(numpy.empty, types.Any)
@implement(numpy.empty, types.Any, types.Any)
def numpy_empty_nd(context, builder, sig, args):
arrtype, shapes = _parse_empty_args(context, builder, sig, args)
ary = _empty_nd_impl(context, builder, arrtype, shapes)
return impl_ret_new_ref(context, builder, sig.return_type, ary._getvalue())
@builtin
@implement(numpy.empty_like, types.Kind(types.Array))
@implement(numpy.empty_like, types.Kind(types.Array), types.Kind(types.DTypeSpec))
def numpy_empty_like_nd(context, builder, sig, args):
arrtype, shapes = _parse_empty_like_args(context, builder, sig, args)
ary = _empty_nd_impl(context, builder, arrtype, shapes)
return impl_ret_new_ref(context, builder, sig.return_type, ary._getvalue())
@builtin
@implement(numpy.zeros, types.Any)
@implement(numpy.zeros, types.Any, types.Any)
def numpy_zeros_nd(context, builder, sig, args):
arrtype, shapes = _parse_empty_args(context, builder, sig, args)
ary = _empty_nd_impl(context, builder, arrtype, shapes)
_zero_fill_array(context, builder, ary)
return impl_ret_new_ref(context, builder, sig.return_type, ary._getvalue())
@builtin
@implement(numpy.zeros_like, types.Kind(types.Array))
@implement(numpy.zeros_like, types.Kind(types.Array), types.Kind(types.DTypeSpec))
def numpy_zeros_like_nd(context, builder, sig, args):
arrtype, shapes = _parse_empty_like_args(context, builder, sig, args)
ary = _empty_nd_impl(context, builder, arrtype, shapes)
_zero_fill_array(context, builder, ary)
return impl_ret_new_ref(context, builder, sig.return_type, ary._getvalue())
if numpy_version >= (1, 8):
@builtin
@implement(numpy.full, types.Any, types.Any)
def numpy_full_nd(context, builder, sig, args):
def full(shape, value):
arr = numpy.empty(shape)
for idx in numpy.ndindex(arr.shape):
arr[idx] = value
return arr
res = context.compile_internal(builder, full, sig, args)
return impl_ret_new_ref(context, builder, sig.return_type, res)
@builtin
@implement(numpy.full, types.Any, types.Any, types.Kind(types.DTypeSpec))
def numpy_full_dtype_nd(context, builder, sig, args):
def full(shape, value, dtype):
arr = numpy.empty(shape, dtype)
for idx in numpy.ndindex(arr.shape):
arr[idx] = value
return arr
res = context.compile_internal(builder, full, sig, args)
return impl_ret_new_ref(context, builder, sig.return_type, res)
@builtin
@implement(numpy.full_like, types.Kind(types.Array), types.Any)
def numpy_full_like_nd(context, builder, sig, args):
def full_like(arr, value):
arr = numpy.empty_like(arr)
for idx in numpy.ndindex(arr.shape):
arr[idx] = value
return arr
res = context.compile_internal(builder, full_like, sig, args)
return impl_ret_new_ref(context, builder, sig.return_type, res)
@builtin
@implement(numpy.full_like, types.Kind(types.Array), types.Any, types.Kind(types.DTypeSpec))
def numpy_full_like_dtype_nd(context, builder, sig, args):
def full_like(arr, value, dtype):
arr = numpy.empty_like(arr, dtype)
for idx in numpy.ndindex(arr.shape):
arr[idx] = value
return arr
res = context.compile_internal(builder, full_like, sig, args)
return impl_ret_new_ref(context, builder, sig.return_type, res)
@builtin
@implement(numpy.ones, types.Any)
def numpy_ones_nd(context, builder, sig, args):
def ones(shape):
arr = numpy.empty(shape)
for idx in numpy.ndindex(arr.shape):
arr[idx] = 1
return arr
res = context.compile_internal(builder, ones, sig, args)
return impl_ret_new_ref(context, builder, sig.return_type, res)
@builtin
@implement(numpy.ones, types.Any, types.Kind(types.DTypeSpec))
def numpy_ones_dtype_nd(context, builder, sig, args):
def ones(shape, dtype):
arr = numpy.empty(shape, dtype)
for idx in numpy.ndindex(arr.shape):
arr[idx] = 1
return arr
res = context.compile_internal(builder, ones, sig, args)
return impl_ret_new_ref(context, builder, sig.return_type, res)
@builtin
@implement(numpy.ones_like, types.Kind(types.Array))
def numpy_ones_like_nd(context, builder, sig, args):
def ones_like(arr):
arr = numpy.empty_like(arr)
for idx in numpy.ndindex(arr.shape):
arr[idx] = 1
return arr
res = context.compile_internal(builder, ones_like, sig, args)
return impl_ret_new_ref(context, builder, sig.return_type, res)
@builtin
@implement(numpy.ones_like, types.Kind(types.Array), types.Kind(types.DTypeSpec))
def numpy_ones_like_dtype_nd(context, builder, sig, args):
def ones_like(arr, dtype):
arr = numpy.empty_like(arr, dtype)
for idx in numpy.ndindex(arr.shape):
arr[idx] = 1
return arr
res = context.compile_internal(builder, ones_like, sig, args)
return impl_ret_new_ref(context, builder, sig.return_type, res)
@builtin
@implement(numpy.identity, types.Kind(types.Integer))
def numpy_identity(context, builder, sig, args):
def identity(n):
arr = numpy.zeros((n, n))
for i in range(n):
arr[i, i] = 1
return arr
res = context.compile_internal(builder, identity, sig, args)
return impl_ret_new_ref(context, builder, sig.return_type, res)
@builtin
@implement(numpy.identity, types.Kind(types.Integer), types.Kind(types.DTypeSpec))
def numpy_identity(context, builder, sig, args):
def identity(n, dtype):
arr = numpy.zeros((n, n), dtype)
for i in range(n):
arr[i, i] = 1
return arr
res = context.compile_internal(builder, identity, sig, args)
return impl_ret_new_ref(context, builder, sig.return_type, res)
@builtin
@implement(numpy.eye, types.Kind(types.Integer))
def numpy_eye(context, builder, sig, args):
def eye(n):
return numpy.identity(n)
res = context.compile_internal(builder, eye, sig, args)
return impl_ret_new_ref(context, builder, sig.return_type, res)
@builtin
@implement(numpy.eye, types.Kind(types.Integer), types.Kind(types.Integer))
def numpy_eye(context, builder, sig, args):
def eye(n, m):
return numpy.eye(n, m, 0, numpy.float64)
res = context.compile_internal(builder, eye, sig, args)
return impl_ret_new_ref(context, builder, sig.return_type, res)
@builtin
@implement(numpy.eye, types.Kind(types.Integer), types.Kind(types.Integer),
types.Kind(types.Integer))
def numpy_eye(context, builder, sig, args):
def eye(n, m, k):
return numpy.eye(n, m, k, numpy.float64)
res = context.compile_internal(builder, eye, sig, args)
return impl_ret_new_ref(context, builder, sig.return_type, res)
@builtin
@implement(numpy.eye, types.Kind(types.Integer), types.Kind(types.Integer),
types.Kind(types.Integer), types.Kind(types.DTypeSpec))
def numpy_eye(context, builder, sig, args):
def eye(n, m, k, dtype):
arr = numpy.zeros((n, m), dtype)
if k >= 0:
d = min(n, m - k)
for i in range(d):
arr[i, i + k] = 1
else:
d = min(n + k, m)
for i in range(d):
arr[i - k, i] = 1
return arr
res = context.compile_internal(builder, eye, sig, args)
return impl_ret_new_ref(context, builder, sig.return_type, res)
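# Worked example of the diagonal bookkeeping above (illustration): for
# eye(3, 4, k=1) there are d = min(3, 4 - 1) = 3 ones at (i, i + 1):
#     [[0, 1, 0, 0],
#      [0, 0, 1, 0],
#      [0, 0, 0, 1]]
# and for k = -1, d = min(3 - 1, 4) = 2 ones at (i - k, i) = (i + 1, i).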
@builtin
@implement(numpy.arange, types.Kind(types.Number))
def numpy_arange_1(context, builder, sig, args):
dtype = as_dtype(sig.return_type.dtype)
def arange(stop):
return numpy.arange(0, stop, 1, dtype)
res = context.compile_internal(builder, arange, sig, args)
return impl_ret_new_ref(context, builder, sig.return_type, res)
@builtin
@implement(numpy.arange, types.Kind(types.Number), types.Kind(types.Number))
def numpy_arange_2(context, builder, sig, args):
dtype = as_dtype(sig.return_type.dtype)
def arange(start, stop):
return numpy.arange(start, stop, 1, dtype)
res = context.compile_internal(builder, arange, sig, args)
return impl_ret_new_ref(context, builder, sig.return_type, res)
@builtin
@implement(numpy.arange, types.Kind(types.Number), types.Kind(types.Number),
types.Kind(types.Number))
def numpy_arange_3(context, builder, sig, args):
dtype = as_dtype(sig.return_type.dtype)
def arange(start, stop, step):
return numpy.arange(start, stop, step, dtype)
res = context.compile_internal(builder, arange, sig, args)
return impl_ret_new_ref(context, builder, sig.return_type, res)
@builtin
@implement(numpy.arange, types.Kind(types.Number), types.Kind(types.Number),
types.Kind(types.Number), types.Kind(types.DTypeSpec))
def numpy_arange_4(context, builder, sig, args):
if any(isinstance(a, types.Complex) for a in sig.args):
def arange(start, stop, step, dtype):
nitems_c = (stop - start) / step
nitems_r = math.ceil(nitems_c.real)
nitems_i = math.ceil(nitems_c.imag)
nitems = max(min(nitems_i, nitems_r), 0)
arr = numpy.empty(nitems, dtype)
val = start
for i in range(nitems):
arr[i] = val
val += step
return arr
else:
def arange(start, stop, step, dtype):
nitems_r = math.ceil((stop - start) / step)
nitems = max(nitems_r, 0)
arr = numpy.empty(nitems, dtype)
val = start
for i in range(nitems):
arr[i] = val
val += step
return arr
res = context.compile_internal(builder, arange, sig, args,
locals={'nitems': types.intp})
return impl_ret_new_ref(context, builder, sig.return_type, res)
@builtin
@implement(numpy.linspace, types.Kind(types.Number), types.Kind(types.Number))
def numpy_linspace_2(context, builder, sig, args):
def linspace(start, stop):
return numpy.linspace(start, stop, 50)
res = context.compile_internal(builder, linspace, sig, args)
return impl_ret_new_ref(context, builder, sig.return_type, res)
@builtin
@implement(numpy.linspace, types.Kind(types.Number), types.Kind(types.Number),
types.Kind(types.Integer))
def numpy_linspace_3(context, builder, sig, args):
dtype = as_dtype(sig.return_type.dtype)
def linspace(start, stop, num):
arr = numpy.empty(num, dtype)
div = num - 1
delta = stop - start
arr[0] = start
for i in range(1, num):
arr[i] = start + delta * (i / div)
return arr
res = context.compile_internal(builder, linspace, sig, args)
return impl_ret_new_ref(context, builder, sig.return_type, res)
@builtin
@implement("array.copy", types.Kind(types.Array))
def array_copy(context, builder, sig, args):
arytype = sig.args[0]
ary = make_array(arytype)(context, builder, value=args[0])
shapes = cgutils.unpack_tuple(builder, ary.shape)
rettype = sig.return_type
ret = _empty_nd_impl(context, builder, rettype, shapes)
src_data = ary.data
dest_data = ret.data
assert rettype.layout == "C"
if arytype.layout == "C":
# Fast path: memcpy
# Compute array length
arrlen = context.get_constant(types.intp, 1)
for s in shapes:
arrlen = builder.mul(arrlen, s)
arrlen = builder.mul(arrlen, ary.itemsize)
pchar = lc.Type.int(8).as_pointer()
memcpy = builder.module.declare_intrinsic(
'llvm.memcpy', [pchar, pchar, arrlen.type])
builder.call(memcpy,
(builder.bitcast(dest_data, pchar),
builder.bitcast(src_data, pchar),
arrlen,
lc.Constant.int(lc.Type.int(32), 0),
lc.Constant.int(lc.Type.int(1), 0),
))
else:
src_strides = cgutils.unpack_tuple(builder, ary.strides)
dest_strides = cgutils.unpack_tuple(builder, ret.strides)
intp_t = context.get_value_type(types.intp)
with cgutils.loop_nest(builder, shapes, intp_t) as indices:
src_ptr = cgutils.get_item_pointer2(builder, src_data,
shapes, src_strides,
arytype.layout, indices)
dest_ptr = cgutils.get_item_pointer2(builder, dest_data,
shapes, dest_strides,
rettype.layout, indices)
builder.store(builder.load(src_ptr), dest_ptr)
return impl_ret_new_ref(context, builder, sig.return_type, ret._getvalue())
@builtin
@implement(numpy.frombuffer, types.Kind(types.Buffer))
@implement(numpy.frombuffer, types.Kind(types.Buffer), types.Kind(types.DTypeSpec))
def np_frombuffer(context, builder, sig, args):
bufty = sig.args[0]
aryty = sig.return_type
buf = make_array(bufty)(context, builder, value=args[0])
out_ary_ty = make_array(aryty)
out_ary = out_ary_ty(context, builder)
out_datamodel = out_ary._datamodel
itemsize = get_itemsize(context, aryty)
ll_itemsize = lc.Constant.int(buf.itemsize.type, itemsize)
nbytes = builder.mul(buf.nitems, buf.itemsize)
# Check that the buffer size is compatible
rem = builder.srem(nbytes, ll_itemsize)
is_incompatible = cgutils.is_not_null(builder, rem)
with builder.if_then(is_incompatible, likely=False):
msg = "buffer size must be a multiple of element size"
context.call_conv.return_user_exc(builder, ValueError, (msg,))
shape = cgutils.pack_array(builder, [builder.sdiv(nbytes, ll_itemsize)])
strides = cgutils.pack_array(builder, [ll_itemsize])
data = builder.bitcast(buf.data,
context.get_value_type(out_datamodel.get_type('data')))
populate_array(out_ary,
data=data,
shape=shape,
strides=strides,
itemsize=ll_itemsize,
meminfo=buf.meminfo,
parent=buf.parent,)
res = out_ary._getvalue()
return impl_ret_borrowed(context, builder, sig.return_type, res)
# -----------------------------------------------------------------------------
# Sorting
_sorting_init = False
def lt_floats(a, b):
return math.isnan(b) or a < b
def load_sorts():
"""
Load quicksort lazily, to avoid circular imports across the jit() global.
"""
g = globals()
if g['_sorting_init']:
return
default_quicksort = quicksort.make_jit_quicksort()
g['run_default_quicksort'] = default_quicksort.run_quicksort
float_quicksort = quicksort.make_jit_quicksort(lt=lt_floats)
g['run_float_quicksort'] = float_quicksort.run_quicksort
g['_sorting_init'] = True
@builtin
@implement("array.sort", types.Kind(types.Array))
def array_sort(context, builder, sig, args):
load_sorts()
arytype = sig.args[0]
dtype = arytype.dtype
if isinstance(dtype, types.Float):
def array_sort_impl(arr):
return run_float_quicksort(arr)
else:
def array_sort_impl(arr):
return run_default_quicksort(arr)
return context.compile_internal(builder, array_sort_impl, sig, args)
@builtin
@implement(numpy.sort, types.Kind(types.Array))
def np_sort(context, builder, sig, args):
def np_sort_impl(a):
res = a.copy()
res.sort()
return res
return context.compile_internal(builder, np_sort_impl, sig, args)
|
bsd-2-clause
| -1,622,536,346,280,335,400
| 34.741639
| 96
| 0.599943
| false
| 3.689943
| false
| false
| false
|
frombeijingwithlove/dlcv_for_beginners
|
chap6/bbox_labeling/bbox_labeling.py
|
1
|
7575
|
import os
import cv2
from tkFileDialog import askdirectory
from tkMessageBox import askyesno
WINDOW_NAME = 'Simple Bounding Box Labeling Tool'
FPS = 24
SUPPORTED_FORMATS = ['jpg', 'jpeg', 'png']
DEFAULT_COLOR = {'Object': (255, 0, 0)}
COLOR_GRAY = (192, 192, 192)
BAR_HEIGHT = 16
KEY_UP = 65362
KEY_DOWN = 65364
KEY_LEFT = 65361
KEY_RIGHT = 65363
KEY_ESC = 27
KEY_DELETE = 65535
KEY_EMPTY = 0
get_bbox_name = '{}.bbox'.format
class SimpleBBoxLabeling:
def __init__(self, data_dir, fps=FPS, window_name=None):
self._data_dir = data_dir
self.fps = fps
self.window_name = window_name if window_name else WINDOW_NAME
self._pt0 = None
self._pt1 = None
self._drawing = False
self._cur_label = None
self._bboxes = []
label_path = '{}.labels'.format(self._data_dir)
self.label_colors = DEFAULT_COLOR if not os.path.exists(label_path) else self.load_labels(label_path)
imagefiles = [x for x in os.listdir(self._data_dir) if x[x.rfind('.') + 1:].lower() in SUPPORTED_FORMATS]
labeled = [x for x in imagefiles if os.path.exists(os.sep.join([self._data_dir, get_bbox_name(x)]))]
to_be_labeled = [x for x in imagefiles if x not in labeled]
self._filelist = labeled + to_be_labeled
self._index = len(labeled)
if self._index > len(self._filelist) - 1:
self._index = len(self._filelist) - 1
def _mouse_ops(self, event, x, y, flags, param):
if event == cv2.EVENT_LBUTTONDOWN:
self._drawing = True
self._pt0 = (x, y)
elif event == cv2.EVENT_LBUTTONUP:
self._drawing = False
self._pt1 = (x, y)
self._bboxes.append((self._cur_label, (self._pt0, self._pt1)))
elif event == cv2.EVENT_MOUSEMOVE:
self._pt1 = (x, y)
elif event == cv2.EVENT_RBUTTONUP:
if self._bboxes:
self._bboxes.pop()
def _clean_bbox(self):
self._pt0 = None
self._pt1 = None
self._drawing = False
self._bboxes = []
def _draw_bbox(self, img):
h, w = img.shape[:2]
canvas = cv2.copyMakeBorder(img, 0, BAR_HEIGHT, 0, 0, cv2.BORDER_CONSTANT, value=COLOR_GRAY)
label_msg = '{}: {}, {}'.format(self._cur_label, self._pt0, self._pt1) \
if self._drawing \
else 'Current label: {}'.format(self._cur_label)
msg = '{}/{}: {} | {}'.format(self._index + 1, len(self._filelist), self._filelist[self._index], label_msg)
cv2.putText(canvas, msg, (1, h+12),
cv2.FONT_HERSHEY_SIMPLEX,
0.5, (0, 0, 0), 1)
for label, (bpt0, bpt1) in self._bboxes:
label_color = self.label_colors[label] if label in self.label_colors else COLOR_GRAY
cv2.rectangle(canvas, bpt0, bpt1, label_color, thickness=2)
cv2.putText(canvas, label, (bpt0[0]+3, bpt0[1]+15),
cv2.FONT_HERSHEY_SIMPLEX,
0.5, label_color, 2)
if self._drawing:
label_color = self.label_colors[self._cur_label] if self._cur_label in self.label_colors else COLOR_GRAY
if self._pt1[0] >= self._pt0[0] and self._pt1[1] >= self._pt0[1]:
cv2.rectangle(canvas, self._pt0, self._pt1, label_color, thickness=2)
cv2.putText(canvas, self._cur_label, (self._pt0[0] + 3, self._pt0[1] + 15),
cv2.FONT_HERSHEY_SIMPLEX,
0.5, label_color, 2)
return canvas
@staticmethod
def export_bbox(filepath, bboxes):
if bboxes:
with open(filepath, 'w') as f:
for bbox in bboxes:
line = repr(bbox) + '\n'
f.write(line)
elif os.path.exists(filepath):
os.remove(filepath)
@staticmethod
def load_bbox(filepath):
bboxes = []
with open(filepath, 'r') as f:
line = f.readline().rstrip()
while line:
bboxes.append(eval(line))
line = f.readline().rstrip()
return bboxes
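# Hedged illustration of the on-disk format shared by export_bbox and
# load_bbox: one repr()'d (label, (pt0, pt1)) tuple per line, e.g.
#     ('Object', ((12, 34), (56, 78)))
#     ('Object', ((100, 40), (180, 90)))
# load_bbox eval()s each line back into a tuple, so these files must only
# ever contain trusted, tool-generated content.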
@staticmethod
def load_labels(filepath):
label_colors = {}
with open(filepath, 'r') as f:
line = f.readline().rstrip()
while line:
label, color = eval(line)
label_colors[label] = color
line = f.readline().rstrip()
return label_colors
@staticmethod
def load_sample(filepath):
img = cv2.imread(filepath)
bbox_filepath = get_bbox_name(filepath)
bboxes = []
if os.path.exists(bbox_filepath):
bboxes = SimpleBBoxLabeling.load_bbox(bbox_filepath)
return img, bboxes
def _export_n_clean_bbox(self):
bbox_filepath = os.sep.join([self._data_dir, get_bbox_name(self._filelist[self._index])])
self.export_bbox(bbox_filepath, self._bboxes)
self._clean_bbox()
def _delete_current_sample(self):
filename = self._filelist[self._index]
filepath = os.sep.join([self._data_dir, filename])
if os.path.exists(filepath):
os.remove(filepath)
filepath = get_bbox_name(filepath)
if os.path.exists(filepath):
os.remove(filepath)
self._filelist.pop(self._index)
print('{} is deleted!'.format(filename))
def start(self):
last_filename = ''
label_index = 0
labels = self.label_colors.keys()
n_labels = len(labels)
cv2.namedWindow(self.window_name)
cv2.setMouseCallback(self.window_name, self._mouse_ops)
key = KEY_EMPTY
delay = int(1000 / self.fps)
while key != KEY_ESC:
if key == KEY_UP:
if label_index == 0:
pass
else:
label_index -= 1
elif key == KEY_DOWN:
if label_index == n_labels - 1:
pass
else:
label_index += 1
elif key == KEY_LEFT:
if self._index > 0:
self._export_n_clean_bbox()
self._index -= 1
if self._index < 0:
self._index = 0
elif key == KEY_RIGHT:
if self._index < len(self._filelist) - 1:
self._export_n_clean_bbox()
self._index += 1
if self._index > len(self._filelist) - 1:
self._index = len(self._filelist) - 1
elif key == KEY_DELETE:
if askyesno('Delete Sample', 'Are you sure?'):
self._delete_current_sample()
key = KEY_EMPTY
continue
filename = self._filelist[self._index]
if filename != last_filename:
filepath = os.sep.join([self._data_dir, filename])
img, self._bboxes = self.load_sample(filepath)
self._cur_label = labels[label_index]
canvas = self._draw_bbox(img)
cv2.imshow(self.window_name, canvas)
key = cv2.waitKey(delay)
last_filename = filename
print('Finished!')
cv2.destroyAllWindows()
self.export_bbox(os.sep.join([self._data_dir, get_bbox_name(filename)]), self._bboxes)
print('Labels updated!')
if __name__ == '__main__':
dir_with_images = askdirectory(title='Where are the images?')
labeling_task = SimpleBBoxLabeling(dir_with_images)
labeling_task.start()
|
bsd-3-clause
| 2,674,751,870,218,334,700
| 32.223684
| 116
| 0.532937
| false
| 3.620937
| false
| false
| false
|
teampopong/pokr.kr
|
alembic/versions/3e683fc1af11_region_id_field_of_meetings_table.py
|
1
|
1559
|
# -*- coding: utf-8 -*-
"""region_id field of 'meetings' table
Revision ID: 3e683fc1af11
Revises: 2f08fb65fe0b
Create Date: 2014-05-24 21:31:25.378918
"""
from __future__ import unicode_literals
# revision identifiers, used by Alembic.
revision = '3e683fc1af11'
down_revision = '2f08fb65fe0b'
from alembic import op
from sqlalchemy.sql import table, column
import sqlalchemy as sa
region = table('region',
column('id', sa.String(16)),
column('name', sa.Unicode(20)),
column('name_en', sa.String(80)),
)
def upgrade():
op.alter_column('meeting', 'id', type_=sa.BigInteger, autoincrement=False)
op.alter_column('meeting_attendee', 'meeting_id', type_=sa.BigInteger)
op.alter_column('statement', 'meeting_id', type_=sa.BigInteger)
op.add_column('meeting', sa.Column('region_id', sa.String(length=16)))
op.create_index(op.f('ix_meeting_region_id'), 'meeting', ['region_id'], unique=False)
op.execute(
region.insert()\
.values({
'id': '0',
'name': '대한민국',
'name_en': 'national',
})
)
def downgrade():
op.alter_column('meeting', 'id', type_=sa.Integer, autoincrement=True)
op.alter_column('meeting_attendee', 'meeting_id', type_=sa.Integer)
op.alter_column('statement', 'meeting_id', type_=sa.Integer)
op.drop_index(op.f('ix_meeting_region_id'), table_name='meeting')
op.drop_column('meeting', 'region_id')
op.execute(
region.delete()\
.where(region.c.id == '0')
)
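# Typical usage with the standard Alembic CLI (shown for orientation only):
#     alembic upgrade 3e683fc1af11     # apply this revision
#     alembic downgrade 2f08fb65fe0b   # revert to the previous revision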
|
apache-2.0
| -873,324,810,509,578,100
| 27.722222
| 89
| 0.620245
| false
| 3.05315
| false
| false
| false
|
Ektorus/bohrium
|
ve/cpu/tools/locate.py
|
1
|
8762
|
from __future__ import print_function
## 3D Lattice Boltzmann (BGK) model of a fluid.
## D3Q19 model. At each timestep, particle densities propagate
## outwards in the directions indicated in the figure. An
## equivalent 'equilibrium' density is found, and the densities
## relax towards that state, in a proportion governed by omega.
## Iain Haslam, March 2006.
import util
if util.Benchmark().bohrium:
import bohrium as np
else:
import numpy as np
def main():
B = util.Benchmark()
nx = B.size[0]
ny = B.size[1]
nz = B.size[2]
ITER = B.size[3]
NO_OBST = 1
omega = 1.0
density = 1.0
deltaU = 1e-7
t1 = 1/3.0
t2 = 1/18.0
t3 = 1/36.0
B.start()
F = np.ones((19, nx, ny, nz), dtype=np.float64)
F[:] = density/19.0
FEQ = np.ones((19, nx, ny, nz), dtype=np.float64)
FEQ[:] = density/19.0
T = np.zeros((19, nx, ny, nz), dtype=np.float64)
#Create the scenery.
BOUND = np.zeros((nx, ny, nz), dtype=np.float64)
BOUNDi = np.ones((nx, ny, nz), dtype=np.float64)
"""
if not NO_OBST:
for i in xrange(nx):
for j in xrange(ny):
for k in xrange(nz):
if ((i-4)**2+(j-5)**2+(k-6)**2) < 6:
BOUND[i,j,k] += 1.0
BOUNDi[i,j,k] += 0.0
BOUND[:,0,:] += 1.0
BOUNDi[:,0,:] *= 0.0
"""
if util.Benchmark().bohrium:
np.flush()
for ts in xrange(0, ITER):
##Propagate / Streaming step
T[:] = F
#nearest-neighbours
F[1,:,:,0] = T[1,:,:,-1]
F[1,:,:,1:] = T[1,:,:,:-1]
F[2,:,:,:-1] = T[2,:,:,1:]
F[2,:,:,-1] = T[2,:,:,0]
F[3,:,0,:] = T[3,:,-1,:]
F[3,:,1:,:] = T[3,:,:-1,:]
F[4,:,:-1,:] = T[4,:,1:,:]
F[4,:,-1,:] = T[4,:,0,:]
F[5,0,:,:] = T[5,-1,:,:]
F[5,1:,:,:] = T[5,:-1,:,:]
F[6,:-1,:,:] = T[6,1:,:,:]
F[6,-1,:,:] = T[6,0,:,:]
#next-nearest neighbours
F[7,0 ,0 ,:] = T[7,-1 , -1,:]
F[7,0 ,1:,:] = T[7,-1 ,:-1,:]
F[7,1:,0 ,:] = T[7,:-1, -1,:]
F[7,1:,1:,:] = T[7,:-1,:-1,:]
F[8,0 ,:-1,:] = T[8,-1 ,1:,:]
F[8,0 , -1,:] = T[8,-1 ,0 ,:]
F[8,1:,:-1,:] = T[8,:-1,1:,:]
F[8,1:, -1,:] = T[8,:-1,0 ,:]
F[9,:-1,0 ,:] = T[9,1:, -1,:]
F[9,:-1,1:,:] = T[9,1:,:-1,:]
F[9,-1 ,0 ,:] = T[9,0 , -1,:]
F[9,-1 ,1:,:] = T[9,0 ,:-1,:]
F[10,:-1,:-1,:] = T[10,1:,1:,:]
F[10,:-1, -1,:] = T[10,1:,0 ,:]
F[10,-1 ,:-1,:] = T[10,0 ,1:,:]
F[10,-1 , -1,:] = T[10,0 ,0 ,:]
F[11,0 ,:,0 ] = T[11,0 ,:, -1]
F[11,0 ,:,1:] = T[11,0 ,:,:-1]
F[11,1:,:,0 ] = T[11,:-1,:, -1]
F[11,1:,:,1:] = T[11,:-1,:,:-1]
F[12,0 ,:,:-1] = T[12, -1,:,1:]
F[12,0 ,:, -1] = T[12, -1,:,0 ]
F[12,1:,:,:-1] = T[12,:-1,:,1:]
F[12,1:,:, -1] = T[12,:-1,:,0 ]
F[13,:-1,:,0 ] = T[13,1:,:, -1]
F[13,:-1,:,1:] = T[13,1:,:,:-1]
F[13, -1,:,0 ] = T[13,0 ,:, -1]
F[13, -1,:,1:] = T[13,0 ,:,:-1]
F[14,:-1,:,:-1] = T[14,1:,:,1:]
F[14,:-1,:, -1] = T[14,1:,:,0 ]
F[14,-1 ,:,:-1] = T[14,0 ,:,1:]
F[14,-1 ,:, -1] = T[14,0 ,:,0 ]
F[15,:,0 ,0 ] = T[15,:, -1, -1]
F[15,:,0 ,1:] = T[15,:, -1,:-1]
F[15,:,1:,0 ] = T[15,:,:-1, -1]
F[15,:,1:,1:] = T[15,:,:-1,:-1]
F[16,:,0 ,:-1] = T[16,:, -1,1:]
F[16,:,0 , -1] = T[16,:, -1,0 ]
F[16,:,1:,:-1] = T[16,:,:-1,1:]
F[16,:,1:, -1] = T[16,:,:-1,0 ]
F[17,:,:-1,0 ] = T[17,:,1:, -1]
F[17,:,:-1,1:] = T[17,:,1:,:-1]
F[17,:, -1,0 ] = T[17,:,0 , -1]
F[17,:, -1,1:] = T[17,:,0 ,:-1]
F[18,:,:-1,:-1] = T[18,:,1:,1:]
F[18,:,:-1, -1] = T[18,:,1:,0 ]
F[18,:,-1 ,:-1] = T[18,:,0 ,1:]
F[18,:,-1 , -1] = T[18,:,0 ,0 ]
#Densities bouncing back at next timestep
BB = np.zeros(F.shape)  # must start zeroed: bounce-back densities are accumulated below with +=
T[:] = F
T[1:,:,:,:] *= BOUND[np.newaxis,:,:,:]
BB[2 ,:,:,:] += T[1 ,:,:,:]
BB[1 ,:,:,:] += T[2 ,:,:,:]
BB[4 ,:,:,:] += T[3 ,:,:,:]
BB[3 ,:,:,:] += T[4 ,:,:,:]
BB[6 ,:,:,:] += T[5 ,:,:,:]
BB[5 ,:,:,:] += T[6 ,:,:,:]
BB[10,:,:,:] += T[7 ,:,:,:]
BB[9 ,:,:,:] += T[8 ,:,:,:]
BB[8 ,:,:,:] += T[9 ,:,:,:]
BB[7 ,:,:,:] += T[10,:,:,:]
BB[14,:,:,:] += T[11,:,:,:]
BB[13,:,:,:] += T[12,:,:,:]
BB[12,:,:,:] += T[13,:,:,:]
BB[11,:,:,:] += T[14,:,:,:]
BB[18,:,:,:] += T[15,:,:,:]
BB[17,:,:,:] += T[16,:,:,:]
BB[16,:,:,:] += T[17,:,:,:]
BB[15,:,:,:] += T[18,:,:,:]
# Relax: calculate the equilibrium state (FEQ) with speed and density equivalent to F
DENSITY = np.add.reduce(F)
#UX = F[5,:,:,:].copy()
UX = np.ones(F[5,:,:,:].shape, dtype=np.float64)
UX[:,:,:] = F[5,:,:,:]
UX += F[7,:,:,:]
UX += F[8,:,:,:]
UX += F[11,:,:,:]
UX += F[12,:,:,:]
UX -= F[6,:,:,:]
UX -= F[9,:,:,:]
UX -= F[10,:,:,:]
UX -= F[13,:,:,:]
UX -= F[14,:,:,:]
UX /= DENSITY
#UY = F[3,:,:,:].copy()
UY = np.ones(F[3,:,:,:].shape, dtype=np.float64)
UY[:,:,:] = F[3,:,:,:]
UY += F[7,:,:,:]
UY += F[9,:,:,:]
UY += F[15,:,:,:]
UY += F[16,:,:,:]
UY -= F[4,:,:,:]
UY -= F[8,:,:,:]
UY -= F[10,:,:,:]
UY -= F[17,:,:,:]
UY -= F[18,:,:,:]
UY /= DENSITY
#UZ = F[1,:,:,:].copy()
UZ = np.ones(F[1,:,:,:].shape, dtype=np.float64)
UZ[:,:,:] = F[1,:,:,:]
UZ += F[11,:,:,:]
UZ += F[13,:,:,:]
UZ += F[15,:,:,:]
UZ += F[17,:,:,:]
UZ -= F[2,:,:,:]
UZ -= F[12,:,:,:]
UZ -= F[14,:,:,:]
UZ -= F[16,:,:,:]
UZ -= F[18,:,:,:]
UZ /= DENSITY
UX[0,:,:] += deltaU #Increase inlet pressure
#Set boundaries to zero.
UX[:,:,:] *= BOUNDi
UY[:,:,:] *= BOUNDi
UZ[:,:,:] *= BOUNDi
DENSITY[:,:,:] *= BOUNDi
U_SQU = UX**2 + UY**2 + UZ**2
# Calculate equilibrium distribution: stationary
FEQ[0,:,:,:] = (t1*DENSITY)*(1.0-3.0*U_SQU/2.0)
# nearest-neighbours
T1 = 3.0/2.0*U_SQU
tDENSITY = t2*DENSITY
FEQ[1,:,:,:]=tDENSITY*(1.0 + 3.0*UZ + 9.0/2.0*UZ**2 - T1)
FEQ[2,:,:,:]=tDENSITY*(1.0 - 3.0*UZ + 9.0/2.0*UZ**2 - T1)
FEQ[3,:,:,:]=tDENSITY*(1.0 + 3.0*UY + 9.0/2.0*UY**2 - T1)
FEQ[4,:,:,:]=tDENSITY*(1.0 - 3.0*UY + 9.0/2.0*UY**2 - T1)
FEQ[5,:,:,:]=tDENSITY*(1.0 + 3.0*UX + 9.0/2.0*UX**2 - T1)
FEQ[6,:,:,:]=tDENSITY*(1.0 - 3.0*UX + 9.0/2.0*UX**2 - T1)
# next-nearest neighbours
T1 = 3.0*U_SQU/2.0
tDENSITY = t3*DENSITY
U8 = UX+UY
FEQ[7,:,:,:] =tDENSITY*(1.0 + 3.0*U8 + 9.0/2.0*(U8)**2 - T1)
U9 = UX-UY
FEQ[8,:,:,:] =tDENSITY*(1.0 + 3.0*U9 + 9.0/2.0*(U9)**2 - T1)
U10 = -UX+UY
FEQ[9,:,:,:] =tDENSITY*(1.0 + 3.0*U10 + 9.0/2.0*(U10)**2 - T1)
U8 *= -1.0
FEQ[10,:,:,:]=tDENSITY*(1.0 + 3.0*U8 + 9.0/2.0*(U8)**2 - T1)
U12 = UX+UZ
FEQ[11,:,:,:]=tDENSITY*(1.0 + 3.0*U12 + 9.0/2.0*(U12)**2 - T1)
U12 *= -1.0
FEQ[14,:,:,:]=tDENSITY*(1.0 + 3.0*U12 + 9.0/2.0*(U12)**2 - T1)
U13 = UX-UZ
FEQ[12,:,:,:]=tDENSITY*(1.0 + 3.0*U13 + 9.0/2.0*(U13)**2 - T1)
U13 *= -1.0
FEQ[13,:,:,:]=tDENSITY*(1.0 + 3.0*U13 + 9.0/2.0*(U13)**2 - T1)
U16 = UY+UZ
FEQ[15,:,:,:]=tDENSITY*(1.0 + 3.0*U16 + 9.0/2.0*(U16)**2 - T1)
U17 = UY-UZ
FEQ[16,:,:,:]=tDENSITY*(1.0 + 3.0*U17 + 9.0/2.0*(U17)**2 - T1)
U17 *= -1.0
FEQ[17,:,:,:]=tDENSITY*(1.0 + 3.0*U17 + 9.0/2.0*(U17)**2 - T1)
U16 *= -1.0
FEQ[18,:,:,:]=tDENSITY*(1.0 + 3.0*U16 + 9.0/2.0*(U16)**2 - T1)
F *= (1.0-omega)
F += omega * FEQ
#Densities bouncing back at next timestep
F[1:,:,:,:] *= BOUNDi[np.newaxis,:,:,:]
F[1:,:,:,:] += BB[1:,:,:,:]
del BB
del T1
del UY, UZ  # UX is kept: it may be written to the output file after the loop
del U_SQU
del DENSITY, tDENSITY
del U8, U9, U10, U12, U13, U16, U17
if util.Benchmark().bohrium:
np.flush()
B.stop()
B.pprint()
if B.outputfn:
B.tofile(B.outputfn, {'res': UX})
"""
import matplotlib.pyplot as plt
UX *= -1
plt.hold(True)
plt.quiver(UY[:,:,4],UX[:,:,4], pivot='middle')
plt.imshow(BOUND[:,:,4])
plt.show()
"""
if __name__ == "__main__":
main()
|
lgpl-3.0
| -477,230,179,552,261,200
| 30.070922
| 88
| 0.351518
| false
| 2.279396
| false
| false
| false
|
Micronaet/micronaet-mx
|
sale_discount/model/discount.py
|
1
|
4156
|
# -*- coding: utf-8 -*-
###############################################################################
#
# Copyright (C) 2001-2014 Micronaet SRL (<http://www.micronaet.it>).
#
# Original module for stock.move from:
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
import os
import sys
import logging
import openerp
import openerp.netsvc as netsvc
import openerp.addons.decimal_precision as dp
from openerp.osv import fields, osv, expression, orm
from datetime import datetime, timedelta
from dateutil.relativedelta import relativedelta
from openerp import SUPERUSER_ID
from openerp import tools
from openerp.tools.translate import _
from openerp.tools.float_utils import float_round as round
from openerp.tools import (DEFAULT_SERVER_DATE_FORMAT,
DEFAULT_SERVER_DATETIME_FORMAT,
DATETIME_FORMATS_MAP,
float_compare)
_logger = logging.getLogger(__name__)
discount_type = [
('integrated', 'Integrate'),
('inline', 'Inline'),
('row', 'Different row'),
]
class ResPartner(orm.Model):
''' Extra elements to manage discounts
'''
_inherit = 'res.partner'
def onchange_discount(self, cr, uid, ids, discount_scale, discount,
mode='scale', context=None):
''' Update discount depending on the scale (or reset the scale)
'''
res = {'value': {}}
try:
if mode == 'scale':
scale = discount_scale.split('+')
discount_scale_cleaned = ''
rate = 100.0
for i in scale:
i = float(i.strip().replace('%', '').replace(',', '.'))
rate -= rate * i / 100.0
discount_scale_cleaned += "%s%5.2f%s " % (
'+' if discount_scale_cleaned else '', i, '%')
res['value']['discount'] = 100.0 - rate
res['value']['discount_scale'] = discount_scale_cleaned
else: # 'discount':
pass #res['value']['discount_scale'] = False
except:
res['warning'] = {
'title': _('Discount error'),
'message': _('Scale value not correct!'),
}
return res
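# Worked example of the scale parsing above (illustration): for a scale
# of "10+5" the remaining rate is 100 * (1 - 0.10) * (1 - 0.05) = 85.5,
# so 'discount' becomes 14.5 and 'discount_scale' is normalized to
# "10.00% + 5.00% " (the spacing comes from the %5.2f format).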
_columns = {
'discount_type': fields.selection(discount_type, 'Discount type'),
'discount_scale': fields.char('Discount scale', size=35),
'discount': fields.float('Discount', digits=(
16, 2), help='Automated calculate if scale is indicated'),
}
_defaults = {
'discount_type': lambda *x: 'integrated',
}
class SaleOrderLine(orm.Model):
''' Add discount type and scale to sale order lines
'''
_inherit = 'sale.order.line'
def onchange_discount(self, cr, uid, ids, discount_scale, discount,
mode='scale', context=None):
''' Call onchange in partner
'''
return self.pool.get('res.partner').onchange_discount(cr, uid, ids,
discount_scale=discount_scale, discount=discount, mode=mode,
context=context)
_columns = {
'discount_type': fields.selection(discount_type, 'Discount type'),
'discount_scale': fields.char('Discount scale', size=15),
}
_defaults = {
'discount_type': lambda *x: 'integrated',
}
|
agpl-3.0
| 6,720,405,219,061,504,000
| 34.220339
| 79
| 0.568816
| false
| 4.311203
| false
| false
| false
|
soulnothing/FlaskReDoc
|
example/exampleapplication.py
|
1
|
3734
|
import sys
import re
import json
from flask import current_app, render_template, render_template_string
from flask import Flask, jsonify
from threading import Thread
from flaskredoc import ReDoc
from werkzeug.wsgi import DispatcherMiddleware
from werkzeug.serving import run_simple
from werkzeug.debug import DebuggedApplication
import os
"""
This is an example application is does not actually do anything really.
All responses are canned and sometimes randomize json structures. It is
meant to show the ease of documenting your application.
"""
resp_folder=os.path.join(os.getcwd(), 'responses')
app = Flask(__name__)
app.debug = True
@app.route('/')
def blah():
'''
The entry point function; it just returns a string.
A function without much purpose: it simply returns a canned string.
* @CODE 200: A successful response.
* @RESPONSE: sample.txt, Example Successful Response, 200 OK
'''
return "This is an example application, please see the help section"
@app.route('/db/user/<username>')
@app.route('/db/user', defaults={"username": None})
def query_user(username):
"""
Returns a json structure containing user information.
Takes in a username either as a GET parameter or in the
url structure. It retrieves basic information including the username,
group, user id, and location. Case of the user name does not matter,
as the provided user name is forced to lower case prior to querying.
* @RESPONSE: db-user-query.json, Example Successful response, 200 OK
* query: /db/users/bob
* description: Querying for the user bob and gathering user information.
* @RESPONSE: db-users-query-error.json, Example User Does not exist, 400 BAD Response
* query: /db/users/gizmo
* description: Querying a non-existent user.
* @RESPONSE: db-users-query-no-param.json, Example Invalid Parameters, 400 BAD Response
* query: /db/users
* description: No username is specified in the query.
* @GROUP: Database, User Management
* @URL 1 username: Specify the username to retrieve from the database.
* @GET username: Specify the username to retrieve from the database.
* @CODE 200: Successful response
* @CODE 400: Bad request: the queried user does not exist, or no parameters were provided.
"""
return "user query"
@app.route('/db/users')
def query_users():
"""
Returns a list of all users.
Queries the database and returns an array
of all valid user names from the database.
* @RESPONSE: db-query-users.json, Example Successful Response, 200 OK
* query: /db/users
* description: A query to list all users.
* @RESPONSE: db-query-users-location.json, Example Successful Location Response, 200 OK
* query: /db/users?location=Dallas
* description: Query the Dallas location for its users.
* @RESPONSE: db-query-users-group.xml, Example Successful Group Response, 200 OK
* query: /db/users?group=it
* description: Query the group 'it' for its users. Due to antiquated systems this is in xml.
* @GET group: Specify the group, you wish to get a list of users for.
* @GET location: Specify the location you wish to get a list of users for.
* @CODE 200: A successful response.
"""
return "users"
if __name__ == "__main__":
doc = ReDoc(app=app, respfolder=os.path.join(os.getcwd(), 'responses'))
doc.doc_app()
frontend = doc.create_frontend()
frontend.debug = True
api = doc.create_help_api()
api.debug = True
application = DispatcherMiddleware(app, {'/help': frontend,
'/help/api': api
})
run_simple('0.0.0.0', 5000, application, use_reloader=True, use_debugger=True)
|
mit
| 8,131,353,653,670,957,000
| 37.895833
| 99
| 0.697108
| false
| 3.959703
| false
| false
| false
|
cykl/hprof2flamegraph
|
stackcollapse_hpl.py
|
1
|
8006
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2014, Clément MATHIEU
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from __future__ import print_function
import struct
import collections
import sys
import re
Method = collections.namedtuple('Method', ['id', 'file_name', 'class_name', 'method_name'])
Trace = collections.namedtuple('Trace', ['thread_id', 'frame_count', 'frames'])
Frame = collections.namedtuple('Frame', ['bci', 'line_no', 'method_id'])
AGENT_ERRORS = [
"No Java Frames[ERR=0]",
"No class load[ERR=-1]",
"GC Active[ERR=-2]",
"Unknown not Java[ERR=-3]",
"Not walkable not Java[ERR=-4]",
"Unknown Java[ERR=-5]",
"Not walkable Java[ERR=-6]",
"Unknown state[ERR=-7]",
"Thread exit[ERR=-8]",
"Deopt[ERR=-9]",
"Safepoint[ERR=-10]",
]
def parse_hpl_string(fh):
(length,) = struct.unpack('>i', fh.read(4))
(val,) = struct.unpack('>%ss' % length, fh.read(length))
return val.decode('utf-8')
def parse_hpl(filename):
traces = []
methods = {}
for (index, error) in enumerate(AGENT_ERRORS):
method_id = -1 - index
methods[method_id] = Method(method_id, "", "/Error/", error)
with open(filename, 'rb') as fh:
while True:
marker_str = fh.read(1)
if not marker_str:
break
(marker,) = struct.unpack('>b', marker_str)
if marker == 0:
break
elif marker == 1 or marker == 11:
(frame_count, thread_id) = struct.unpack('>iQ', fh.read(4 + 8))
# marker is 11, read the time
if marker == 11:
(time_sec, time_nano) = struct.unpack('>QQ', fh.read(8+8))
if frame_count > 0:
traces.append(Trace(thread_id, frame_count, []))
else: # Negative frame_count values are used to report errors
if abs(frame_count) > len(AGENT_ERRORS):
method_id = frame_count - 1
methods[method_id] = Method(method_id, "Unknown err[ERR=%s]" % frame_count)
frame = Frame(None, None, frame_count - 1)
traces.append(Trace(thread_id, 1, [frame]))
elif marker == 2:
(bci, method_id) = struct.unpack('>iQ', fh.read(4 + 8))
frame = Frame(bci, None, method_id)
traces[-1].frames.append(frame)
elif marker == 21:
(bci, line_no, method_id) = struct.unpack('>iiQ', fh.read(4 + 4 + 8))
if line_no < 0: # Negative line_no values mean the line number is not available (-100 & -101)
line_no = None
frame = Frame(bci, line_no, method_id)
traces[-1].frames.append(frame)
elif marker == 3:
(method_id,) = struct.unpack('>Q', fh.read(8))
file_name = parse_hpl_string(fh)
class_name = parse_hpl_string(fh)
method_name = parse_hpl_string(fh)
methods[method_id] = Method(method_id, file_name, class_name, method_name)
elif marker == 31:
(method_id,) = struct.unpack('>Q', fh.read(8))
file_name = parse_hpl_string(fh)
class_name = parse_hpl_string(fh)
class_name_generic = parse_hpl_string(fh)
method_name = parse_hpl_string(fh)
method_signature = parse_hpl_string(fh)
method_signature_generic = parse_hpl_string(fh)
methods[method_id] = Method(method_id, file_name, class_name, method_name)
elif marker == 4: # 4 means thread meta, not useful in flame graph
(thread_id,) = struct.unpack('>Q', fh.read(8))
thread_name = parse_hpl_string(fh)
else:
raise Exception("Unexpected marker: %s at offset %s" % (marker, fh.tell()))
return traces, methods
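# Hedged illustration of the binary layout handled above: every record
# starts with a one-byte marker followed by big-endian payloads. A method
# record (marker 3), for instance, is a '>bQ' header plus three
# length-prefixed UTF-8 strings; the values below are made up:
#     struct.pack('>bQ', 3, 42) + b''.join(
#         struct.pack('>i', len(s)) + s
#         for s in (b'Foo.java', b'LFoo;', b'bar'))
# parse_hpl_string() reads back exactly that '>i' length + bytes encoding.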
def abbreviate_package(class_name):
match_object = re.match(r'(?P<package>.*\.)(?P<remainder>[^.]+\.[^.]+)$', class_name)
if match_object is None:
return class_name
shortened_pkg = re.sub(r'(\w)\w*', r'\1', match_object.group('package'))
return "%s%s" % (shortened_pkg, match_object.group('remainder'))
def get_method_name(method, shorten_pkgs):
class_name = method.class_name[1:-1].replace('/', '.')
if shorten_pkgs:
class_name = abbreviate_package(class_name)
method_name = class_name
method_name += '.' + method.method_name
return method_name
def format_frame(frame, method, discard_lineno, shorten_pkgs):
formatted_frame = get_method_name(method, shorten_pkgs)
if not discard_lineno and frame.line_no:
formatted_frame += ':' + str(frame.line_no)
return formatted_frame
def main(argv=None, out=sys.stdout):
import argparse
parser = argparse.ArgumentParser(description='Convert an hpl file into Flamegraph collapsed stacks')
parser.add_argument('hpl_file', metavar='FILE', type=str, nargs=1, help='A hpl file')
parser.add_argument('--discard-lineno', dest='discard_lineno', action='store_true', help='Remove line numbers')
parser.add_argument('--discard-thread', dest='discard_thread', action='store_true', help='Remove thread info')
parser.add_argument('--shorten-pkgs', dest='shorten_pkgs', action='store_true', help='Shorten package names')
parser.add_argument('--skip-trace-on-missing-frame', dest='skip_trace_on_missing_frame', action='store_true', help='Continue processing even if frames are missing')
args = parser.parse_args(argv)
filename = args.hpl_file[0]
(traces, methods) = parse_hpl(filename)
folded_stacks = collections.defaultdict(int)
for trace in traces:
frames = []
skip_trace = False
for frame in trace.frames:
if args.skip_trace_on_missing_frame and frame.method_id not in methods:
sys.stderr.write("skipped missing frame %s\n" % frame.method_id)
skip_trace = True
break
frames.append(format_frame(
frame,
methods[frame.method_id],
args.discard_lineno,
args.shorten_pkgs
))
if skip_trace:
continue
if not args.discard_thread:
frames.append('Thread %s' % trace.thread_id)
folded_stack = ';'.join(reversed(frames))
folded_stacks[folded_stack] += 1
for folded_stack in sorted(folded_stacks):
sample_count = folded_stacks[folded_stack]
print("%s %s" % (folded_stack, sample_count), file=out)
return 0
if __name__ == '__main__':
main()
|
bsd-2-clause
| 1,047,224,140,835,399,000
| 39.226131
| 168
| 0.602998
| false
| 3.78666
| false
| false
| false
|
AdrienGuille/pyMABED
|
build_event_browser.py
|
1
|
3036
|
# coding: utf-8
# std
import time
import argparse
import os
import shutil
# web
from flask import Flask, render_template
from flask_frozen import Freezer
# mabed
import mabed.utils as utils
__author__ = "Adrien Guille"
__email__ = "adrien.guille@univ-lyon2.fr"
event_browser = Flask(__name__, static_folder='browser/static', template_folder='browser/templates')
@event_browser.route('/')
def index():
return render_template('template.html',
events=event_descriptions,
event_impact='[' + ','.join(impact_data) + ']',
k=mabed.k,
theta=mabed.theta,
sigma=mabed.sigma)
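# Shape of the data handed to the template (hedged illustration; timestamps
# are epoch milliseconds): each impact_data entry is a {"key", "values"}
# series such as
#     {"key": "earthquake", "values": [[1398902400000, 0], [1398988800000, 0.42]]}
# and event_impact joins all of them into a single JSON array string.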
if __name__ == '__main__':
p = argparse.ArgumentParser(description='Build event browser')
p.add_argument('i', metavar='input', type=str, help='Input pickle file')
p.add_argument('--o', metavar='output', type=str, help='Output html directory', default=None)
args = p.parse_args()
print('Loading events from %s...' % args.i)
mabed = utils.load_events(args.i)
# format data
print('Preparing data...')
event_descriptions = []
impact_data = []
formatted_dates = []
for i in range(0, mabed.corpus.time_slice_count):
formatted_dates.append(int(time.mktime(mabed.corpus.to_date(i).timetuple()))*1000)
for event in mabed.events:
mag = event[0]
main_term = event[2]
raw_anomaly = event[4]
formatted_anomaly = []
time_interval = event[1]
related_terms = []
for related_term in event[3]:
related_terms.append(related_term[0]+' ('+str("{0:.2f}".format(related_term[1]))+')')
event_descriptions.append((mag,
str(mabed.corpus.to_date(time_interval[0])),
str(mabed.corpus.to_date(time_interval[1])),
main_term,
', '.join(related_terms)))
for i in range(0, mabed.corpus.time_slice_count):
value = 0
if time_interval[0] <= i <= time_interval[1]:
value = raw_anomaly[i]
if value < 0:
value = 0
formatted_anomaly.append('['+str(formatted_dates[i])+','+str(value)+']')
impact_data.append('{"key":"' + main_term + '", "values":[' + ','.join(formatted_anomaly) + ']}')
if args.o is not None:
if os.path.exists(args.o):
shutil.rmtree(args.o)
os.makedirs(args.o)
print('Freezing event browser into %s...' % args.o)
event_browser_freezer = Freezer(event_browser)
event_browser.config.update(
FREEZER_DESTINATION=args.o,
FREEZER_RELATIVE_URLS=True,
)
event_browser.debug = False
event_browser.config['ASSETS_DEBUG'] = False
event_browser_freezer.freeze()
print('Done.')
else:
event_browser.run(debug=False, host='localhost', port=2016)
|
mit
| -3,382,823,232,691,615,000
| 34.717647
| 105
| 0.553689
| false
| 3.657831
| false
| false
| false
|
mganeva/mantid
|
qt/python/mantidqt/widgets/codeeditor/test/test_multifileinterpreter.py
|
1
|
2516
|
# Mantid Repository : https://github.com/mantidproject/mantid
#
# Copyright © 2017 ISIS Rutherford Appleton Laboratory UKRI,
# NScD Oak Ridge National Laboratory, European Spallation Source
# & Institut Laue - Langevin
# SPDX - License - Identifier: GPL - 3.0 +
# This file is part of the mantid workbench.
#
#
from __future__ import (absolute_import, unicode_literals)
import unittest
from mantid.py3compat import mock
from mantidqt.utils.qt.testing import GuiTest
from mantidqt.utils.qt.testing.qt_widget_finder import QtWidgetFinder
from mantidqt.widgets.codeeditor.multifileinterpreter import MultiPythonFileInterpreter
MANTID_API_IMPORT = "from mantid.simpleapi import *\n"
PERMISSION_BOX_FUNC = ('mantidqt.widgets.codeeditor.scriptcompatibility.'
'permission_box_to_prepend_import')
class MultiPythonFileInterpreterTest(GuiTest, QtWidgetFinder):
def test_default_contains_single_editor(self):
widget = MultiPythonFileInterpreter()
self.assertEqual(1, widget.editor_count)
def test_add_editor(self):
widget = MultiPythonFileInterpreter()
self.assertEqual(1, widget.editor_count)
widget.append_new_editor()
self.assertEqual(2, widget.editor_count)
def test_open_file_in_new_tab_import_added(self):
test_string = "Test file\nLoad()"
widget = MultiPythonFileInterpreter()
mock_open_func = mock.mock_open(read_data=test_string)
with mock.patch(widget.__module__ + '.open', mock_open_func, create=True):
with mock.patch(PERMISSION_BOX_FUNC, lambda: True):
widget.open_file_in_new_tab(test_string)
self.assertEqual(widget.current_editor().editor.isModified(), True,
msg="Script not marked as modified.")
self.assertIn(MANTID_API_IMPORT, widget.current_editor().editor.text(),
msg="'simpleapi' import not added to script.")
def test_open_file_in_new_tab_no_import_added(self):
test_string = "Test file\n"
widget = MultiPythonFileInterpreter()
mock_open_func = mock.mock_open(read_data=test_string)
with mock.patch(widget.__module__ + '.open', mock_open_func, create=True):
with mock.patch(PERMISSION_BOX_FUNC, lambda: True):
widget.open_file_in_new_tab(test_string)
self.assertNotIn(MANTID_API_IMPORT,
widget.current_editor().editor.text())
if __name__ == '__main__':
unittest.main()
|
gpl-3.0
| -4,618,802,130,108,384,000
| 40.933333
| 87
| 0.675278
| false
| 3.62536
| true
| false
| false
|
matus-stehlik/glowing-batman
|
base/templatetags/roots_tags.py
|
1
|
1874
|
from django import template
from django.core.urlresolvers import reverse
from django.conf import settings
from django.template.base import TemplateSyntaxError
register = template.Library()
@register.simple_tag
def url_active(request, urls, *args, **kwargs):
if request.path in (reverse(url, args=list(*args), kwargs=dict(**kwargs))
for url in urls.split()):
return "active"
else:
return ""
@register.filter
def remove_uncomplete_latex(text):
    # An even number of segments separated by $$ means an incomplete
    # display equation
if len(text.split('$$')) % 2 == 0:
# Return the original text
return '$$'.join(text.split('$$')[:-1])
elif len(text.split('$')) % 2 == 0:
return '$'.join(text.split('$')[:-1])
else:
return text
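# Behaviour sketch for the filter above (hypothetical inputs): an unmatched
# display or inline delimiter is stripped together with its tail, while
# balanced text passes through unchanged:
#   remove_uncomplete_latex('a $$x$$ b $$y')  ->  'a $$x$$ b '
#   remove_uncomplete_latex('a $x$ b $y')     ->  'a $x$ b '
#   remove_uncomplete_latex('a $x$ b')        ->  'a $x$ b'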
class DefineNode(template.Node):
def __init__(self, name, nodelist):
self.name = name
self.nodelist = nodelist
def __repr__(self):
return "<DefineNode>"
def render(self, context):
context[self.name] = self.nodelist.render(context)
return ''
@register.tag
def define(parser, token):
"""
Adds a name to the context for referencing an arbitrarily defined block
of template code.
For example:
{% define my_block %}
This is the content.
{% enddefine %}
Now anywhere in the template:
{{ my_block }}
"""
bits = list(token.split_contents())
if len(bits) != 2:
raise TemplateSyntaxError("Expected format is: {% define variable %}")
name = bits[1]
nodelist = parser.parse(('enddefine',))
parser.delete_first_token()
return DefineNode(name, nodelist)
@register.filter
def access(value, arg):
return value.get(arg, {})
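# Template usage sketch (hypothetical context variables "scores" and "name"):
#   {{ scores|access:name }}   renders scores.get(name, {})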
@register.simple_tag
def settings_value(name):
return getattr(settings, name, "")
|
mit
| -6,420,556,990,031,565,000
| 22.425
| 78
| 0.61953
| false
| 3.978769
| false
| false
| false
|
lzhjie/benchmark
|
client_redis.py
|
1
|
1166
|
# coding: utf-8
# Copyright (C) zhongjie luo <l.zhjie@qq.com>
import redis
from db_bench import DbConnection, multi_process_bench, Options
class StrictRedis(DbConnection):
def __init__(self, options):
super(StrictRedis, self).__init__(options)
self.__db = 0
self.__client = None
def connect(self):
self.__client = redis.StrictRedis(self.host, self.port, self.__db)
def disconnect(self):
self.__client = None
def insert(self, record):
k, v = record[0]
return self.__client.set(str(k), str(v), nx=True) == True
def search(self, record):
k, v = record[0]
return self.__client.get(str(k)) == str(v)
def delete(self, record):
k, v = record[0]
return self.__client.delete(str(k)) == True
def tear_down(self):
self.__client.flushdb()
def api_example():
pass
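# Manual usage sketch for the class above (hypothetical option values; the
# db_bench Options/DbConnection interface is assumed from the __main__ block
# below, and records are sequences of (key, value) pairs as used in insert()):
#   option = Options()
#   option.set("port", 6379)
#   client = StrictRedis(option)
#   client.connect()
#   client.insert([("k", "v")])
#   client.search([("k", "v")])
#   client.delete([("k", "v")])
#   client.disconnect()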
if __name__ == "__main__":
option = Options()
option.set("port", 6379)
if option.parse_option() is False:
exit(100)
print(option)
result = multi_process_bench(option, StrictRedis)
# print result
|
mit
| 9,135,346,619,652,901,000
| 22.808511
| 74
| 0.56518
| false
| 3.544073
| false
| false
| false
|
jaeilepp/mne-python
|
mne/simulation/tests/test_raw.py
|
1
|
13124
|
# Authors: Mark Wronkiewicz <wronk@uw.edu>
# Yousra Bekhti <yousra.bekhti@gmail.com>
# Eric Larson <larson.eric.d@gmail.com>
#
# License: BSD (3-clause)
import os.path as op
import warnings
from copy import deepcopy
import numpy as np
from numpy.testing import assert_allclose, assert_array_equal
from nose.tools import assert_true, assert_raises, assert_equal
from mne import (read_source_spaces, pick_types, read_trans, read_cov,
make_sphere_model, create_info, setup_volume_source_space,
find_events, Epochs, fit_dipole, transform_surface_to,
make_ad_hoc_cov, SourceEstimate, setup_source_space)
from mne.chpi import _calculate_chpi_positions, read_head_pos, _get_hpi_info
from mne.tests.test_chpi import _assert_quats
from mne.datasets import testing
from mne.simulation import simulate_sparse_stc, simulate_raw
from mne.io import read_raw_fif, RawArray
from mne.time_frequency import psd_welch
from mne.utils import _TempDir, run_tests_if_main, slow_test
warnings.simplefilter('always')
data_path = testing.data_path(download=False)
raw_fname = op.join(data_path, 'MEG', 'sample', 'sample_audvis_trunc_raw.fif')
cov_fname = op.join(data_path, 'MEG', 'sample',
'sample_audvis_trunc-cov.fif')
trans_fname = op.join(data_path, 'MEG', 'sample',
'sample_audvis_trunc-trans.fif')
subjects_dir = op.join(data_path, 'subjects')
bem_path = op.join(subjects_dir, 'sample', 'bem')
src_fname = op.join(bem_path, 'sample-oct-2-src.fif')
bem_fname = op.join(bem_path, 'sample-320-320-320-bem-sol.fif')
raw_chpi_fname = op.join(data_path, 'SSS', 'test_move_anon_raw.fif')
pos_fname = op.join(data_path, 'SSS', 'test_move_anon_raw_subsampled.pos')
def _make_stc(raw, src):
"""Helper to make a STC."""
seed = 42
sfreq = raw.info['sfreq'] # Hz
tstep = 1. / sfreq
n_samples = len(raw.times) // 10
times = np.arange(0, n_samples) * tstep
stc = simulate_sparse_stc(src, 10, times, random_state=seed)
return stc
def _get_data():
"""Helper to get some starting data."""
# raw with ECG channel
raw = read_raw_fif(raw_fname).crop(0., 5.0).load_data()
data_picks = pick_types(raw.info, meg=True, eeg=True)
other_picks = pick_types(raw.info, meg=False, stim=True, eog=True)
picks = np.sort(np.concatenate((data_picks[::16], other_picks)))
raw = raw.pick_channels([raw.ch_names[p] for p in picks])
raw.info.normalize_proj()
ecg = RawArray(np.zeros((1, len(raw.times))),
create_info(['ECG 063'], raw.info['sfreq'], 'ecg'))
for key in ('dev_head_t', 'buffer_size_sec', 'highpass', 'lowpass', 'dig'):
ecg.info[key] = raw.info[key]
raw.add_channels([ecg])
src = read_source_spaces(src_fname)
trans = read_trans(trans_fname)
sphere = make_sphere_model('auto', 'auto', raw.info)
stc = _make_stc(raw, src)
return raw, src, stc, trans, sphere
@testing.requires_testing_data
def test_simulate_raw_sphere():
"""Test simulation of raw data with sphere model."""
seed = 42
raw, src, stc, trans, sphere = _get_data()
assert_true(len(pick_types(raw.info, meg=False, ecg=True)) == 1)
# head pos
head_pos_sim = dict()
# these will be at 1., 2., ... sec
shifts = [[0.001, 0., -0.001], [-0.001, 0.001, 0.]]
for time_key, shift in enumerate(shifts):
# Create 4x4 matrix transform and normalize
temp_trans = deepcopy(raw.info['dev_head_t'])
temp_trans['trans'][:3, 3] += shift
head_pos_sim[time_key + 1.] = temp_trans['trans']
#
# Test raw simulation with basic parameters
#
raw_sim = simulate_raw(raw, stc, trans, src, sphere, read_cov(cov_fname),
head_pos=head_pos_sim,
blink=True, ecg=True, random_state=seed)
raw_sim_2 = simulate_raw(raw, stc, trans_fname, src_fname, sphere,
cov_fname, head_pos=head_pos_sim,
blink=True, ecg=True, random_state=seed)
assert_array_equal(raw_sim_2[:][0], raw_sim[:][0])
# Test IO on processed data
tempdir = _TempDir()
test_outname = op.join(tempdir, 'sim_test_raw.fif')
raw_sim.save(test_outname)
raw_sim_loaded = read_raw_fif(test_outname, preload=True)
assert_allclose(raw_sim_loaded[:][0], raw_sim[:][0], rtol=1e-6, atol=1e-20)
del raw_sim, raw_sim_2
# with no cov (no noise) but with artifacts, most time periods should match
# but the EOG/ECG channels should not
for ecg, eog in ((True, False), (False, True), (True, True)):
raw_sim_3 = simulate_raw(raw, stc, trans, src, sphere,
cov=None, head_pos=head_pos_sim,
blink=eog, ecg=ecg, random_state=seed)
raw_sim_4 = simulate_raw(raw, stc, trans, src, sphere,
cov=None, head_pos=head_pos_sim,
blink=False, ecg=False, random_state=seed)
picks = np.arange(len(raw.ch_names))
diff_picks = pick_types(raw.info, meg=False, ecg=ecg, eog=eog)
these_picks = np.setdiff1d(picks, diff_picks)
close = np.isclose(raw_sim_3[these_picks][0],
raw_sim_4[these_picks][0], atol=1e-20)
assert_true(np.mean(close) > 0.7)
far = ~np.isclose(raw_sim_3[diff_picks][0],
raw_sim_4[diff_picks][0], atol=1e-20)
assert_true(np.mean(far) > 0.99)
del raw_sim_3, raw_sim_4
# make sure it works with EEG-only and MEG-only
raw_sim_meg = simulate_raw(raw.copy().pick_types(meg=True, eeg=False),
stc, trans, src, sphere, cov=None,
ecg=True, blink=True, random_state=seed)
raw_sim_eeg = simulate_raw(raw.copy().pick_types(meg=False, eeg=True),
stc, trans, src, sphere, cov=None,
ecg=True, blink=True, random_state=seed)
raw_sim_meeg = simulate_raw(raw.copy().pick_types(meg=True, eeg=True),
stc, trans, src, sphere, cov=None,
ecg=True, blink=True, random_state=seed)
assert_allclose(np.concatenate((raw_sim_meg[:][0], raw_sim_eeg[:][0])),
raw_sim_meeg[:][0], rtol=1e-7, atol=1e-20)
del raw_sim_meg, raw_sim_eeg, raw_sim_meeg
# check that different interpolations are similar given small movements
raw_sim = simulate_raw(raw, stc, trans, src, sphere, cov=None,
head_pos=head_pos_sim, interp='linear')
raw_sim_hann = simulate_raw(raw, stc, trans, src, sphere, cov=None,
head_pos=head_pos_sim, interp='hann')
assert_allclose(raw_sim[:][0], raw_sim_hann[:][0], rtol=1e-1, atol=1e-14)
del raw_sim, raw_sim_hann
# Make impossible transform (translate up into helmet) and ensure failure
head_pos_sim_err = deepcopy(head_pos_sim)
head_pos_sim_err[1.][2, 3] -= 0.1 # z trans upward 10cm
assert_raises(RuntimeError, simulate_raw, raw, stc, trans, src, sphere,
ecg=False, blink=False, head_pos=head_pos_sim_err)
assert_raises(RuntimeError, simulate_raw, raw, stc, trans, src,
bem_fname, ecg=False, blink=False,
head_pos=head_pos_sim_err)
# other degenerate conditions
assert_raises(TypeError, simulate_raw, 'foo', stc, trans, src, sphere)
assert_raises(TypeError, simulate_raw, raw, 'foo', trans, src, sphere)
assert_raises(ValueError, simulate_raw, raw, stc.copy().crop(0, 0),
trans, src, sphere)
stc_bad = stc.copy()
stc_bad.tstep += 0.1
assert_raises(ValueError, simulate_raw, raw, stc_bad, trans, src, sphere)
assert_raises(RuntimeError, simulate_raw, raw, stc, trans, src, sphere,
chpi=True) # no cHPI info
assert_raises(ValueError, simulate_raw, raw, stc, trans, src, sphere,
interp='foo')
assert_raises(TypeError, simulate_raw, raw, stc, trans, src, sphere,
head_pos=1.)
assert_raises(RuntimeError, simulate_raw, raw, stc, trans, src, sphere,
head_pos=pos_fname) # ends up with t>t_end
head_pos_sim_err = deepcopy(head_pos_sim)
head_pos_sim_err[-1.] = head_pos_sim_err[1.] # negative time
assert_raises(RuntimeError, simulate_raw, raw, stc, trans, src, sphere,
head_pos=head_pos_sim_err)
raw_bad = raw.copy()
raw_bad.info['dig'] = None
assert_raises(RuntimeError, simulate_raw, raw_bad, stc, trans, src, sphere,
blink=True)
@slow_test
@testing.requires_testing_data
def test_simulate_raw_bem():
"""Test simulation of raw data with BEM."""
raw, src, stc, trans, sphere = _get_data()
src = setup_source_space('sample', 'oct1', subjects_dir=subjects_dir)
for s in src:
s['nuse'] = 3
s['vertno'] = src[1]['vertno'][:3]
s['inuse'].fill(0)
s['inuse'][s['vertno']] = 1
# use different / more complete STC here
vertices = [s['vertno'] for s in src]
stc = SourceEstimate(np.eye(sum(len(v) for v in vertices)), vertices,
0, 1. / raw.info['sfreq'])
raw_sim_sph = simulate_raw(raw, stc, trans, src, sphere, cov=None)
raw_sim_bem = simulate_raw(raw, stc, trans, src, bem_fname, cov=None,
n_jobs=2)
# some components (especially radial) might not match that well,
# so just make sure that most components have high correlation
assert_array_equal(raw_sim_sph.ch_names, raw_sim_bem.ch_names)
picks = pick_types(raw.info, meg=True, eeg=True)
n_ch = len(picks)
corr = np.corrcoef(raw_sim_sph[picks][0], raw_sim_bem[picks][0])
assert_array_equal(corr.shape, (2 * n_ch, 2 * n_ch))
med_corr = np.median(np.diag(corr[:n_ch, -n_ch:]))
assert_true(med_corr > 0.65, msg=med_corr)
# do some round-trip localization
for s in src:
transform_surface_to(s, 'head', trans)
locs = np.concatenate([s['rr'][s['vertno']] for s in src])
tmax = (len(locs) - 1) / raw.info['sfreq']
cov = make_ad_hoc_cov(raw.info)
# The tolerance for the BEM is surprisingly high (28) but I get the same
# result when using MNE-C and Xfit, even when using a proper 5120 BEM :(
for use_raw, bem, tol in ((raw_sim_sph, sphere, 1),
(raw_sim_bem, bem_fname, 31)):
events = find_events(use_raw, 'STI 014')
assert_equal(len(locs), 6)
evoked = Epochs(use_raw, events, 1, 0, tmax, baseline=None).average()
assert_equal(len(evoked.times), len(locs))
fits = fit_dipole(evoked, cov, bem, trans, min_dist=1.)[0].pos
diffs = np.sqrt(np.sum((locs - fits) ** 2, axis=-1)) * 1000
med_diff = np.median(diffs)
assert_true(med_diff < tol, msg='%s: %s' % (bem, med_diff))
@slow_test
@testing.requires_testing_data
def test_simulate_raw_chpi():
"""Test simulation of raw data with cHPI."""
raw = read_raw_fif(raw_chpi_fname, allow_maxshield='yes')
picks = np.arange(len(raw.ch_names))
picks = np.setdiff1d(picks, pick_types(raw.info, meg=True, eeg=True)[::4])
raw.load_data().pick_channels([raw.ch_names[pick] for pick in picks])
raw.info.normalize_proj()
sphere = make_sphere_model('auto', 'auto', raw.info)
# make sparse spherical source space
sphere_vol = tuple(sphere['r0'] * 1000.) + (sphere.radius * 1000.,)
src = setup_volume_source_space('sample', sphere=sphere_vol, pos=70.)
stc = _make_stc(raw, src)
# simulate data with cHPI on
raw_sim = simulate_raw(raw, stc, None, src, sphere, cov=None, chpi=False,
interp='zero')
# need to trim extra samples off this one
raw_chpi = simulate_raw(raw, stc, None, src, sphere, cov=None, chpi=True,
head_pos=pos_fname, interp='zero')
# test cHPI indication
hpi_freqs, hpi_pick, hpi_ons = _get_hpi_info(raw.info)
assert_allclose(raw_sim[hpi_pick][0], 0.)
assert_allclose(raw_chpi[hpi_pick][0], hpi_ons.sum())
# test that the cHPI signals make some reasonable values
picks_meg = pick_types(raw.info, meg=True, eeg=False)
picks_eeg = pick_types(raw.info, meg=False, eeg=True)
for picks in [picks_meg[:3], picks_eeg[:3]]:
psd_sim, freqs_sim = psd_welch(raw_sim, picks=picks)
psd_chpi, freqs_chpi = psd_welch(raw_chpi, picks=picks)
assert_array_equal(freqs_sim, freqs_chpi)
freq_idx = np.sort([np.argmin(np.abs(freqs_sim - f))
for f in hpi_freqs])
if picks is picks_meg:
assert_true((psd_chpi[:, freq_idx] >
100 * psd_sim[:, freq_idx]).all())
else:
assert_allclose(psd_sim, psd_chpi, atol=1e-20)
# test localization based on cHPI information
quats_sim = _calculate_chpi_positions(raw_chpi, t_step_min=10.)
quats = read_head_pos(pos_fname)
_assert_quats(quats, quats_sim, dist_tol=5e-3, angle_tol=3.5)
run_tests_if_main()
|
bsd-3-clause
| -239,478,660,592,898,980
| 45.211268
| 79
| 0.604846
| false
| 3.036557
| true
| false
| false
|
CMUSV-VisTrails/WorkflowRecommendation
|
vistrails/core/cache/utils.py
|
1
|
2304
|
###############################################################################
##
## Copyright (C) 2006-2011, University of Utah.
## All rights reserved.
## Contact: contact@vistrails.org
##
## This file is part of VisTrails.
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are met:
##
## - Redistributions of source code must retain the above copyright notice,
## this list of conditions and the following disclaimer.
## - Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in the
## documentation and/or other materials provided with the distribution.
## - Neither the name of the University of Utah nor the names of its
## contributors may be used to endorse or promote products derived from
## this software without specific prior written permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
## PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
##
###############################################################################
"""Helper functions for cache package."""
try:
import hashlib
sha_hash = hashlib.sha1
except ImportError:
import sha
sha_hash = sha.new
##############################################################################
def hash_list(lst, hasher_f, constant_hasher_map={}):
hasher = sha_hash()
hash_l = [hasher_f(el, constant_hasher_map) for el in lst]
hash_l.sort()
for hel in hash_l: hasher.update(hel)
return hasher.digest()
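# Usage sketch (hypothetical hasher_f): per-element hashes are sorted before
# being folded into the digest, so the result is order-independent:
#   def my_hasher(el, constant_hasher_map):
#       return sha_hash(str(el)).digest()
#   assert hash_list([1, 2, 3], my_hasher) == hash_list([3, 1, 2], my_hasher)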
|
bsd-3-clause
| 1,477,760,557,045,283,600
| 45.08
| 79
| 0.659722
| false
| 4.580517
| false
| false
| false
|
a67878813/script
|
flvrepair.py
|
1
|
3480
|
# coding: utf-8
#2019.11.24 refixed in ubuntu19.10
# yamdi must be installed before use:
#sudo apt install yamdi
#ubuntu 16.04LTS
# Example mount command for a Windows share:
#sudo mount -t cifs -o username="USERNAME",password="PASSWORD",uid=1000 //192.168.2.90/raid5_5-9/直播录像 /mnt2
# If the mount point does not exist, in a terminal:
#sudo mkdir mnt2
#sudo chown -R LINUX_USER:LINUX_USER mnt2
# Usage:
# 1. Set the directory the script walks for flv files (the contents variable).
# 2. cd into that directory in a terminal, then run:
#python3 flvrepair2.py
import os
import os.path
import json
import random
import pickle
import time
from termcolor import colored
# modules for child processes
import subprocess
# contents is the directory to walk for flv files
contents = "/mnt"
#******************************
error_counts = 0
s =[]
for root, dirs, files in os.walk(contents):
for name in files:
s.append(os.path.join(root, name))
# optional debug print, may be left commented out
#print(s)
end_list = []
try:
with open(contents+'/done_list.json', 'r') as r:
done_list = json.load(r)
except FileNotFoundError:
print("donelist is not exist")
done_list = []
with open(contents+'/done_list.json', 'w') as f:
f.write(json.dumps(done_list))
for line in s:
    # flv files not yet repaired are appended to end_list
if (".flv" in line) and (line not in done_list):
end_list.append(line)
print_list=end_list[:3]
for i in print_list:
print(i)
print(colored((" 未添加meta数据的flv文件数 = " + str(len(end_list))),"cyan"))
# check whether the temp directory exists
if os.path.isdir(contents+"/_temp"):
pass
else:
os.mkdir(contents+"/_temp")
print("临时目录已建立")
#
#os.remove(contents+"/_temp")
for line in end_list:
#
try:
ctime = os.path.getctime(line)
except :
error_counts +=1
continue
#
salt_ = random.randint(110, 880)
print(colored("进行meta注入 = "+str(line),"green"))
try:
child = subprocess.Popen(["/usr/bin/yamdi","-i",line,"-o",contents+"/_temp/output.tmp"],stderr=subprocess.STDOUT)
child.wait()
except:
error_counts +=1
print(colored("meta信息写入错误","red"))
print(colored(line,"red"))
print(child.stderr)
continue
time.sleep(10)
try:
child2 = subprocess.Popen(["mv","-f",contents+"/_temp/output.tmp",line],stderr=subprocess.STDOUT)
        child2.wait() # wait for the child process to finish before the parent continues
except :
error_counts +=1
print(colored("mv错误","red"))
print(colored(line,"red"))
continue
time.sleep(10)
#
try:
os.utime(line, (ctime,ctime))
except :
error_counts +=1
continue
print(colored("meta注入完成 = "+str(line),"green"))
print(colored("next","green"))
    # update the done list
try:
with open(contents+'/done_list.json', 'r') as r:
done_list = json.load(r)
except:
continue
done_list.append(line)
with open(contents+'/done_list.json', 'w') as f:
f.write(json.dumps(done_list))
try:
with open(contents+'/done_list.pik', 'wb') as f:
pickle.dump(done_list,f)
except:
continue
print(colored(("Error_Counts =" + str(error_counts)),"red"))
if error_counts == 0 :
print(colored("全部完成","green"))
else:
print(colored("全部完成 with error = "+str(error_counts),"red"))
|
apache-2.0
| -2,722,938,224,545,431,000
| 20.682759
| 121
| 0.594148
| false
| 2.626566
| false
| false
| false
|
jzawar/highstreet
|
app.py
|
1
|
5268
|
#!/usr/bin/env python
from __future__ import print_function
from future.standard_library import install_aliases
install_aliases()
from urllib.parse import urlparse, urlencode
from urllib.request import urlopen, Request
from urllib.error import HTTPError
import json
import os
import re
from flask import Flask
from flask import request
from flask import make_response
# Flask app should start in global layout
app = Flask(__name__)
@app.route('/webhook', methods=['POST'])
def webhook():
req = request.get_json(silent=True, force=True)
print("Request:")
print(json.dumps(req, indent=4))
res = processRequest(req)
res = json.dumps(res, indent=4)
# print(res)
r = make_response(res)
r.headers['Content-Type'] = 'application/json'
return r
def suggestDeodrant(condition, person, city):
print(person)
url = 'http://ipinfo.io/json'
res = urlopen(url)
dt = json.load(res)
IP=dt['ip']
org=dt['org']
currCity =dt['city']
country =dt['country']
region=dt['region']
humidWeatherList = ['Cloudy','mostly cloudy (night)','mostly cloudy (day)','partly cloudy (night)','partlycloudy (day)','tornado','tropical storm','hurricane','severe thunderstorms','thunderstorms','mixed rain and snow','mixed rain and sleet','mixed snow and sleet','freezing drizzle','drizzle','freezing rain','Showers','snow flurries','light snow showers','blowing snow','snow','hail','sleet','mixed rain and hail','thundershowers','snow showers','isolated','thundershowers'];
hotWeatherList = ['dust','foggy','haze','smoky','blustery','windy','cold','clear (night)','sunny','fair (night)','fair (day)','hot','isolated thunderstorms','scattered thunderstorms','scattered thunderstorms','scattered showers','heavy snow','scattered snow showers','heavy snow','partly cloudy'];
if(condition in humidWeatherList):
if person == 'Men':
condition = 'Hmmm.. The weather in '+city+' looks '+condition+'. I suggest these <a href="/highstreetstorefront/highstreet/en/highstreet-Catalogue/Perfumes/Men-Perfumes/Moist/c/580">Anti-Perspirant Deodrants</a> for ' + person
else:
condition = 'Hmmm.. The weather in '+city+' looks '+condition+'. I suggest these <a href="/highstreetstorefront/highstreet/en/highstreet-Catalogue/Perfumes/Women-Perfumes/Moist/c/395">Anti-Perspirant Deodrants</a> for ' + person
else:
if person == 'Men':
condition = 'Hmmm.. The weather in '+city+' looks '+condition+'. I suggest these <a href="/highstreetstorefront/highstreet/en/highstreet-Catalogue/Perfumes/Men-Perfumes/Dry/c/570">Perfumed Deodrants</a> for ' + person
else:
condition = 'Hmmm.. The weather in '+city+' looks '+condition+'. I suggest these <a href="/highstreetstorefront/highstreet/en/highstreet-Catalogue/Perfumes/Women-Perfumes/Dry/c/390">Perfumed Deodrants</a> for ' + person
if currCity != city:
        condition = condition+' I see you are currently in '+currCity+'. Are you planning air travel to '+city+'?'
return condition
def processRequest(req):
if req.get("result").get("action") != "yahooWeatherForecast":
return {}
baseurl = "https://query.yahooapis.com/v1/public/yql?"
yql_query = makeYqlQuery(req)
if yql_query is None:
return {}
yql_url = baseurl + urlencode({'q': yql_query}) + "&format=json"
result = urlopen(yql_url).read()
data = json.loads(result)
res = makeWebhookResult(data,req)
return res
def makeYqlQuery(req):
result = req.get("result")
parameters = result.get("parameters")
city = parameters.get("geo-city")
if city is None:
return None
return "select * from weather.forecast where woeid in (select woeid from geo.places(1) where text='" + city + "')"
def makeWebhookResult(data, req):
query = data.get('query')
if query is None:
return {}
result = query.get('results')
if result is None:
return {}
channel = result.get('channel')
if channel is None:
return {}
item = channel.get('item')
location = channel.get('location')
units = channel.get('units')
if (location is None) or (item is None) or (units is None):
return {}
condition = item.get('condition')
if condition is None:
return {}
# print(json.dumps(item, indent=4))
#speech = "Today in " + location.get('city') + ": " + condition.get('text') + \
# ", the temperature is " + condition.get('temp') + " " + units.get('temperature')
speech = "Hmmm.. It looks " + condition.get('text') + " in " + location.get('city')
airesult = req.get("result")
parameters = airesult.get("parameters")
person = parameters.get('Person')
city = parameters.get("geo-city")
returnedSpeech = suggestDeodrant(condition.get('text'), person, city)
print(returnedSpeech)
#print("Response:")
#print(speech)
return {
"speech": returnedSpeech,
"displayText": returnedSpeech,
# "data": data,
# "contextOut": [],
"source": "apiai-weather-webhook-sample"
}
if __name__ == '__main__':
port = int(os.getenv('PORT', 5000))
print("Starting app on port %d" % port)
app.run(debug=False, port=port, host='0.0.0.0')
|
apache-2.0
| 1,609,381,478,073,361,700
| 35.583333
| 482
| 0.656986
| false
| 3.361838
| false
| false
| false
|
Salandora/OctoPrint
|
src/octoprint/server/util/__init__.py
|
1
|
6142
|
# coding=utf-8
from __future__ import absolute_import, division, print_function
__author__ = "Gina Häußge <osd@foosel.net>"
__license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html'
__copyright__ = "Copyright (C) 2014 The OctoPrint Project - Released under terms of the AGPLv3 License"
from octoprint.settings import settings
import octoprint.timelapse
import octoprint.server
from octoprint.users import ApiUser
from octoprint.util import deprecated
import flask as _flask
import flask_login
import flask_principal
from . import flask
from . import sockjs
from . import tornado
from . import watchdog
def enforceApiKeyRequestHandler():
"""
``before_request`` handler for blueprints which makes sure an API key is provided
"""
import octoprint.server
if _flask.request.method == 'OPTIONS':
# we ignore OPTIONS requests here
return
if _flask.request.endpoint and (_flask.request.endpoint == "static" or _flask.request.endpoint.endswith(".static")):
# no further handling for static resources
return
apikey = get_api_key(_flask.request)
if apikey is None:
return _flask.make_response("No API key provided", 401)
if apikey != octoprint.server.UI_API_KEY and not settings().getBoolean(["api", "enabled"]):
# api disabled => 401
return _flask.make_response("API disabled", 401)
apiKeyRequestHandler = deprecated("apiKeyRequestHandler has been renamed to enforceApiKeyRequestHandler")(enforceApiKeyRequestHandler)
def loginFromApiKeyRequestHandler():
"""
``before_request`` handler for blueprints which creates a login session for the provided api key (if available)
UI_API_KEY and app session keys are handled as anonymous keys here and ignored.
"""
apikey = get_api_key(_flask.request)
if apikey and apikey != octoprint.server.UI_API_KEY and not octoprint.server.appSessionManager.validate(apikey):
user = get_user_for_apikey(apikey)
if user is not None and not user.is_anonymous and flask_login.login_user(user, remember=False):
flask_principal.identity_changed.send(_flask.current_app._get_current_object(),
identity=flask_principal.Identity(user.get_id()))
else:
return _flask.make_response("Invalid API key", 401)
def corsRequestHandler():
"""
``before_request`` handler for blueprints which sets CORS headers for OPTIONS requests if enabled
"""
if _flask.request.method == 'OPTIONS' and settings().getBoolean(["api", "allowCrossOrigin"]):
# reply to OPTIONS request for CORS headers
return optionsAllowOrigin(_flask.request)
def corsResponseHandler(resp):
"""
``after_request`` handler for blueprints for which CORS is supported.
Sets ``Access-Control-Allow-Origin`` headers for ``Origin`` request header on response.
"""
# Allow crossdomain
allowCrossOrigin = settings().getBoolean(["api", "allowCrossOrigin"])
if _flask.request.method != 'OPTIONS' and 'Origin' in _flask.request.headers and allowCrossOrigin:
resp.headers['Access-Control-Allow-Origin'] = _flask.request.headers['Origin']
return resp
def noCachingResponseHandler(resp):
"""
``after_request`` handler for blueprints which shall set no caching headers
on their responses.
Sets ``Cache-Control``, ``Pragma`` and ``Expires`` headers accordingly
to prevent all client side caching from taking place.
"""
return flask.add_non_caching_response_headers(resp)
def noCachingExceptGetResponseHandler(resp):
"""
``after_request`` handler for blueprints which shall set no caching headers
on their responses to any requests that are not sent with method ``GET``.
See :func:`noCachingResponseHandler`.
"""
if _flask.request.method == "GET":
return flask.add_no_max_age_response_headers(resp)
else:
return flask.add_non_caching_response_headers(resp)
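# Wiring sketch (hypothetical blueprint; handlers are the functions defined in
# this module, registered as documented in their docstrings):
#   import flask
#   api = flask.Blueprint("api", __name__)
#   api.before_request(enforceApiKeyRequestHandler)
#   api.before_request(corsRequestHandler)
#   api.after_request(corsResponseHandler)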
def optionsAllowOrigin(request):
"""
Shortcut for request handling for CORS OPTIONS requests to set CORS headers.
"""
resp = _flask.current_app.make_default_options_response()
# Allow the origin which made the XHR
resp.headers['Access-Control-Allow-Origin'] = request.headers['Origin']
# Allow the actual method
resp.headers['Access-Control-Allow-Methods'] = request.headers['Access-Control-Request-Method']
# Allow for 10 seconds
resp.headers['Access-Control-Max-Age'] = "10"
# 'preflight' request contains the non-standard headers the real request will have (like X-Api-Key)
customRequestHeaders = request.headers.get('Access-Control-Request-Headers', None)
if customRequestHeaders is not None:
# If present => allow them all
resp.headers['Access-Control-Allow-Headers'] = customRequestHeaders
return resp
def get_user_for_apikey(apikey):
if settings().getBoolean(["api", "enabled"]) and apikey is not None:
if apikey == settings().get(["api", "key"]) or octoprint.server.appSessionManager.validate(apikey):
# master key or an app session key was used
return ApiUser()
elif octoprint.server.userManager.enabled:
# user key might have been used
return octoprint.server.userManager.findUser(apikey=apikey)
return None
def get_api_key(request):
# Check Flask GET/POST arguments
if hasattr(request, "values") and "apikey" in request.values:
return request.values["apikey"]
# Check Tornado GET/POST arguments
if hasattr(request, "arguments") and "apikey" in request.arguments \
and len(request.arguments["apikey"]) > 0 and len(request.arguments["apikey"].strip()) > 0:
return request.arguments["apikey"]
# Check Tornado and Flask headers
if "X-Api-Key" in request.headers.keys():
return request.headers.get("X-Api-Key")
return None
def get_plugin_hash():
from octoprint.plugin import plugin_manager
plugin_signature = lambda impl: "{}:{}".format(impl._identifier, impl._plugin_version)
template_plugins = map(plugin_signature, plugin_manager().get_implementations(octoprint.plugin.TemplatePlugin))
asset_plugins = map(plugin_signature, plugin_manager().get_implementations(octoprint.plugin.AssetPlugin))
ui_plugins = sorted(set(template_plugins + asset_plugins))
import hashlib
plugin_hash = hashlib.sha1()
plugin_hash.update(",".join(ui_plugins))
return plugin_hash.hexdigest()
|
agpl-3.0
| -1,420,406,611,270,146,000
| 32.736264
| 134
| 0.744788
| false
| 3.555298
| false
| false
| false
|
eharney/cinder
|
cinder/tests/functional/test_volumes.py
|
1
|
5004
|
# Copyright 2011 Justin Santa Barbara
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_utils import uuidutils
from cinder.tests.functional import functional_helpers
from cinder.volume import configuration
class VolumesTest(functional_helpers._FunctionalTestBase):
_vol_type_name = 'functional_test_type'
def setUp(self):
super(VolumesTest, self).setUp()
self.api.create_type(self._vol_type_name)
def _get_flags(self):
f = super(VolumesTest, self)._get_flags()
f['volume_driver'] = (
{'v': 'cinder.tests.fake_driver.FakeLoggingVolumeDriver',
'g': configuration.SHARED_CONF_GROUP})
f['default_volume_type'] = {'v': self._vol_type_name}
return f
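    # Sketch of the override format assumed from _FunctionalTestBase: each
    # entry maps a config option to {'v': <value>}, optionally with
    # {'g': <group>} to target a non-default configuration group.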
def test_get_volumes_summary(self):
"""Simple check that listing volumes works."""
volumes = self.api.get_volumes(False)
self.assertIsNotNone(volumes)
def test_get_volumes(self):
"""Simple check that listing volumes works."""
volumes = self.api.get_volumes()
self.assertIsNotNone(volumes)
def test_create_and_delete_volume(self):
"""Creates and deletes a volume."""
# Create volume
created_volume = self.api.post_volume({'volume': {'size': 1}})
self.assertTrue(uuidutils.is_uuid_like(created_volume['id']))
created_volume_id = created_volume['id']
# Check it's there
found_volume = self.api.get_volume(created_volume_id)
self.assertEqual(created_volume_id, found_volume['id'])
self.assertEqual(self._vol_type_name, found_volume['volume_type'])
# It should also be in the all-volume list
volumes = self.api.get_volumes()
volume_names = [volume['id'] for volume in volumes]
self.assertIn(created_volume_id, volume_names)
# Wait (briefly) for creation. Delay is due to the 'message queue'
found_volume = self._poll_volume_while(created_volume_id, ['creating'])
# It should be available...
self.assertEqual('available', found_volume['status'])
# Delete the volume
self.api.delete_volume(created_volume_id)
# Wait (briefly) for deletion. Delay is due to the 'message queue'
found_volume = self._poll_volume_while(created_volume_id, ['deleting'])
# Should be gone
self.assertFalse(found_volume)
def test_create_volume_with_metadata(self):
"""Creates a volume with metadata."""
# Create volume
metadata = {'key1': 'value1',
'key2': 'value2'}
created_volume = self.api.post_volume(
{'volume': {'size': 1,
'metadata': metadata}})
self.assertTrue(uuidutils.is_uuid_like(created_volume['id']))
created_volume_id = created_volume['id']
# Check it's there and metadata present
found_volume = self.api.get_volume(created_volume_id)
self.assertEqual(created_volume_id, found_volume['id'])
self.assertEqual(metadata, found_volume['metadata'])
def test_create_volume_in_availability_zone(self):
"""Creates a volume in availability_zone."""
# Create volume
availability_zone = 'nova'
created_volume = self.api.post_volume(
{'volume': {'size': 1,
'availability_zone': availability_zone}})
self.assertTrue(uuidutils.is_uuid_like(created_volume['id']))
created_volume_id = created_volume['id']
# Check it's there and availability zone present
found_volume = self.api.get_volume(created_volume_id)
self.assertEqual(created_volume_id, found_volume['id'])
self.assertEqual(availability_zone, found_volume['availability_zone'])
def test_create_and_update_volume(self):
# Create vol1
created_volume = self.api.post_volume({'volume': {
'size': 1, 'name': 'vol1'}})
self.assertEqual('vol1', created_volume['name'])
created_volume_id = created_volume['id']
# update volume
body = {'volume': {'name': 'vol-one'}}
updated_volume = self.api.put_volume(created_volume_id, body)
self.assertEqual('vol-one', updated_volume['name'])
# check for update
found_volume = self.api.get_volume(created_volume_id)
self.assertEqual(created_volume_id, found_volume['id'])
self.assertEqual('vol-one', found_volume['name'])
|
apache-2.0
| 4,194,754,191,102,223,000
| 38.09375
| 79
| 0.635092
| false
| 3.968279
| true
| false
| false
|
genzgd/Lampost-Mud
|
lampmud/mud/chat.py
|
1
|
1548
|
from lampost.gameops.action import ActionError
from lampost.di.resource import Injected, module_inject
from lampmud.mud.action import mud_action
sm = Injected('session_manager')
module_inject(__name__)
@mud_action('emote', target_class='cmd_str')
def emote(source, target):
source.broadcast(raw="{}{} {}".format('' if source.imm_level else ':', source.name, target))
@mud_action('tell', target_class="player_online", obj_class="cmd_str")
def tell(source, target, obj):
tell_message(source, target, obj)
def tell_message(source, player, statement):
if not statement:
return source.display_line("Say what to " + player.name + "?")
player.last_tell = source.dbo_id
player.display_line(source.name + " tells you, `" + statement + "'", 'tell_from')
source.display_line("You tell " + player.name + ", `" + statement + "'", 'tell_to')
@mud_action('reply', target_class='cmd_str')
def reply(source, target):
if not source.last_tell:
raise ActionError("You have not received a tell recently.")
session = sm.player_session(source.last_tell)
if session:
tell_message(source, session.player, target)
else:
source.last_tell = None
return source.display_line("{} is no longer logged in".format(source.last_tell))
@mud_action('say', target_class='cmd_str')
def say(source, target):
source.display_line("You say, `{}'".format(target), display='say')
source.broadcast(raw="{} says, `{}'".format(source.name, target),
display='say', silent=True)
|
mit
| 6,273,739,487,291,257,000
| 35
| 96
| 0.662791
| false
| 3.372549
| false
| false
| false
|
googleapis/googleapis-gen
|
google/cloud/videointelligence/v1p1beta1/videointelligence-v1p1beta1-py/google/cloud/videointelligence_v1p1beta1/services/video_intelligence_service/transports/grpc_asyncio.py
|
1
|
12936
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import warnings
from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
from google.api_core import gapic_v1 # type: ignore
from google.api_core import grpc_helpers_async # type: ignore
from google.api_core import operations_v1 # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
import packaging.version
import grpc # type: ignore
from grpc.experimental import aio # type: ignore
from google.cloud.videointelligence_v1p1beta1.types import video_intelligence
from google.longrunning import operations_pb2 # type: ignore
from .base import VideoIntelligenceServiceTransport, DEFAULT_CLIENT_INFO
from .grpc import VideoIntelligenceServiceGrpcTransport
class VideoIntelligenceServiceGrpcAsyncIOTransport(VideoIntelligenceServiceTransport):
"""gRPC AsyncIO backend transport for VideoIntelligenceService.
Service that implements Google Cloud Video Intelligence API.
This class defines the same methods as the primary client, so the
primary client can load the underlying transport implementation
and call it.
It sends protocol buffers over the wire using gRPC (which is built on
top of HTTP/2); the ``grpcio`` package must be installed.
"""
_grpc_channel: aio.Channel
_stubs: Dict[str, Callable] = {}
@classmethod
def create_channel(cls,
host: str = 'videointelligence.googleapis.com',
credentials: ga_credentials.Credentials = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
**kwargs) -> aio.Channel:
"""Create and return a gRPC AsyncIO channel object.
Args:
host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
kwargs (Optional[dict]): Keyword arguments, which are passed to the
channel creation.
Returns:
aio.Channel: A gRPC AsyncIO channel object.
"""
return grpc_helpers_async.create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
quota_project_id=quota_project_id,
default_scopes=cls.AUTH_SCOPES,
scopes=scopes,
default_host=cls.DEFAULT_HOST,
**kwargs
)
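    # Usage sketch (default endpoint; application default credentials are
    # assumed to be available in the environment):
    #   channel = VideoIntelligenceServiceGrpcAsyncIOTransport.create_channel(
    #       'videointelligence.googleapis.com')
    #   transport = VideoIntelligenceServiceGrpcAsyncIOTransport(channel=channel)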
def __init__(self, *,
host: str = 'videointelligence.googleapis.com',
credentials: ga_credentials.Credentials = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
channel: aio.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id=None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
always_use_jwt_access: Optional[bool] = False,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if ``channel`` is provided.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
channel (Optional[aio.Channel]): A ``Channel`` instance through
which to make calls.
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
                ``client_cert_source`` or application default SSL credentials.
client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
Deprecated. A callback to provide client SSL certificate bytes and
private key bytes, both in PEM format. It is ignored if
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for grpc channel. It is ignored if ``channel`` is provided.
client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
A callback to provide client certificate bytes and private key bytes,
both in PEM format. It is used to configure mutual TLS channel. It is
ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be used for service account credentials.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
creation failed for any reason.
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
self._grpc_channel = None
self._ssl_channel_credentials = ssl_channel_credentials
self._stubs: Dict[str, Callable] = {}
self._operations_client = None
if api_mtls_endpoint:
warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
if client_cert_source:
warnings.warn("client_cert_source is deprecated", DeprecationWarning)
if channel:
# Ignore credentials if a channel was passed.
credentials = False
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
else:
if api_mtls_endpoint:
host = api_mtls_endpoint
# Create SSL credentials with client_cert_source or application
# default SSL credentials.
if client_cert_source:
cert, key = client_cert_source()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
else:
self._ssl_channel_credentials = SslCredentials().ssl_credentials
else:
if client_cert_source_for_mtls and not ssl_channel_credentials:
cert, key = client_cert_source_for_mtls()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
# The base transport sets the host, credentials and scopes
super().__init__(
host=host,
credentials=credentials,
credentials_file=credentials_file,
scopes=scopes,
quota_project_id=quota_project_id,
client_info=client_info,
always_use_jwt_access=always_use_jwt_access,
)
if not self._grpc_channel:
self._grpc_channel = type(self).create_channel(
self._host,
credentials=self._credentials,
credentials_file=credentials_file,
scopes=self._scopes,
ssl_credentials=self._ssl_channel_credentials,
quota_project_id=quota_project_id,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
# Wrap messages. This must be done after self._grpc_channel exists
self._prep_wrapped_messages(client_info)
@property
def grpc_channel(self) -> aio.Channel:
"""Create the channel designed to connect to this service.
This property caches on the instance; repeated calls return
the same channel.
"""
# Return the channel from cache.
return self._grpc_channel
@property
def operations_client(self) -> operations_v1.OperationsAsyncClient:
"""Create the client designed to process long-running operations.
This property caches on the instance; repeated calls return the same
client.
"""
# Sanity check: Only create a new client if we do not already have one.
if self._operations_client is None:
self._operations_client = operations_v1.OperationsAsyncClient(
self.grpc_channel
)
# Return the client from cache.
return self._operations_client
@property
def annotate_video(self) -> Callable[
[video_intelligence.AnnotateVideoRequest],
Awaitable[operations_pb2.Operation]]:
r"""Return a callable for the annotate video method over gRPC.
Performs asynchronous video annotation. Progress and results can
be retrieved through the ``google.longrunning.Operations``
interface. ``Operation.metadata`` contains
``AnnotateVideoProgress`` (progress). ``Operation.response``
contains ``AnnotateVideoResponse`` (results).
Returns:
Callable[[~.AnnotateVideoRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'annotate_video' not in self._stubs:
self._stubs['annotate_video'] = self.grpc_channel.unary_unary(
'/google.cloud.videointelligence.v1p1beta1.VideoIntelligenceService/AnnotateVideo',
request_serializer=video_intelligence.AnnotateVideoRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs['annotate_video']
__all__ = (
'VideoIntelligenceServiceGrpcAsyncIOTransport',
)
|
apache-2.0
| -4,601,164,260,622,470,700
| 45.365591
| 99
| 0.619125
| false
| 4.782255
| false
| false
| false
|
bioinform/somaticseq
|
somaticseq/vcfModifier/modify_JointSNVMix2.py
|
1
|
3347
|
#!/usr/bin/env python3
import argparse
import somaticseq.genomicFileHandler.genomic_file_handlers as genome
def run():
parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
# Variant Call Type, i.e., snp or indel
parser.add_argument('-infile', '--input-vcf', type=str, help='Input VCF file', required=True)
parser.add_argument('-outfile', '--output-vcf', type=str, help='Output VCF file', required=True)
# Parse the arguments:
args = parser.parse_args()
infile = args.input_vcf
outfile = args.output_vcf
return infile, outfile
def convert(infile, outfile):
idx_chrom,idx_pos,idx_id,idx_ref,idx_alt,idx_qual,idx_filter,idx_info,idx_format,idx_SM1,idx_SM2 = 0,1,2,3,4,5,6,7,8,9,10
with genome.open_textfile(infile) as vcf, open(outfile, 'w') as vcfout:
line_i = vcf.readline().rstrip()
# VCF header
while line_i.startswith('#'):
if line_i.startswith('##FORMAT=<ID=AD,'):
line_i = '##FORMAT=<ID=AD,Number=.,Type=Integer,Description="Allelic depths for the ref and alt alleles in the order listed">'
vcfout.write( line_i + '\n')
line_i = vcf.readline().rstrip()
while line_i:
item = line_i.split('\t')
format_items = item[idx_format].split(':')
if 'AD' in format_items and 'RD' in format_items:
# NORMAL
idx_ad = format_items.index('AD')
idx_rd = format_items.index('RD')
format_items.pop(idx_rd)
item_normal = item[idx_SM1].split(':')
normal_ad = int(item_normal[idx_ad])
normal_rd = int(item_normal[idx_rd])
try:
vaf = normal_ad / (normal_ad + normal_rd)
except ZeroDivisionError:
vaf = 0
if vaf > 0.8:
normal_gt = '1/1'
elif vaf > 0.25:
normal_gt = '0/1'
else:
normal_gt = '0/0'
item_normal[idx_ad] = '{},{}'.format( item_normal[idx_rd] , item_normal[idx_ad] )
item_normal.pop(idx_rd)
item_normal = [normal_gt] + item_normal
# TUMOR
item_tumor = item[idx_SM2].split(':')
tumor_ad = int(item_tumor[idx_ad])
tumor_rd = int(item_tumor[idx_rd])
try:
vaf = tumor_ad / (tumor_ad + tumor_rd)
except ZeroDivisionError:
vaf = 0
if vaf > 0.8:
tumor_gt = '1/1'
else:
tumor_gt = '0/1'
item_tumor[idx_ad] = '{},{}'.format( item_tumor[idx_rd] , item_tumor[idx_ad] )
item_tumor.pop(idx_rd)
item_tumor = [tumor_gt] + item_tumor
# Rewrite
item[idx_format] = 'GT:' + ':'.join(format_items)
item[idx_SM1] = ':'.join(item_normal)
item[idx_SM2] = ':'.join(item_tumor)
line_i = '\t'.join(item)
vcfout.write(line_i+'\n')
line_i = vcf.readline().rstrip()
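# Worked example for the rewrite above (hypothetical record): with FORMAT
# 'AD:RD' and a normal sample '7:93' (AD=7, RD=93), vaf = 7/100 = 0.07, which
# is not > 0.25, so the normal genotype becomes '0/0'; AD is rewritten as
# 'RD,AD', yielding sample '0/0:93,7' under the new FORMAT 'GT:AD'.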
if __name__ == '__main__':
infile, outfile = run()
convert(infile, outfile)
|
bsd-2-clause
| 4,254,031,095,837,364,000
| 29.990741
| 142
| 0.501942
| false
| 3.464803
| false
| false
| false
|
phenoxim/nova
|
nova/notifications/objects/instance.py
|
1
|
25085
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import nova.conf
from nova.notifications.objects import base
from nova.notifications.objects import flavor as flavor_payload
from nova.notifications.objects import keypair as keypair_payload
from nova.objects import base as nova_base
from nova.objects import fields
CONF = nova.conf.CONF
@nova_base.NovaObjectRegistry.register_notification
class InstancePayload(base.NotificationPayloadBase):
SCHEMA = {
'uuid': ('instance', 'uuid'),
'user_id': ('instance', 'user_id'),
'tenant_id': ('instance', 'project_id'),
'reservation_id': ('instance', 'reservation_id'),
'display_name': ('instance', 'display_name'),
'display_description': ('instance', 'display_description'),
'host_name': ('instance', 'hostname'),
'host': ('instance', 'host'),
'node': ('instance', 'node'),
'os_type': ('instance', 'os_type'),
'architecture': ('instance', 'architecture'),
'availability_zone': ('instance', 'availability_zone'),
'image_uuid': ('instance', 'image_ref'),
'key_name': ('instance', 'key_name'),
'kernel_id': ('instance', 'kernel_id'),
'ramdisk_id': ('instance', 'ramdisk_id'),
'created_at': ('instance', 'created_at'),
'launched_at': ('instance', 'launched_at'),
'terminated_at': ('instance', 'terminated_at'),
'deleted_at': ('instance', 'deleted_at'),
'updated_at': ('instance', 'updated_at'),
'state': ('instance', 'vm_state'),
'power_state': ('instance', 'power_state'),
'task_state': ('instance', 'task_state'),
'progress': ('instance', 'progress'),
'metadata': ('instance', 'metadata'),
'locked': ('instance', 'locked'),
'auto_disk_config': ('instance', 'auto_disk_config')
}
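    # Each SCHEMA entry maps a payload field to a (source object alias,
    # source field) pair; populate_schema(instance=instance) below copies
    # those attributes from the instance into this payload.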
# Version 1.0: Initial version
# Version 1.1: add locked and display_description field
# Version 1.2: Add auto_disk_config field
# Version 1.3: Add key_name field
# Version 1.4: Add BDM related data
# Version 1.5: Add updated_at field
# Version 1.6: Add request_id field
VERSION = '1.6'
fields = {
'uuid': fields.UUIDField(),
'user_id': fields.StringField(nullable=True),
'tenant_id': fields.StringField(nullable=True),
'reservation_id': fields.StringField(nullable=True),
'display_name': fields.StringField(nullable=True),
'display_description': fields.StringField(nullable=True),
'host_name': fields.StringField(nullable=True),
'host': fields.StringField(nullable=True),
'node': fields.StringField(nullable=True),
'os_type': fields.StringField(nullable=True),
'architecture': fields.StringField(nullable=True),
'availability_zone': fields.StringField(nullable=True),
'flavor': fields.ObjectField('FlavorPayload'),
'image_uuid': fields.StringField(nullable=True),
'key_name': fields.StringField(nullable=True),
'kernel_id': fields.StringField(nullable=True),
'ramdisk_id': fields.StringField(nullable=True),
'created_at': fields.DateTimeField(nullable=True),
'launched_at': fields.DateTimeField(nullable=True),
'terminated_at': fields.DateTimeField(nullable=True),
'deleted_at': fields.DateTimeField(nullable=True),
'updated_at': fields.DateTimeField(nullable=True),
'state': fields.InstanceStateField(nullable=True),
'power_state': fields.InstancePowerStateField(nullable=True),
'task_state': fields.InstanceTaskStateField(nullable=True),
'progress': fields.IntegerField(nullable=True),
'ip_addresses': fields.ListOfObjectsField('IpPayload'),
'block_devices': fields.ListOfObjectsField('BlockDevicePayload',
nullable=True),
'metadata': fields.DictOfStringsField(),
'locked': fields.BooleanField(),
'auto_disk_config': fields.DiskConfigField(),
'request_id': fields.StringField(nullable=True),
}
def __init__(self, context, instance, bdms=None):
super(InstancePayload, self).__init__()
network_info = instance.get_network_info()
self.ip_addresses = IpPayload.from_network_info(network_info)
self.flavor = flavor_payload.FlavorPayload(flavor=instance.flavor)
if bdms is not None:
self.block_devices = BlockDevicePayload.from_bdms(bdms)
else:
self.block_devices = BlockDevicePayload.from_instance(instance)
        # NOTE(Kevin_Zheng): Don't include request_id for periodic tasks,
        # as the RequestContext for periodic tasks does not include
        # project_id and user_id. Consider modifying this once periodic
        # tasks get a consistent request_id.
self.request_id = context.request_id if (context.project_id and
context.user_id) else None
self.populate_schema(instance=instance)
@nova_base.NovaObjectRegistry.register_notification
class InstanceActionPayload(InstancePayload):
# No SCHEMA as all the additional fields are calculated
# Version 1.1: locked and display_description added to InstancePayload
# Version 1.2: Added auto_disk_config field to InstancePayload
# Version 1.3: Added key_name field to InstancePayload
# Version 1.4: Add BDM related data
# Version 1.5: Added updated_at field to InstancePayload
# Version 1.6: Added request_id field to InstancePayload
VERSION = '1.6'
fields = {
'fault': fields.ObjectField('ExceptionPayload', nullable=True),
'request_id': fields.StringField(nullable=True),
}
def __init__(self, context, instance, fault, bdms=None):
super(InstanceActionPayload, self).__init__(context=context,
instance=instance,
bdms=bdms)
self.fault = fault
@nova_base.NovaObjectRegistry.register_notification
class InstanceActionVolumePayload(InstanceActionPayload):
# Version 1.0: Initial version
# Version 1.1: Added key_name field to InstancePayload
# Version 1.2: Add BDM related data
# Version 1.3: Added updated_at field to InstancePayload
# Version 1.4: Added request_id field to InstancePayload
VERSION = '1.4'
fields = {
'volume_id': fields.UUIDField()
}
def __init__(self, context, instance, fault, volume_id):
super(InstanceActionVolumePayload, self).__init__(
context=context,
instance=instance,
fault=fault)
self.volume_id = volume_id
@nova_base.NovaObjectRegistry.register_notification
class InstanceActionVolumeSwapPayload(InstanceActionPayload):
# No SCHEMA as all the additional fields are calculated
# Version 1.1: locked and display_description added to InstancePayload
# Version 1.2: Added auto_disk_config field to InstancePayload
# Version 1.3: Added key_name field to InstancePayload
# Version 1.4: Add BDM related data
# Version 1.5: Added updated_at field to InstancePayload
# Version 1.6: Added request_id field to InstancePayload
VERSION = '1.6'
fields = {
'old_volume_id': fields.UUIDField(),
'new_volume_id': fields.UUIDField(),
}
def __init__(self, context, instance, fault, old_volume_id, new_volume_id):
super(InstanceActionVolumeSwapPayload, self).__init__(
context=context,
instance=instance,
fault=fault)
self.old_volume_id = old_volume_id
self.new_volume_id = new_volume_id
@nova_base.NovaObjectRegistry.register_notification
class InstanceCreatePayload(InstanceActionPayload):
# No SCHEMA as all the additional fields are calculated
# Version 1.2: Initial version. It starts at 1.2 to match with the version
# of the InstanceActionPayload at the time when this specific
# payload is created as a child of it so that the
# instance.create notification using this new payload does not
# have decreasing version.
# 1.3: Add keypairs field
# 1.4: Add key_name field to InstancePayload
# 1.5: Add BDM related data to InstancePayload
# 1.6: Add tags field to InstanceCreatePayload
# 1.7: Added updated_at field to InstancePayload
# 1.8: Added request_id field to InstancePayload
VERSION = '1.8'
fields = {
'keypairs': fields.ListOfObjectsField('KeypairPayload'),
'tags': fields.ListOfStringsField(),
}
def __init__(self, context, instance, fault, bdms):
super(InstanceCreatePayload, self).__init__(
context=context,
instance=instance,
fault=fault,
bdms=bdms)
self.keypairs = [keypair_payload.KeypairPayload(keypair=keypair)
for keypair in instance.keypairs]
self.tags = [instance_tag.tag
for instance_tag in instance.tags]
@nova_base.NovaObjectRegistry.register_notification
class InstanceActionResizePrepPayload(InstanceActionPayload):
# No SCHEMA as all the additional fields are calculated
# Version 1.0: Initial version
# Version 1.1: Added request_id field to InstancePayload
VERSION = '1.1'
fields = {
'new_flavor': fields.ObjectField('FlavorPayload', nullable=True)
}
def __init__(self, context, instance, fault, new_flavor):
super(InstanceActionResizePrepPayload, self).__init__(
context=context,
instance=instance,
fault=fault)
self.new_flavor = new_flavor
@nova_base.NovaObjectRegistry.register_notification
class InstanceUpdatePayload(InstancePayload):
# Version 1.0: Initial version
# Version 1.1: locked and display_description added to InstancePayload
# Version 1.2: Added tags field
# Version 1.3: Added auto_disk_config field to InstancePayload
# Version 1.4: Added key_name field to InstancePayload
# Version 1.5: Add BDM related data
# Version 1.6: Added updated_at field to InstancePayload
# Version 1.7: Added request_id field to InstancePayload
VERSION = '1.7'
fields = {
'state_update': fields.ObjectField('InstanceStateUpdatePayload'),
'audit_period': fields.ObjectField('AuditPeriodPayload'),
'bandwidth': fields.ListOfObjectsField('BandwidthPayload'),
'old_display_name': fields.StringField(nullable=True),
'tags': fields.ListOfStringsField(),
}
def __init__(self, context, instance, state_update, audit_period,
bandwidth, old_display_name):
super(InstanceUpdatePayload, self).__init__(
context=context, instance=instance)
self.state_update = state_update
self.audit_period = audit_period
self.bandwidth = bandwidth
self.old_display_name = old_display_name
self.tags = [instance_tag.tag
for instance_tag in instance.tags.objects]
@nova_base.NovaObjectRegistry.register_notification
class InstanceActionRescuePayload(InstanceActionPayload):
# Version 1.0: Initial version
# Version 1.1: Added request_id field to InstancePayload
VERSION = '1.1'
fields = {
'rescue_image_ref': fields.UUIDField(nullable=True)
}
def __init__(self, context, instance, fault, rescue_image_ref):
super(InstanceActionRescuePayload, self).__init__(
context=context,
instance=instance,
fault=fault)
self.rescue_image_ref = rescue_image_ref
@nova_base.NovaObjectRegistry.register_notification
class IpPayload(base.NotificationPayloadBase):
# Version 1.0: Initial version
VERSION = '1.0'
fields = {
'label': fields.StringField(),
'mac': fields.MACAddressField(),
'meta': fields.DictOfStringsField(),
'port_uuid': fields.UUIDField(nullable=True),
'version': fields.IntegerField(),
'address': fields.IPV4AndV6AddressField(),
'device_name': fields.StringField(nullable=True)
}
def __init__(self, label, mac, meta, port_uuid, version, address,
device_name):
super(IpPayload, self).__init__()
self.label = label
self.mac = mac
self.meta = meta
self.port_uuid = port_uuid
self.version = version
self.address = address
self.device_name = device_name
@classmethod
def from_network_info(cls, network_info):
"""Returns a list of IpPayload object based on the passed
network_info.
"""
ips = []
if network_info is not None:
for vif in network_info:
for ip in vif.fixed_ips():
ips.append(cls(
label=vif["network"]["label"],
mac=vif["address"],
meta=vif["meta"],
port_uuid=vif["id"],
version=ip["version"],
address=ip["address"],
device_name=vif["devname"]))
return ips
@nova_base.NovaObjectRegistry.register_notification
class BandwidthPayload(base.NotificationPayloadBase):
# Version 1.0: Initial version
VERSION = '1.0'
fields = {
'network_name': fields.StringField(),
'in_bytes': fields.IntegerField(),
'out_bytes': fields.IntegerField(),
}
def __init__(self, network_name, in_bytes, out_bytes):
super(BandwidthPayload, self).__init__()
self.network_name = network_name
self.in_bytes = in_bytes
self.out_bytes = out_bytes
@nova_base.NovaObjectRegistry.register_notification
class AuditPeriodPayload(base.NotificationPayloadBase):
# Version 1.0: Initial version
VERSION = '1.0'
fields = {
'audit_period_beginning': fields.DateTimeField(),
'audit_period_ending': fields.DateTimeField(),
}
def __init__(self, audit_period_beginning, audit_period_ending):
super(AuditPeriodPayload, self).__init__()
self.audit_period_beginning = audit_period_beginning
self.audit_period_ending = audit_period_ending
@nova_base.NovaObjectRegistry.register_notification
class BlockDevicePayload(base.NotificationPayloadBase):
# Version 1.0: Initial version
VERSION = '1.0'
SCHEMA = {
'device_name': ('bdm', 'device_name'),
'boot_index': ('bdm', 'boot_index'),
'delete_on_termination': ('bdm', 'delete_on_termination'),
'volume_id': ('bdm', 'volume_id'),
'tag': ('bdm', 'tag')
}
fields = {
'device_name': fields.StringField(nullable=True),
'boot_index': fields.IntegerField(nullable=True),
'delete_on_termination': fields.BooleanField(default=False),
'volume_id': fields.UUIDField(),
'tag': fields.StringField(nullable=True)
}
def __init__(self, bdm):
super(BlockDevicePayload, self).__init__()
self.populate_schema(bdm=bdm)
@classmethod
def from_instance(cls, instance):
"""Returns a list of BlockDevicePayload objects based on the passed
bdms.
"""
if not CONF.notifications.bdms_in_notifications:
return None
instance_bdms = instance.get_bdms()
if instance_bdms is not None:
return cls.from_bdms(instance_bdms)
else:
return []
@classmethod
def from_bdms(cls, bdms):
"""Returns a list of BlockDevicePayload objects based on the passed
BlockDeviceMappingList.
"""
payloads = []
for bdm in bdms:
if bdm.volume_id is not None:
payloads.append(cls(bdm))
return payloads
@nova_base.NovaObjectRegistry.register_notification
class InstanceStateUpdatePayload(base.NotificationPayloadBase):
# Version 1.0: Initial version
VERSION = '1.0'
fields = {
'old_state': fields.StringField(nullable=True),
'state': fields.StringField(nullable=True),
'old_task_state': fields.StringField(nullable=True),
'new_task_state': fields.StringField(nullable=True),
}
def __init__(self, old_state, state, old_task_state, new_task_state):
super(InstanceStateUpdatePayload, self).__init__()
self.old_state = old_state
self.state = state
self.old_task_state = old_task_state
self.new_task_state = new_task_state
@base.notification_sample('instance-delete-start.json')
@base.notification_sample('instance-delete-end.json')
@base.notification_sample('instance-pause-start.json')
@base.notification_sample('instance-pause-end.json')
@base.notification_sample('instance-unpause-start.json')
@base.notification_sample('instance-unpause-end.json')
@base.notification_sample('instance-resize-start.json')
@base.notification_sample('instance-resize-end.json')
@base.notification_sample('instance-resize-error.json')
@base.notification_sample('instance-suspend-start.json')
@base.notification_sample('instance-suspend-end.json')
@base.notification_sample('instance-power_on-start.json')
@base.notification_sample('instance-power_on-end.json')
@base.notification_sample('instance-power_off-start.json')
@base.notification_sample('instance-power_off-end.json')
@base.notification_sample('instance-reboot-start.json')
@base.notification_sample('instance-reboot-end.json')
@base.notification_sample('instance-reboot-error.json')
@base.notification_sample('instance-shutdown-start.json')
@base.notification_sample('instance-shutdown-end.json')
@base.notification_sample('instance-interface_attach-start.json')
@base.notification_sample('instance-interface_attach-end.json')
@base.notification_sample('instance-interface_attach-error.json')
@base.notification_sample('instance-shelve-start.json')
@base.notification_sample('instance-shelve-end.json')
@base.notification_sample('instance-resume-start.json')
@base.notification_sample('instance-resume-end.json')
@base.notification_sample('instance-restore-start.json')
@base.notification_sample('instance-restore-end.json')
@base.notification_sample('instance-evacuate.json')
@base.notification_sample('instance-resize_finish-start.json')
@base.notification_sample('instance-resize_finish-end.json')
@base.notification_sample('instance-live_migration_pre-start.json')
@base.notification_sample('instance-live_migration_pre-end.json')
@base.notification_sample('instance-live_migration_abort-start.json')
@base.notification_sample('instance-live_migration_abort-end.json')
# @base.notification_sample('instance-live_migration_post-start.json')
# @base.notification_sample('instance-live_migration_post-end.json')
@base.notification_sample('instance-live_migration_post_dest-start.json')
@base.notification_sample('instance-live_migration_post_dest-end.json')
@base.notification_sample('instance-live_migration_rollback-start.json')
@base.notification_sample('instance-live_migration_rollback-end.json')
# @base.notification_sample('instance-live_migration_rollback_dest-start.json')
# @base.notification_sample('instance-live_migration_rollback_dest-end.json')
@base.notification_sample('instance-rebuild-start.json')
@base.notification_sample('instance-rebuild-end.json')
@base.notification_sample('instance-rebuild-error.json')
@base.notification_sample('instance-interface_detach-start.json')
@base.notification_sample('instance-interface_detach-end.json')
@base.notification_sample('instance-resize_confirm-start.json')
@base.notification_sample('instance-resize_confirm-end.json')
@base.notification_sample('instance-resize_revert-start.json')
@base.notification_sample('instance-resize_revert-end.json')
@base.notification_sample('instance-shelve_offload-start.json')
@base.notification_sample('instance-shelve_offload-end.json')
@base.notification_sample('instance-soft_delete-start.json')
@base.notification_sample('instance-soft_delete-end.json')
@base.notification_sample('instance-trigger_crash_dump-start.json')
@base.notification_sample('instance-trigger_crash_dump-end.json')
@base.notification_sample('instance-unrescue-start.json')
@base.notification_sample('instance-unrescue-end.json')
@base.notification_sample('instance-unshelve-start.json')
@base.notification_sample('instance-unshelve-end.json')
@nova_base.NovaObjectRegistry.register_notification
class InstanceActionNotification(base.NotificationBase):
# Version 1.0: Initial version
VERSION = '1.0'
fields = {
'payload': fields.ObjectField('InstanceActionPayload')
}
@base.notification_sample('instance-update.json')
@nova_base.NovaObjectRegistry.register_notification
class InstanceUpdateNotification(base.NotificationBase):
# Version 1.0: Initial version
VERSION = '1.0'
fields = {
'payload': fields.ObjectField('InstanceUpdatePayload')
}
@base.notification_sample('instance-volume_swap-start.json')
@base.notification_sample('instance-volume_swap-end.json')
@base.notification_sample('instance-volume_swap-error.json')
@nova_base.NovaObjectRegistry.register_notification
class InstanceActionVolumeSwapNotification(base.NotificationBase):
# Version 1.0: Initial version
VERSION = '1.0'
fields = {
'payload': fields.ObjectField('InstanceActionVolumeSwapPayload')
}
@base.notification_sample('instance-volume_attach-start.json')
@base.notification_sample('instance-volume_attach-end.json')
@base.notification_sample('instance-volume_attach-error.json')
@base.notification_sample('instance-volume_detach-start.json')
@base.notification_sample('instance-volume_detach-end.json')
@nova_base.NovaObjectRegistry.register_notification
class InstanceActionVolumeNotification(base.NotificationBase):
# Version 1.0: Initial version
VERSION = '1.0'
fields = {
'payload': fields.ObjectField('InstanceActionVolumePayload')
}
@base.notification_sample('instance-create-start.json')
@base.notification_sample('instance-create-end.json')
@base.notification_sample('instance-create-error.json')
@nova_base.NovaObjectRegistry.register_notification
class InstanceCreateNotification(base.NotificationBase):
# Version 1.0: Initial version
VERSION = '1.0'
fields = {
'payload': fields.ObjectField('InstanceCreatePayload')
}
@base.notification_sample('instance-resize_prep-start.json')
@base.notification_sample('instance-resize_prep-end.json')
@nova_base.NovaObjectRegistry.register_notification
class InstanceActionResizePrepNotification(base.NotificationBase):
# Version 1.0: Initial version
VERSION = '1.0'
fields = {
'payload': fields.ObjectField('InstanceActionResizePrepPayload')
}
@base.notification_sample('instance-snapshot-start.json')
@base.notification_sample('instance-snapshot-end.json')
@nova_base.NovaObjectRegistry.register_notification
class InstanceActionSnapshotNotification(base.NotificationBase):
# Version 1.0: Initial version
VERSION = '1.0'
fields = {
'payload': fields.ObjectField('InstanceActionSnapshotPayload')
}
@base.notification_sample('instance-rescue-start.json')
@base.notification_sample('instance-rescue-end.json')
@nova_base.NovaObjectRegistry.register_notification
class InstanceActionRescueNotification(base.NotificationBase):
# Version 1.0: Initial version
VERSION = '1.0'
fields = {
'payload': fields.ObjectField('InstanceActionRescuePayload')
}
@nova_base.NovaObjectRegistry.register_notification
class InstanceActionSnapshotPayload(InstanceActionPayload):
# Version 1.6: Initial version. It starts at version 1.6 as
# instance.snapshot.start and .end notifications are switched
# from using InstanceActionPayload 1.5 to this new payload and
# also it added a new field so we wanted to keep the version
# number increasing to signal the change.
# Version 1.7: Added request_id field to InstancePayload
VERSION = '1.7'
fields = {
'snapshot_image_id': fields.UUIDField(),
}
def __init__(self, context, instance, fault, snapshot_image_id):
super(InstanceActionSnapshotPayload, self).__init__(
context=context,
instance=instance,
fault=fault)
self.snapshot_image_id = snapshot_image_id
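# --- Hedged illustrative sketch (toy types, not nova's API): the SCHEMA
# dicts above map each payload field to an attribute of a named source
# object, and populate_schema() copies the values across, roughly like this
# simplified stand-alone version.
if __name__ == '__main__':
    class _ToyBdm(object):
        device_name = '/dev/vdb'
        volume_id = '11111111-2222-3333-4444-555555555555'

    schema = {'device_name': ('bdm', 'device_name'),
              'volume_id': ('bdm', 'volume_id')}
    sources = {'bdm': _ToyBdm()}
    payload = {field: getattr(sources[obj], attr)
               for field, (obj, attr) in schema.items()}
    print(payload)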
|
apache-2.0
| -2,227,258,545,918,772,700
| 38.880763
| 79
| 0.673351
| false
| 3.995063
| false
| false
| false
|
MalloyPower/parsing-python
|
front-end/testsuite-python-lib/Python-3.2/Lib/distutils/command/check.py
|
1
|
5369
|
"""distutils.command.check
Implements the Distutils 'check' command.
"""
__revision__ = "$Id: check.py 85197 2010-10-03 14:18:09Z tarek.ziade $"
from distutils.core import Command
from distutils.errors import DistutilsSetupError
try:
# docutils is installed
from docutils.utils import Reporter
from docutils.parsers.rst import Parser
from docutils import frontend
from docutils import nodes
from io import StringIO
class SilentReporter(Reporter):
def __init__(self, source, report_level, halt_level, stream=None,
debug=0, encoding='ascii', error_handler='replace'):
self.messages = []
Reporter.__init__(self, source, report_level, halt_level, stream,
debug, encoding, error_handler)
def system_message(self, level, message, *children, **kwargs):
self.messages.append((level, message, children, kwargs))
HAS_DOCUTILS = True
except Exception:
# Catch all exceptions because exceptions besides ImportError probably
# indicate that docutils is not ported to Py3k.
HAS_DOCUTILS = False
class check(Command):
"""This command checks the meta-data of the package.
"""
description = ("perform some checks on the package")
user_options = [('metadata', 'm', 'Verify meta-data'),
('restructuredtext', 'r',
('Checks if long string meta-data syntax '
                      'is reStructuredText-compliant')),
('strict', 's',
'Will exit with an error if a check fails')]
boolean_options = ['metadata', 'restructuredtext', 'strict']
def initialize_options(self):
"""Sets default values for options."""
self.restructuredtext = 0
self.metadata = 1
self.strict = 0
self._warnings = 0
def finalize_options(self):
pass
def warn(self, msg):
"""Counts the number of warnings that occurs."""
self._warnings += 1
return Command.warn(self, msg)
def run(self):
"""Runs the command."""
# perform the various tests
if self.metadata:
self.check_metadata()
if self.restructuredtext:
if HAS_DOCUTILS:
self.check_restructuredtext()
elif self.strict:
raise DistutilsSetupError('The docutils package is needed.')
# let's raise an error in strict mode, if we have at least
# one warning
if self.strict and self._warnings > 0:
raise DistutilsSetupError('Please correct your package.')
def check_metadata(self):
"""Ensures that all required elements of meta-data are supplied.
name, version, URL, (author and author_email) or
        (maintainer and maintainer_email).
Warns if any are missing.
"""
metadata = self.distribution.metadata
missing = []
for attr in ('name', 'version', 'url'):
if not (hasattr(metadata, attr) and getattr(metadata, attr)):
missing.append(attr)
if missing:
self.warn("missing required meta-data: %s" % ', '.join(missing))
if metadata.author:
if not metadata.author_email:
self.warn("missing meta-data: if 'author' supplied, " +
"'author_email' must be supplied too")
elif metadata.maintainer:
if not metadata.maintainer_email:
self.warn("missing meta-data: if 'maintainer' supplied, " +
"'maintainer_email' must be supplied too")
else:
self.warn("missing meta-data: either (author and author_email) " +
"or (maintainer and maintainer_email) " +
"must be supplied")
def check_restructuredtext(self):
"""Checks if the long string fields are reST-compliant."""
data = self.distribution.get_long_description()
for warning in self._check_rst_data(data):
line = warning[-1].get('line')
if line is None:
warning = warning[1]
else:
warning = '%s (line %s)' % (warning[1], line)
self.warn(warning)
def _check_rst_data(self, data):
"""Returns warnings when the provided data doesn't compile."""
source_path = StringIO()
parser = Parser()
settings = frontend.OptionParser().get_default_values()
settings.tab_width = 4
settings.pep_references = None
settings.rfc_references = None
reporter = SilentReporter(source_path,
settings.report_level,
settings.halt_level,
stream=settings.warning_stream,
debug=settings.debug,
encoding=settings.error_encoding,
error_handler=settings.error_encoding_error_handler)
document = nodes.document(settings, reporter, source=source_path)
document.note_source(source_path, -1)
try:
parser.parse(data, document)
except AttributeError:
reporter.messages.append((-1, 'Could not finish the parsing.',
'', {}))
return reporter.messages
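if __name__ == '__main__':
    # Hedged demo (not part of the original module; the field values below
    # are invented): run the metadata check against a minimal in-memory
    # Distribution.
    from distutils.dist import Distribution
    demo_dist = Distribution({'name': 'demo', 'version': '0.1',
                              'url': 'http://example.invalid',
                              'author': 'A. Author',
                              'author_email': 'author@example.invalid'})
    cmd = check(demo_dist)
    cmd.ensure_finalized()
    cmd.run()  # warns only if required meta-data is missing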
|
mit
| -768,187,753,055,831,900
| 36.284722
| 78
| 0.575899
| false
| 4.672759
| false
| false
| false
|
cbrunker/quip
|
lib/Handlers.py
|
1
|
11422
|
#
# Response handlers for P2P Server
#
import asyncio
import logging
from functools import partial
from hashlib import sha1, sha384
from uuid import uuid4
from os import path
from lib.Database import getFriendRequests, getSigningKeys, setUidMask, storeAuthority, setFriendAuth, getMessageKeys, \
setAddress, getFileRequests, storeFileRequest, delFileRequests, delFriendRequests, getFriendChecksum, \
updateFriendDetails, storeHistory
from lib.Utils import isValidUUID, sha1sum
from lib.Constants import BTRUE, BFALSE, WRITE_END, COMMAND_LENGTH, NONEXISTANT, PROFILE_VALUE_SEPARATOR, \
LIMIT_AVATAR_SIZE, MODIFIED_FILE
######################################
# Server Dispatch Coroutine Handlers
######################################
@asyncio.coroutine
def friendAcceptance(reader, writer, safe, profileId, data, requests=None):
"""
Handle incoming friend request acceptance (P2P)
Once a request has been made, and the destination user accepts, the destination user contacts the request user
who runs this coroutine to complete the friendship.
Requester->Server (quip client, friendRequest)
Server->Destination (Heartbeat token)
Destination->Server (quip client, getRequests)
Destination->Requester (p2p client, friendCompletion) to (p2p server, this coroutine)
@param reader: StreamReader object
    @param writer: StreamWriter object
@param safe: crypto box
@param profileId: profile ID of logged in user
    @param data: message hash followed by uid
@param requests: (Optional) Recent outgoing friend requests {uid: message hash}
@return: Auth token
"""
if not requests:
requests = {}
# auth token
auth = None
try:
# verify required input data length
assert len(data) == 76
# user id, message hash
mhash, uid = data[:-36], data[-36:]
# valid UUID
assert isValidUUID(uid) is True
except AssertionError:
logging.info("\t".join(("Invalid friend completion data received", "Data: {!r}".format(data))))
return b''.join((BFALSE, WRITE_END)), auth
if uid not in requests:
# check db for older requests
requests.update(getFriendRequests(safe, profileId))
# obtain request information for this user (uid)
try:
msg, timestamp, _, rowid = requests[uid]
except KeyError:
logging.warning("\t".join(("Friend Request Failure",
"No friend request found for given user ID", "UID: {!r}".format(uid))))
return b''.join((BFALSE, WRITE_END)), auth
# ensure our potential friend has the correct hash value for the friend request
try:
assert mhash.decode('ascii') == sha1(b''.join((uid, msg))).hexdigest()
except (UnicodeDecodeError, AssertionError):
logging.warning("\t".join(("Friend Request Failure", "Hash values do not match",
"Sent: {!r}".format(mhash),
"Local: {!r}".format(sha1(b''.join((uid, msg))).hexdigest()))))
return b''.join((BFALSE, WRITE_END)), auth
# hash value has matched, get public key
spub = getSigningKeys(safe, profileId)[1]
mpub = getMessageKeys(safe, profileId)[1]
# auth token sent to friend
token = bytes(str(uuid4()), encoding='ascii')
# create our auth token to be sent to server
auth = bytes(sha384(b''.join((uid, token))).hexdigest(), encoding='ascii')
# work out length of data
data = b''.join((token, spub, mpub))
    # send the length to read, followed by the auth token and public keys
writer.write(b''.join((bytes(str(len(data)), encoding='ascii'), WRITE_END, data)))
yield from writer.drain()
# recv back success to confirm storage of sent data by friend
success = yield from reader.readline()
try:
assert int(success[0]) == 49
int(success)
except (KeyError, ValueError):
logging.warning("\t".join(("Friend Request Warning",
"Friendship completion failed. Storage confirmation: {!r}".format(success))))
return b''.join((BFALSE, WRITE_END)), None
port = success[1:-1]
# receive length to read
data = yield from reader.readline()
try:
length = int(data)
except ValueError:
return b''.join((BFALSE, WRITE_END)), None
data = yield from reader.read(length)
fauth, spub, mpub = data[:36], data[36:100], data[100:]
try:
assert len(data) > 115
assert isValidUUID(fauth) is True
except AssertionError:
logging.error("\t".join(("Friend Request Failure",
"Invalid mask or public key provided", "Data: {!r}".format(data))))
return b''.join((BFALSE, WRITE_END)), None
# created and store localised mask of friend's true ID
fmask = setUidMask(safe, profileId, uid)
# store friend's auth mask
# (the mask we use when submitting authorised requests to the hub server regarding this friend)
setFriendAuth(safe, profileId, fmask, fauth, auth)
# store public key for friend
storeAuthority(safe, profileId, fmask, spub, mpub)
# store address locally
setAddress(safe, profileId, fmask,
b':'.join((bytes(writer.transport.get_extra_info('peername')[0], encoding='ascii'), port)))
# delete local friend request storage
delFriendRequests(rowid)
# True for success of all required friendship steps, hash of auth token we sent to friend (must be sent to hub server)
return BTRUE, auth
@asyncio.coroutine
def requestSendFile(safe, profileId, mask, data):
"""
Handle and store request for file transfer
@param safe: crypto box
@param profileId: logged in user's profile ID
@param mask: local friend mask for given friend's user ID
    @param data: filename, size and checksum separated by PROFILE_VALUE_SEPARATOR, followed by the user ID
    @return: (user id, filename, size, checksum, rowid) on success, otherwise False
"""
try:
filename, size, checksum = data[:-36].split(bytes(PROFILE_VALUE_SEPARATOR, encoding='utf-8'))
except ValueError:
logging.info("Invalid file request data recieved: {!r}".format(data))
return False
checksum = checksum[:-COMMAND_LENGTH]
# validate received data
try:
# sha1 hex length
assert len(checksum) == 40
# size in bytes must be integer
int(size)
except AssertionError:
logging.info("Invalid file request data received, checksum is not correct length: {!r}".format(checksum))
return False
except ValueError:
logging.info("Invalid file request data received, size is not an integer: {!r}".format(size))
return False
# store file transfer request
rowid = storeFileRequest(safe, profileId, outgoing=False, mask=mask, request=(filename, size, checksum))
return data[-36:], filename, size, checksum, rowid
@asyncio.coroutine
def sendFile(writer, safe, profileId, mask, checksum, expiry, blockSize=4098):
"""
Send file to from server to client destination
@param writer: StreamWriter object to client
@param safe: crypto box
@param profileId: logged in user's profile ID
@param mask: local friend mask for given friend's user ID
@param checksum: sha1 sum value of file to be sent
    @param expiry: number of days before file transfer requests expire (config-set value)
@param blockSize: total number of bytes to read at once
    @return: True when the file has been completely sent, otherwise False
"""
try:
# obtain current requests for provided mask and clear expired requests
filename, size, rowid = getFileRequests(safe, profileId, outgoing=True, mask=mask, expire=expiry)[mask][checksum]
except KeyError:
logging.warning("\t".join(("File Transfer Failed",
"File transfer request does not exist for mask {} and checksum {}".format(mask, checksum))))
writer.write(NONEXISTANT)
yield from writer.drain()
return False
if not path.isfile(filename):
delFileRequests(rowid)
logging.warning("\t".join(("File Transfer Failed", "File no longer exists: {}".format(filename))))
writer.write(NONEXISTANT)
yield from writer.drain()
return False
# match file checksum to ensure the same file which was to be sent
# has not been modified since the original transfer request
cursum = sha1sum(filename)
if checksum != cursum:
# remove invalid transfer request
delFileRequests(rowid)
logging.warning("\t".join(("File Transfer Failed", "File has been modified",
"Filename: {}".format(filename),
"Original checksum: {}".format(checksum),
"Current checksum: {}".format(cursum))))
writer.write(MODIFIED_FILE)
yield from writer.drain()
return False
blockSize = int(blockSize)
with open(filename, 'rb') as fd:
for buf in iter(partial(fd.read, blockSize), b''):
writer.write(buf)
yield from writer.drain()
# remove file transfer request from storage
delFileRequests(rowid)
return True
@asyncio.coroutine
def receiveAvatar(reader, writer, safe, profileId, mask, checksum):
"""
Receive avatar update check from friend
@param reader: client streamreader object
@param writer: streamwriter object
@param safe: crypto box
@param profileId: logged in user's profile ID
@param mask: friend mask uid
@param checksum: avatar sha1 checksum
@return: '0' if avatar not updated, otherwise locally calculated checksum value of stored avatar
"""
if len(checksum) != 40:
logging.warning("Friend mask '{}' tried to send invalid checksum value: {!r}".format(mask, checksum))
return BFALSE
try:
checksum = checksum.decode('ascii')
except UnicodeDecodeError:
return BFALSE
# compare local checksum value
if checksum != getFriendChecksum(safe, profileId, mask):
writer.write(BTRUE)
yield from writer.drain()
else:
return BFALSE
# get size of avatar to read from friend
size = yield from reader.readline()
try:
size = int(size)
assert size < LIMIT_AVATAR_SIZE
except (ValueError, AssertionError):
logging.warning("Friend mask '{}' tried to send invalid avatar size value: {!r}".format(mask, size))
return BFALSE
writer.write(BTRUE)
yield from writer.drain()
# read avatar into memory
avatar = yield from reader.readexactly(size)
# store avatar
storedChecksum = updateFriendDetails(safe, profileId, mask, avatar=avatar)
# send locally calculated checksum value as verification of storage
return storedChecksum
@asyncio.coroutine
def receiveMessage(safe, profileId, mask, data):
"""
    Process data as a received message
    @param data: bytes/bytestring of msg and uid sent by client
    @return: (rowid, user id, message) if a message was received, else False
"""
# msg portion of data
msg = data[:-36 - COMMAND_LENGTH]
rowid = storeHistory(safe, profileId, mask, msg, fromFriend=True)
    # rowid, uid, msg
return (rowid, data[-36:], msg) if msg else False
#######################
# P2P Client Handlers
#######################
@asyncio.coroutine
def inviteChat():
pass
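# --- Hedged stand-alone sketch (illustrative values): the wire format that
# friendAcceptance() validates is a 40-byte sha1 hex digest of (uid + msg)
# followed by the 36-byte uid, 76 bytes in total.
if __name__ == '__main__':
    demo_uid = bytes(str(uuid4()), encoding='ascii')
    demo_msg = b'hello, please accept my friend request'
    demo_hash = sha1(b''.join((demo_uid, demo_msg))).hexdigest().encode('ascii')
    demo_data = b''.join((demo_hash, demo_uid))
    assert len(demo_data) == 76
    assert demo_data[:-36] == demo_hash and demo_data[-36:] == demo_uid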
|
gpl-3.0
| 8,537,975,647,144,394,000
| 35.492013
| 127
| 0.651112
| false
| 4.276301
| false
| false
| false
|
t-hey/QGIS-Original
|
python/plugins/processing/algs/qgis/ExtendLines.py
|
1
|
2995
|
# -*- coding: utf-8 -*-
"""
***************************************************************************
ExtendLines.py
--------------------
Date : October 2016
Copyright : (C) 2016 by Nyall Dawson
Email : nyall dot dawson at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Nyall Dawson'
__date__ = 'October 2016'
__copyright__ = '(C) 2016, Nyall Dawson'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
from qgis.core import (QgsProcessingParameterNumber,
QgsProcessingException,
QgsProcessing)
from processing.algs.qgis.QgisAlgorithm import QgisFeatureBasedAlgorithm
class ExtendLines(QgisFeatureBasedAlgorithm):
START_DISTANCE = 'START_DISTANCE'
END_DISTANCE = 'END_DISTANCE'
def group(self):
return self.tr('Vector geometry')
def __init__(self):
super().__init__()
self.start_distance = None
self.end_distance = None
def initParameters(self, config=None):
self.addParameter(QgsProcessingParameterNumber(self.START_DISTANCE,
self.tr('Start distance'), defaultValue=0.0))
self.addParameter(QgsProcessingParameterNumber(self.END_DISTANCE,
self.tr('End distance'), defaultValue=0.0))
def name(self):
return 'extendlines'
def displayName(self):
return self.tr('Extend lines')
def outputName(self):
return self.tr('Extended')
def inputLayerTypes(self):
return [QgsProcessing.TypeVectorLine]
def prepareAlgorithm(self, parameters, context, feedback):
self.start_distance = self.parameterAsDouble(parameters, self.START_DISTANCE, context)
self.end_distance = self.parameterAsDouble(parameters, self.END_DISTANCE, context)
return True
def processFeature(self, feature, context, feedback):
input_geometry = feature.geometry()
if input_geometry:
output_geometry = input_geometry.extendLine(self.start_distance, self.end_distance)
if not output_geometry:
raise QgsProcessingException(
self.tr('Error calculating extended line'))
feature.setGeometry(output_geometry)
return feature
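# --- Hedged numeric sketch (plain Python, no QGIS needed; the coordinates
# are illustrative): the geometric effect of extendLine() on a two-point
# line, moving the start back and the end forward along the segment
# direction.
if __name__ == '__main__':
    import math

    def _extend(p0, p1, start_distance, end_distance):
        dx, dy = p1[0] - p0[0], p1[1] - p0[1]
        length = math.hypot(dx, dy)
        ux, uy = dx / length, dy / length
        new_start = (p0[0] - ux * start_distance, p0[1] - uy * start_distance)
        new_end = (p1[0] + ux * end_distance, p1[1] + uy * end_distance)
        return new_start, new_end

    # a 3-4-5 segment extended by 1 unit at the start and 2 at the end
    print(_extend((0.0, 0.0), (3.0, 4.0), 1.0, 2.0))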
|
gpl-2.0
| 7,437,769,807,170,386,000
| 36.4375
| 100
| 0.535225
| false
| 4.983361
| false
| false
| false
|
studywolf/blog
|
InvKin/Arm.py
|
1
|
7959
|
'''
Copyright (C) 2013 Travis DeWolf
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import numpy as np
import scipy.optimize
class Arm3Link:
def __init__(self, q=None, q0=None, L=None):
"""Set up the basic parameters of the arm.
All lists are in order [shoulder, elbow, wrist].
q : np.array
the initial joint angles of the arm
q0 : np.array
the default (resting state) joint configuration
L : np.array
the arm segment lengths
"""
# initial joint angles
self.q = [.3, .3, 0] if q is None else q
# some default arm positions
self.q0 = np.array([np.pi/4, np.pi/4, np.pi/4]) if q0 is None else q0
# arm segment lengths
self.L = np.array([1, 1, 1]) if L is None else L
self.max_angles = [np.pi, np.pi, np.pi/4]
self.min_angles = [0, 0, -np.pi/4]
def get_xy(self, q=None):
"""Returns the corresponding hand xy coordinates for
a given set of joint angle values [shoulder, elbow, wrist],
and the above defined arm segment lengths, L
q : np.array
the list of current joint angles
returns : list
the [x,y] position of the arm
"""
if q is None:
q = self.q
x = self.L[0]*np.cos(q[0]) + \
self.L[1]*np.cos(q[0]+q[1]) + \
self.L[2]*np.cos(np.sum(q))
y = self.L[0]*np.sin(q[0]) + \
self.L[1]*np.sin(q[0]+q[1]) + \
self.L[2]*np.sin(np.sum(q))
return [x, y]
def inv_kin(self, xy):
"""This is just a quick write up to find the inverse kinematics
for a 3-link arm, using the SciPy optimize package minimization
function.
Given an (x,y) position of the hand, return a set of joint angles (q)
using constraint based minimization, constraint is to match hand (x,y),
minimize the distance of each joint from it's default position (q0).
xy : tuple
the desired xy position of the arm
returns : list
the optimal [shoulder, elbow, wrist] angle configuration
"""
def distance_to_default(q, *args):
"""Objective function to minimize
Calculates the euclidean distance through joint space to the
            default arm configuration. The weight list scales the penalty
            for each joint being away from its resting position, so the arm
            keeps higher-weighted joints closer to the resting state than
            lower-weighted ones.
q : np.array
the list of current joint angles
returns : scalar
euclidean distance to the default arm position
"""
# weights found with trial and error,
# get some wrist bend, but not much
weight = [1, 1, 1.3]
return np.sqrt(np.sum([(qi - q0i)**2 * wi
for qi, q0i, wi in zip(q, self.q0, weight)]))
def x_constraint(q, xy):
"""Returns the corresponding hand xy coordinates for
a given set of joint angle values [shoulder, elbow, wrist],
and the above defined arm segment lengths, L
q : np.array
the list of current joint angles
xy : np.array
current xy position (not used)
returns : np.array
the difference between current and desired x position
"""
x = (self.L[0]*np.cos(q[0]) + self.L[1]*np.cos(q[0]+q[1]) +
self.L[2]*np.cos(np.sum(q))) - xy[0]
return x
def y_constraint(q, xy):
"""Returns the corresponding hand xy coordinates for
a given set of joint angle values [shoulder, elbow, wrist],
and the above defined arm segment lengths, L
q : np.array
the list of current joint angles
xy : np.array
current xy position (not used)
returns : np.array
the difference between current and desired y position
"""
y = (self.L[0]*np.sin(q[0]) + self.L[1]*np.sin(q[0]+q[1]) +
self.L[2]*np.sin(np.sum(q))) - xy[1]
return y
def joint_limits_upper_constraint(q, xy):
"""Used in the function minimization such that the output from
this function must be greater than 0 to be successfully passed.
q : np.array
the current joint angles
xy : np.array
current xy position (not used)
returns : np.array
all > 0 if constraint matched
"""
return self.max_angles - q
def joint_limits_lower_constraint(q, xy):
"""Used in the function minimization such that the output from
this function must be greater than 0 to be successfully passed.
q : np.array
the current joint angles
xy : np.array
current xy position (not used)
returns : np.array
all > 0 if constraint matched
"""
return q - self.min_angles
return scipy.optimize.fmin_slsqp(
func=distance_to_default,
x0=self.q,
eqcons=[x_constraint,
y_constraint],
# uncomment to add in min / max angles for the joints
# ieqcons=[joint_limits_upper_constraint,
# joint_limits_lower_constraint],
args=(xy,),
iprint=0) # iprint=0 suppresses output
def test():
# ###########Test it!##################
arm = Arm3Link()
# set of desired (x,y) hand positions
x = np.arange(-.75, .75, .05)
y = np.arange(.25, .75, .05)
# threshold for printing out information, to find trouble spots
thresh = .025
count = 0
total_error = 0
# test it across the range of specified x and y values
for xi in range(len(x)):
for yi in range(len(y)):
# test the inv_kin function on a range of different targets
xy = [x[xi], y[yi]]
# run the inv_kin function, get the optimal joint angles
q = arm.inv_kin(xy=xy)
# find the (x,y) position of the hand given these angles
actual_xy = arm.get_xy(q)
# calculate the root squared error
error = np.sqrt(np.sum((np.array(xy) - np.array(actual_xy))**2))
# total the error
total_error += np.nan_to_num(error)
# if the error was high, print out more information
if np.sum(error) > thresh:
print('-------------------------')
print('Initial joint angles', arm.q)
print('Final joint angles: ', q)
print('Desired hand position: ', xy)
print('Actual hand position: ', actual_xy)
print('Error: ', error)
print('-------------------------')
count += 1
print('\n---------Results---------')
print('Total number of trials: ', count)
print('Total error: ', total_error)
print('-------------------------')
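def demo_single_target():
    # Hedged quick demo (the target coordinates are illustrative): solve a
    # single target and print the resulting joint angles and the hand
    # position they produce.
    arm = Arm3Link()
    q = arm.inv_kin(xy=(0.4, 0.6))
    print('joint angles:', q)
    print('hand position:', arm.get_xy(q))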
if __name__ == '__main__':
test()
|
gpl-3.0
| 4,691,724,446,632,460,000
| 34.373333
| 79
| 0.547305
| false
| 4.128112
| false
| false
| false
|
Mansilla1/Sistema-SEC
|
apps/usuarios/views.py
|
1
|
14404
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Code developed by Daniel Mansilla
from django.shortcuts import render, render_to_response, redirect
from django.views.generic import ListView, CreateView, DetailView, DeleteView, UpdateView, TemplateView
from django.http import HttpResponse, HttpResponseRedirect, HttpResponseBadRequest
from django.core.urlresolvers import reverse_lazy
from django.core import serializers
from django.contrib.auth.models import User
from django.contrib.auth import login, logout
from django.utils import timezone
from django.template import RequestContext
# import django_excel as excel
from django.contrib import messages
from django.core.mail import send_mail
from django.conf import settings
import openpyxl
from openpyxl import Workbook
from openpyxl.styles import Font
from io import BytesIO
from Proyecto.utilities import generate_pdf
from random import randrange, uniform
import random
import requests
import ast
from django.db.models import Count
# from django.utils import simplejson
# import simplejson
from .forms import *
from .models import *
def login_user(request):
template_name = 'login/login.html'
logout(request)
username = password = ''
request.session['token'] = None
if request.POST:
post_data = {'username': request.POST["username"],'password':request.POST["password"]}
response = requests.post('http://cubesoa.asuscomm.com:90/rest-auth/login/', data=post_data)
content = response.content
content = ast.literal_eval(content)
if "key" in content:
post_data2 = {'username': str(request.POST["username"])}
header = {'Content-Type':'application/json','Authorization':'Token ' + content['key']}
response2 = requests.get('http://cubesoa.asuscomm.com:90/rest-auth/user/',headers=header, data=post_data2)
content2 = response2.content
content2 = ast.literal_eval(content2)
request.session["pk"] = content2['pk']
request.session["first_name"] = content2['first_name']
request.session["last_name"] = content2['last_name']
request.session["email"] = content2['email']
request.session["token"] = content['key']
return HttpResponseRedirect(reverse_lazy('inicio'))
# elif
# return redirect('inicio')
return render(request, template_name, {})
# def index(request):
# # perfil_user = PerfilUsuario.objects.get(user__id=request.user.id)
# # usuarios = PerfilUsuario.objects.all().count()
# # contenidos = ProfeSesion.objects.filter(status=True).count()
# # preguntas = Pregunta.objects.filter(status=True).count()
# # evaluaciones = Evaluacion.objects.filter(disponible=True).count()
# # usuario_registrados = PerfilUsuario.objects.all().order_by('created_at')[:5].reverse()
# # ------------------------------------------
# # OBTENER RANKINGS
# # ------------------------------------------
# user_pregunta = User.objects.exclude(perfilusuario__tipo_usuario='Estudiante').annotate(preguntas=Count('pregunta')).order_by('-preguntas')[:5]
# user_evaluacion = User.objects.exclude(perfilusuario__tipo_usuario='Estudiante').annotate(evaluaciones=Count('evaluacion')).order_by('-evaluaciones')[:5]
# orden_preguntas = Pregunta.objects.all().order_by('-cant_usada')[:5]
# context = {
# 'user_pregunta': user_pregunta,
# 'user_evaluacion': user_evaluacion,
# 'orden_preguntas': orden_preguntas,
# # 'perfil_user': perfil_user,
# # 'usuarios': usuarios,
# # 'preguntas': preguntas,
# # 'contenidos': contenidos,
# # 'evaluaciones': evaluaciones,
# # 'usuario_registrados': usuario_registrados,
# }
# return render(request, 'index.html', context)
def usuarioList(request):
usuario = PerfilUsuario.objects.all()
context = {
'usuario': usuario,
}
return render(request, 'apps/usuarios/usuario_list.html', context)
def usuarioCreate(request):
if request.POST:
form = PerfilForm(request.POST)
form2 = RegistroForm(request.POST)
if form.is_valid() and form2.is_valid():
form2 = form2.save(commit=False)
form2.save()
form = form.save(commit=False)
form.user = form2
form.save()
            # Get the username
user=form2.username
nombre = form.nombres + ' ' +form.apellido1 + ' ' +form.apellido2
contrasena = 'unab2020'
correo = form.email
tipouser = form.tipo_usuario
subject = 'Bienvenido al Portal SEC!'
message = 'Hola %s!\nusuario: %s, password: %s' % (nombre, user, contrasena)
send_mail(
subject,
message,
settings.EMAIL_HOST_USER,
[correo],
fail_silently=False,
)
return redirect('usuarios:listar')
else:
form = PerfilForm()
form2 = RegistroForm()
context = {
'form': form,
'form2': form2,
}
return render(request, 'apps/usuarios/usuario_create.html', context)
def usuarioUpdate(request, usuario_id):
# usuario = User.objects.get(id=usuario_id)
# id_user = int(usuario.id)
perfil = PerfilUsuario.objects.get(user=usuario_id)
if request.method == 'GET':
# form = RegistroForm(instance=usuario)
form = PerfilForm(instance=perfil)
else:
# form = RegistroForm(request.POST, instance=usuario)
form = PerfilForm(request.POST, instance=perfil)
if form.is_valid():
form.save()
# form2 = form2.save(commit=False)
# form2.user = usuario
# form2.save()
return redirect('usuarios:listar')
context = {
'form': form,
# 'form2': form2,
'perfil': perfil,
}
return render(request, 'apps/usuarios/usuario_update.html', context)
# class UsuarioDetail(DetailView):
# model = PerfilUsuario
# template_name = 'apps/usuarios/usuario_detail.html'
# context_object_name = 'usuario'
# def get_context_data(self, **kwargs):
# context = super(UsuarioDetail, self).get_context_data(**kwargs)
# context['title'] = 'Detalle de usuario'
# return context
def usuarioDelete(request, usuario_id):
usuario = User.objects.get(id=usuario_id)
if request.method == 'POST':
usuario.delete()
return redirect('usuarios:listar')
return render(request, 'apps/usuarios/usuario_delete.html', {'usuario':usuario})
# Excel spreadsheets
def get_planilla_usuario(request):
    # generate the excel file
wb = Workbook()
# ws = wb.create_sheet("Calificaciones",0)
ws = wb.active
ws.title = 'Usuarios'
# ws.font = ws.font.copy(bold=True, italic=True)
    # Column headers
a1 = ws.cell(row=1, column=1, value='RUN')
a2 = ws.cell(row=1, column=2, value='Nombres')
a3 = ws.cell(row=1, column=3, value='Apellido Paterno')
a4 = ws.cell(row=1, column=4, value='Apellido Materno')
a5 = ws.cell(row=1, column=5, value='Email')
a6 = ws.cell(row=1, column=6, value='Usuario')
a7 = ws.cell(row=1, column=7, value='Tipo Usuario')
# a7 = ws.cell(row=1, column=7, value='¿Coordinador de asignatura? (si/no)')
a1.font = Font(bold=True)
a2.font = Font(bold=True)
a3.font = Font(bold=True)
a4.font = Font(bold=True)
a5.font = Font(bold=True)
a6.font = Font(bold=True)
a7.font = Font(bold=True)
nombre_archivo = 'Planilla_usuarios.xlsx'
response = HttpResponse(content_type="application/ms-excel")
contenido = "attachment; filename={0}".format(nombre_archivo)
response["Content-Disposition"] = contenido
wb.save(response)
return response
def upload(request):
if request.POST:
        # initialize excel
        excel = request.FILES['archivo'].read()  # capture the uploaded file
        wb = openpyxl.load_workbook(filename=BytesIO(excel))  # open the workbook
        hojas = wb.get_sheet_names()  # get the sheet names in the file
        hoja = wb.get_sheet_by_name(hojas[0])  # use the first sheet of the document
        total_filas = hoja.max_row  # maximum number of rows to read
        total_columnas = hoja.max_column  # maximum number of columns
        user_no_register = []
        # read/write loop
        for i in range(2, total_filas+1):
            form = PerfilForm()  # user profile form
            form2 = RegistroForm(request.POST or None)  # user registration form
# form3 = ProfesorForm()
# form3_2 = CoordinadorForm()
# form4 = EstudianteForm()
            # check whether the user already exists
username = hoja.cell(row=i,column=6).value
print username
try:
usuario = User.objects.get(username=username)
usuario = usuario.username
print 'usuario ya existe'
user_no_register += [usuario]
except:
rut = hoja.cell(row=i,column=1).value
nombre = hoja.cell(row=i,column=2).value
apellido1 = hoja.cell(row=i,column=3).value
apellido2 = hoja.cell(row=i,column=4).value
correo = hoja.cell(row=i,column=5).value
usuario = hoja.cell(row=i,column=6).value
tipo_usuario = hoja.cell(row=i,column=7).value
nombre = nombre.capitalize()
apellido1 = apellido1.capitalize()
apellido2 = apellido2.capitalize()
tipo_usuario = tipo_usuario.capitalize()
if tipo_usuario == 'Comité académico' or tipo_usuario == 'Comite académico' or tipo_usuario == 'Comité academico':
tipo_usuario = 'Comite academico'
print tipo_usuario
# numero_random = randrange(100,999)
# contrasena = "%s%s%s%s" % (nombre[0].capitalize(),numero_random, apellido[:2], numero_random)
contrasena = "unab2020"
# form2.set_password(self.cleaned_data["password1"])
# form2.set_password(self.cleaned_data["password2"])
form2 = form2.save(commit=False)
form2.username = usuario
# form2.first_name = nombre
# form2.last_name = apellido
# form2.email = correo
form2.password1 = contrasena
form2.password2 = contrasena
form2.save()
form = form.save(commit=False)
form.rut = rut
form.nombres = nombre
form.apellido1 = apellido1
form.apellido2 = apellido2
form.email = correo
form.tipo_usuario = tipo_usuario
form.user = form2
form.save()
# if form.tipo_usuario == 'Docente':
# form3 = form3.save(commit=False)
# form3.usuario = form
# form3.save()
# # if coordinador=='si' or coordinador=='SI' or coordinador=='Si' or coordinador=='sI':
# # form3_2 = form3_2.save(commit=False)
# # form3_2.profesor = form
# # form3_2.save()
# elif form.tipo_usuario == 'Estudiante':
# form4 = form4.save(commit=False)
# form4.usuario = form
# form4.save()
            # Get the username
user =form2.username
nombre = "%s %s %s" %(form.nombres, form.apellido1, form.apellido2)
correo = form.email
tipouser = form.tipo_usuario
subject = 'Bienvenido al Portal SEC!'
message = 'usuario: %s, password %s' % (user, contrasena)
send_mail(
subject,
message,
settings.EMAIL_HOST_USER,
[correo],
fail_silently=False,
)
print user_no_register
return redirect('usuarios:listar')
else:
form = PerfilForm()
form2 = RegistroForm()
# form3 = ProfesorForm()
# # form3_2 = CoordinadorForm()
# form4 = EstudianteForm()
context = {
'form': form,
'form2': form2,
# 'form3': form3,
# # 'form3_2': form3_2,
# 'form4': form4,
}
return render(request, 'apps/usuarios/usuario_upload.html', context)
# ESTUDIANTES
def estudiante_list(request):
estudiantes = Estudiante.objects.all()
return render(request, 'apps/usuarios/estudiantes_list.html', {'estudiantes': estudiantes})
def estudiante_create(request):
if request.POST:
form = EstudianteForm(request.POST)
if form.is_valid():
form.save()
return redirect('usuarios:listar_estudiantes')
else:
form = EstudianteForm()
context = {'form': form}
return render(request, 'apps/usuarios/estudiante_create.html', context)
def upload_estudiante(request):
if request.POST:
        # initialize excel
        excel = request.FILES['archivo'].read()  # capture the uploaded file
        wb = openpyxl.load_workbook(filename=BytesIO(excel))  # open the workbook
        hojas = wb.get_sheet_names()  # get the sheet names in the file
        hoja = wb.get_sheet_by_name(hojas[0])  # use the first sheet of the document
        total_filas = hoja.max_row  # maximum number of rows to read
        total_columnas = hoja.max_column  # maximum number of columns
        user_no_register = []
        # read/write loop
        for i in range(2, total_filas+1):
            form = EstudianteForm()  # student form
            # check whether the student already exists
rut = hoja.cell(row=i,column=1).value
estudiante_no_register = []
try:
estudiante = Estudiante.objects.get(rut=rut)
print 'estudiante ya existe'
estudiante_no_register += [estudiante]
except:
rut = hoja.cell(row=i,column=1).value
nombres = hoja.cell(row=i,column=2).value
apellido1 = hoja.cell(row=i,column=3).value
apellido2 = hoja.cell(row=i,column=4).value
correo = hoja.cell(row=i,column=5).value
nombre2 = ''
nombre3 = ''
nombres = nombres.capitalize()
apellido1 = apellido1.capitalize()
apellido2 = apellido2.capitalize()
form = form.save(commit=False)
form.rut = rut
form.nombre1 = nombres
form.nombre2 = nombre2
form.nombre3 = nombre3
form.apellido1 = apellido1
form.apellido2 = apellido2
form.email = correo
form.save()
print estudiante_no_register
return redirect('usuarios:listar_estudiantes')
else:
form = EstudianteForm()
context = {
'form': form,
}
return render(request, 'apps/usuarios/estudiante_upload.html', context)
def get_planilla_estudiante(request):
    # generate the excel file
wb = Workbook()
# ws = wb.create_sheet("Calificaciones",0)
ws = wb.active
ws.title = 'Estudiantes'
# ws.font = ws.font.copy(bold=True, italic=True)
    # Column headers
a1 = ws.cell(row=1, column=1, value='RUN')
a2 = ws.cell(row=1, column=2, value='Nombres')
a3 = ws.cell(row=1, column=3, value='Apellido Paterno')
a4 = ws.cell(row=1, column=4, value='Apellido Materno')
a5 = ws.cell(row=1, column=5, value='Email')
a1.font = Font(bold=True)
a2.font = Font(bold=True)
a3.font = Font(bold=True)
a4.font = Font(bold=True)
a5.font = Font(bold=True)
nombre_archivo = 'Planilla_estudiantes.xlsx'
response = HttpResponse(content_type="application/ms-excel")
contenido = "attachment; filename={0}".format(nombre_archivo)
response["Content-Disposition"] = contenido
wb.save(response)
return response
# AJAX
class GetEstudiantes(TemplateView):
def get(self, request, *args, **kwargs):
estudiante = Estudiante.objects.all()
print estudiante
data = serializers.serialize('json', estudiante)
return HttpResponse(data, content_type="application/json")
|
apache-2.0
| 1,000,900,255,023,441,300
| 29.313684
| 156
| 0.682317
| false
| 2.691215
| false
| false
| false
|
svenstaro/python-web-boilerplate
|
boilerplateapp/models/user.py
|
1
|
2930
|
"""Module containing the `User` model."""
import uuid
import secrets
import string
from datetime import datetime
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy_utils.models import Timestamp
from flask import current_app
from boilerplateapp.extensions import db, passlib
class User(db.Model, Timestamp):
"""User model."""
id = db.Column(UUID(as_uuid=True), primary_key=True, nullable=False, default=uuid.uuid4)
email = db.Column(db.String(120), unique=True, nullable=False, index=True)
password_hash = db.Column(db.String(120), nullable=False)
current_auth_token = db.Column(db.String(32), index=True)
last_action = db.Column(db.DateTime)
def __init__(self, email, password):
"""Construct a `User`.
Accepts an `email` and a `password`. The password is securely hashed
before being written to the database.
"""
self.email = email
self.set_password(password)
def __repr__(self):
"""Format a `User` object."""
return '<User {email}>'.format(email=self.email)
def set_password(self, new_password):
"""Hash a given `new_password` and write it into the `User.password_hash` attribute.
        It does not add this change to the session nor commit the transaction!
"""
self.password_hash = passlib.pwd_context.hash(new_password)
def verify_password(self, candidate_password):
"""Verify a given `candidate_password` against the password hash stored in the `User`.
Returns `True` if the password matches and `False` if it doesn't.
"""
return passlib.pwd_context.verify(candidate_password, self.password_hash)
def generate_auth_token(self):
"""Generate an auth token and save it to the `current_auth_token` column."""
alphabet = string.ascii_letters + string.digits
new_auth_token = ''.join(secrets.choice(alphabet) for i in range(32))
self.current_auth_token = new_auth_token
self.last_action = datetime.utcnow()
db.session.add(self)
db.session.commit()
return new_auth_token
@property
def has_valid_auth_token(self):
"""Return whether or not the user has a valid auth token."""
latest_valid_date = datetime.utcnow() - current_app.config['AUTH_TOKEN_TIMEOUT']
return (self.last_action and
self.last_action > latest_valid_date and
self.current_auth_token)
@staticmethod
def get_user_from_login_token(token):
"""Get a `User` from a login token.
A login token has this format:
<user uuid>:<auth token>
"""
user_id, auth_token = token.split(':')
user = db.session.query(User).get(user_id)
if user and user.current_auth_token:
if secrets.compare_digest(user.current_auth_token, auth_token):
return user
return None
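if __name__ == '__main__':
    # Hedged stand-alone demo (no database needed; the values are invented):
    # the '<user uuid>:<auth token>' login-token format that
    # get_user_from_login_token() parses, compared with the same
    # constant-time check used in the model above.
    demo_uid = str(uuid.uuid4())
    alphabet = string.ascii_letters + string.digits
    demo_token = ''.join(secrets.choice(alphabet) for _ in range(32))
    login_token = '{}:{}'.format(demo_uid, demo_token)
    user_id, auth_token = login_token.split(':')
    assert user_id == demo_uid
    assert secrets.compare_digest(auth_token, demo_token)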
|
mit
| 2,480,630,203,031,518,700
| 35.17284
| 94
| 0.646075
| false
| 3.943472
| false
| false
| false
|
fdslight/fdslight
|
pywind/web/handlers/websocket.py
|
1
|
8933
|
#!/usr/bin/env python3
import pywind.evtframework.handlers.tcp_handler as tcp_handler
import pywind.web.lib.websocket as websocket
import pywind.web.lib.httputils as httputils
import socket, time
class ws_listener(tcp_handler.tcp_handler):
def init_func(self, creator, listen, is_ipv6=False):
if is_ipv6:
fa = socket.AF_INET6
else:
fa = socket.AF_INET
s = socket.socket(fa, socket.SOCK_STREAM)
if is_ipv6: s.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 1)
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.set_socket(s)
self.bind(listen)
return self.fileno
def after(self):
self.listen(10)
self.register(self.fileno)
self.add_evt_read(self.fileno)
def tcp_accept(self):
while 1:
try:
cs, caddr = self.accept()
except BlockingIOError:
break
self.ws_accept(cs,caddr)
''''''
def ws_accept(self,cs,caddr):
"""重写这个方法
:param cs:客户端套接字对象
:param caddr:客户端地址
:return:
"""
pass
def tcp_delete(self):
self.ws_release()
self.unregister(self.fileno)
self.close()
def ws_release(self):
"""重写这个方法
:return:
"""
class ws_handler(tcp_handler.tcp_handler):
__conn_timeout = 60
__caddr = None
__encoder = None
__decoder = None
__is_handshake = None
__LOOP_TIMEOUT = 20
__update_time = 0
    # custom handshake response headers
__ext_handshake_resp_headers = None
__is_close = False
__is_sent_ping = False
def init_func(self, creator, cs, caddr):
self.__caddr = caddr
self.__decoder = websocket.decoder(server_side=True)
self.__encoder = websocket.encoder(server_side=True)
self.__is_handshake = False
self.__ext_handshake_resp_headers = []
self.__is_close = False
self.set_socket(cs)
self.register(self.fileno)
self.add_evt_read(self.fileno)
self.set_timeout(self.fileno, self.__LOOP_TIMEOUT)
self.ws_init()
return self.fileno
def ws_init(self):
"""重写这个方法
:return:
"""
pass
@property
def caddr(self):
return self.__caddr
def response_error(self):
resp_sts = httputils.build_http1x_resp_header("400 Bad Request", [("Sec-WebSocket-Version", 13), ],
version="1.1")
self.writer.write(resp_sts.encode("iso-8859-1"))
self.add_evt_write(self.fileno)
self.delete_this_no_sent_data()
def __do_handshake(self, byte_data):
try:
sts = byte_data.decode("iso-8859-1")
except UnicodeDecodeError:
self.response_error()
return False
try:
rs = httputils.parse_htt1x_request_header(sts)
except:
self.response_error()
return False
req, headers = rs
dic = {}
for k, v in headers:
k = k.lower()
dic[k] = v
if "sec-websocket-key" not in dic: return False
ws_version = dic.get("sec-websocket-version", 0)
is_err = False
try:
ws_version = int(ws_version)
if ws_version != 13: is_err = True
except ValueError:
is_err = True
if is_err:
self.response_error()
return False
if not self.on_handshake(req, headers):
self.response_error()
return False
sec_ws_key = dic["sec-websocket-key"]
resp_sec_key = websocket.gen_handshake_key(sec_ws_key)
resp_headers = [("Upgrade", "websocket"), ("Connection", "Upgrade"), ("Sec-WebSocket-Accept", resp_sec_key)]
resp_headers += self.__ext_handshake_resp_headers
resp_sts = httputils.build_http1x_resp_header("101 Switching Protocols", resp_headers, version="1.1")
self.writer.write(resp_sts.encode("iso-8859-1"))
self.add_evt_write(self.fileno)
return True
def __handle_ping(self, message):
self.__send_pong(message)
def __handle_pong(self):
self.__is_sent_ping = False
self.__update_time = time.time()
def __handle_close(self):
if not self.__is_close:
self.ws_close()
return
self.delete_handler(self.fileno)
def __send_ping(self):
wrap_msg = self.__encoder.build_ping()
self.__is_sent_ping = True
self.__update_time = time.time()
self.writer.write(wrap_msg)
self.add_evt_write(self.fileno)
def __send_pong(self, message):
wrap_msg = self.__encoder.build_pong(message)
self.__update_time = time.time()
        self.writer.write(wrap_msg)
        self.add_evt_write(self.fileno)
    def on_handshake(self, request, headers):
        """Override this method in subclasses.
        :param request:
        :param headers:
        :return Boolean: False to reject the handshake, True to accept it
        """
return True
    def set_handshake_resp_header(self, name, value):
        """Set an extra handshake response header.
        :param name:
        :param value:
        :return:
        """
self.__ext_handshake_resp_headers.append((name, value,))
def set_ws_timeout(self, timeout):
self.__conn_timeout = int(timeout)
if self.__conn_timeout < 1: raise ValueError("wrong timeout value")
def tcp_readable(self):
rdata = self.reader.read()
if not self.__is_handshake:
if not self.__do_handshake(rdata): return
self.__is_handshake = True
return
self.__decoder.input(rdata)
while self.__decoder.continue_parse():
self.__decoder.parse()
if not self.__decoder.can_read_data(): continue
data = self.__decoder.get_data()
self.__handle_readable(data, self.__decoder.fin, self.__decoder.rsv, self.__decoder.opcode,
self.__decoder.frame_ok())
if self.__decoder.frame_ok(): self.__decoder.reset()
self.__update_time = time.time()
return
def __handle_readable(self, message, fin, rsv, opcode, frame_finish):
"""
:param message:
:param fin:
:param rsv:
:param opcode:
:param frame_finish:
:return:
"""
if opcode == websocket.OP_CLOSE:
self.__handle_close()
return
if opcode == websocket.OP_PING:
self.__handle_ping(message)
return
if opcode == websocket.OP_PONG:
self.__handle_pong()
return
if not message: return
if message: self.ws_readable(message, fin, rsv, opcode, frame_finish)
def tcp_writable(self):
self.remove_evt_write(self.fileno)
def tcp_error(self):
self.delete_handler(self.fileno)
def tcp_delete(self):
self.ws_release()
self.unregister(self.fileno)
self.close()
def tcp_timeout(self):
if not self.__is_handshake:
self.delete_handler(self.fileno)
return
t = time.time()
if t - self.__update_time >= self.__conn_timeout:
if self.__is_close or self.__is_sent_ping:
self.delete_handler(self.fileno)
return
self.__send_ping()
self.set_timeout(self.fileno, self.__LOOP_TIMEOUT)
    def sendmsg(self, msg, fin, rsv, opcode):
        """Send a websocket message.
        :param msg:
        :return:
        """
        if opcode in (0x8, 0x9, 0xa,): raise ValueError("ping, pong and close frames cannot be sent by this function")
        if self.__is_close: raise ValueError("the connection is closed, you should not send data")
self.__update_time = time.time()
wrap_msg = self.__encoder.build_frame(msg, fin, rsv, opcode)
self.add_evt_write(self.fileno)
self.writer.write(wrap_msg)
    def ws_readable(self, message, fin, rsv, opcode, frame_finish):
        """Override this method in subclasses.
        :param message:
        :param fin:
        :param rsv:
        :param opcode:
        :param frame_finish:
        :return:
        """
        pass
    def ws_close(self, code=None):
        """Close the websocket connection.
        :return:
        """
if not code:
code = ""
else:
code = str(code)
wrap_msg = self.__encoder.build_close(code.encode("iso-8859-1"))
self.__is_close = True
self.add_evt_write(self.fileno)
self.writer.write(wrap_msg)
self.__update_time = time.time()
self.delete_this_no_sent_data()
    def ws_release(self):
        """Override this method in subclasses.
        :return:
        """
        pass
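# Illustrative sketch, not from the original module: a minimal echo handler
# built on ws_handler. 0x1 is the RFC 6455 text-frame opcode; it is written
# out literally here because the module above only shows named constants for
# close/ping/pong frames.
class ws_echo_handler(ws_handler):
    def ws_readable(self, message, fin, rsv, opcode, frame_finish):
        # Echo every complete text frame straight back to the client.
        if fin and frame_finish:
            self.sendmsg(message, True, 0, 0x1)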
|
bsd-2-clause
| -8,273,594,278,908,964,000
| 25.149254
| 116
| 0.545953
| false
| 3.66946
| false
| false
| false
|
broomyocymru/ditto
|
ditto/core/cache.py
|
1
|
1389
|
import glob
import os
import shutil
import uuid
from os.path import expanduser
import requests
from ditto.core import logger
def setup():
global session_uuid
session_uuid = str(uuid.uuid1())
def cleanup():
shutil.rmtree(get_cache_dir(), True)
def get_session_uuid():
return session_uuid
def get_file(file_path):
if file_path.startswith('http'):
fname = file_path.split('/')[-1]
if not os.path.exists(get_cache_dir()):
os.makedirs(get_cache_dir())
local_path = os.path.abspath(get_cache_dir() + '/' + fname)
r = requests.get(file_path, stream=True)
if r.status_code == 200:
with open(local_path, 'wb') as f:
shutil.copyfileobj(r.raw, f)
del r
else:
logger.error("Download failed (" + file_path + ")")
file_path = local_path
else:
file_paths = glob.glob(file_path)
if len(file_paths) > 1:
logger.warn("More than 1 file found, taking first")
if len(file_paths) == 0:
logger.error("File not found (" + file_path + ")")
file_path = os.path.abspath(file_paths[0])
return file_path
def get_cache_dir():
cache_dir = os.path.abspath(os.path.join(expanduser("~"), ".ditto_cache"))
if not os.path.exists(cache_dir):
os.makedirs(cache_dir)
return cache_dir
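# Illustrative sketch, not from the original module: get_file() handles both
# remote URLs (downloaded into ~/.ditto_cache) and local glob patterns. The
# paths below are hypothetical.
if __name__ == '__main__':
    setup()
    local = get_file('/tmp/config*.yml')                # resolves a glob
    remote = get_file('http://example.com/config.yml')  # downloads to the cache
    cleanup()                                           # removes the cache dir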
|
mit
| -2,744,009,419,647,377,400
| 21.419355
| 78
| 0.587473
| false
| 3.481203
| false
| false
| false
|
jemofthewest/GalaxyMage
|
src/Sound.py
|
1
|
2427
|
# Copyright (C) 2005 Colin McMillen <mcmillen@cs.cmu.edu>
#
# This file is part of GalaxyMage.
#
# GalaxyMage is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# GalaxyMage is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GalaxyMage; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
# You may import this module only after the pygame mixer module has
# been initialized.
import Resources
import pygame
_quiet = False
_mixerInit = pygame.mixer.get_init() != None
if _mixerInit:
_cursorChannel = pygame.mixer.Channel(0)
_actionChannel = pygame.mixer.Channel(1)
else:
_quiet = True
_cursorChannel = None
_actionChannel = None
def _play(channel, sound):
if not _mixerInit:
return
if not _quiet and sound != None:
channel.play(sound)
def setQuiet(quiet):
global _quiet
if not _mixerInit:
return
_quiet = quiet
if _quiet:
pygame.mixer.pause()
pygame.mixer.music.pause()
else:
pygame.mixer.unpause()
pygame.mixer.music.unpause()
def toggleQuiet():
setQuiet(not _quiet)
def playMusic(musicName):
"""Changes background music."""
if not _mixerInit:
return
if not _quiet:
Resources.music(musicName)
def playTune(tuneName):
"""Plays a short tune. Returns whether it was actually played."""
if _mixerInit and not _quiet:
Resources.music(tuneName, loop=False)
return True
else:
return False
def cursorClick():
s = Resources.sound("cursor-click")
_play(_cursorChannel, s)
def cursorCancel():
s = Resources.sound("cursor-cancel")
_play(_cursorChannel, s)
def cursorMove():
s = Resources.sound("cursor-move")
_play(_cursorChannel, s)
def cursorInvalid():
s = Resources.sound("cursor-invalid")
_play(_cursorChannel, s)
def action(sound):
s = Resources.sound(sound)
_play(_actionChannel, s)
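# Illustrative sketch, not from the original module: a typical call sequence.
# Per the note above, pygame's mixer must already be initialized by the host
# application before this module is imported; every helper then degrades to a
# no-op when no audio device is available.
# playMusic("title-theme")  # hypothetical resource name
# cursorMove()
# toggleQuiet()             # pauses/unpauses both channels and the music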
|
gpl-2.0
| -3,338,352,428,866,749,400
| 23.765306
| 70
| 0.679852
| false
| 3.574374
| false
| false
| false
|
JohanComparat/pySU
|
galaxy/python/lineListAir.py
|
1
|
4082
|
"""
Script loading the atomic properties from the pyNEB package.
Mostly line transitions.
Input to the line fitting procedures
"""
import numpy as n
from scipy.interpolate import interp1d
import pyneb as pn
# Conversion from Morton (1991, ApJS, 77, 119) wavelength in Angstrom
# SDSS spectra are in the vacuum, therefore the ref wavelengths of the lines must be in the vacuum.
AIR = lambda VAC : VAC / (1.0 + 2.735182e-4 + 131.4182 / VAC**2 + 2.76249e8 / VAC**4)
vacs=n.arange(1000,12000,0.01)
airs=AIR(vacs)
VAC = interp1d(airs,vacs)
# Wavelengths from pyNeb Atoms are in A in vacuum like the SDSS spectra. No need to convert.
C3 = pn.Atom('C', 3)
#C3.printIonic()
C3_1908=AIR(1/(C3.getEnergy(C3.getTransition(1908)[0])-C3.getEnergy(C3.getTransition(1908)[1])))
C4 = pn.Atom('C', 4)
#C4.printIonic()
C4_1548=AIR(1/(C4.getEnergy(C4.getTransition(1548)[0])-C4.getEnergy(C4.getTransition(1548)[1])))
O2 = pn.Atom('O', 2)
#O2.printIonic()
O2_3727=AIR(1/(O2.getEnergy(O2.getTransition(3727)[0])-O2.getEnergy(O2.getTransition(3727)[1])))
O2_3729=AIR(1/(O2.getEnergy(O2.getTransition(3729)[0])-O2.getEnergy(O2.getTransition(3729)[1])))
#O2=AIR((O2_3727+O2_3729)/2.)
O2_mean=(O2_3727*3.326568+O2_3729*3.324086)/(3.326568 + 3.324086)
Ne3 = pn.Atom('Ne',3)
#Ne3.printIonic()
Ne3_3869=AIR(1/(Ne3.getEnergy(Ne3.getTransition(3869)[0])-Ne3.getEnergy(Ne3.getTransition(3869)[1])))
Ne3_3968=AIR(1/(Ne3.getEnergy(Ne3.getTransition(3968)[0])-Ne3.getEnergy(Ne3.getTransition(3968)[1])))
O3 = pn.Atom('O', 3)
#O3.printIonic()
O3_4363=AIR(1/(O3.getEnergy(O3.getTransition(4363)[0])-O3.getEnergy(O3.getTransition(4363)[1])))
O3_4960=AIR(1/(O3.getEnergy(O3.getTransition(4960)[0])-O3.getEnergy(O3.getTransition(4960)[1])))
O3_5007=AIR(1/(O3.getEnergy(O3.getTransition(5007)[0])-O3.getEnergy(O3.getTransition(5007)[1])))
O1 = pn.Atom('O', 1)
O1_5578=AIR(1/(O1.getEnergy(O1.getTransition(5578)[0])-O1.getEnergy(O1.getTransition(5578)[1])))
O1_6302=AIR(1/(O1.getEnergy(O1.getTransition(6302)[0])-O1.getEnergy(O1.getTransition(6302)[1])))
O1_6365=AIR(1/(O1.getEnergy(O1.getTransition(6365)[0])-O1.getEnergy(O1.getTransition(6365)[1])))
N2 = pn.Atom('N', 2)
#N2.printIonic()
N2_5756=AIR(1/(N2.getEnergy(N2.getTransition(5756)[0])-N2.getEnergy(N2.getTransition(5756)[1])))
N2_6549=AIR(1/(N2.getEnergy(N2.getTransition(6549)[0])-N2.getEnergy(N2.getTransition(6549)[1])))
N2_6585=AIR(1/(N2.getEnergy(N2.getTransition(6585)[0])-N2.getEnergy(N2.getTransition(6585)[1])))
S2 = pn.Atom('S', 2)
#S2.printIonic()
S2_6718=AIR(1/(S2.getEnergy(S2.getTransition(6718)[0])-S2.getEnergy(S2.getTransition(6718)[1])))
S2_6732=AIR(1/(S2.getEnergy(S2.getTransition(6732)[0])-S2.getEnergy(S2.getTransition(6732)[1])))
Ar3 = pn.Atom('Ar', 3)
#Ar3.printIonic()
Ar3_7137=AIR(1/(Ar3.getEnergy(Ar3.getTransition(7137)[0])-Ar3.getEnergy(Ar3.getTransition(7137)[1])))
# Wavelengths from pyNeb RecAtoms are in A in Air like the SDSS spectra. Conversion needed.
H1=pn.RecAtom('H',1) # Hydrogen Balmer series
H1_3970=H1.getWave(7,2)
H1_4102=H1.getWave(6,2)
H1_4341=H1.getWave(5,2)
H1_4862=H1.getWave(4,2)
H1_6564=H1.getWave(3,2)
H1=pn.RecAtom('H',1) # Hydrogen Lyman series
H1_1216=H1.getWave(2,1)
He1=pn.RecAtom('He',1) # Helium
He2=pn.RecAtom('He',2) # Helium
He2_4686=He2.getWave(4,3)
He2_5411=He2.getWave(7,4)
# Limits for the 4000 A fit
#dl4k=150
#intLim4k=n.array([3950-dl4k, 3950, 4050, 4050+dl4k])
#intLim4k=n.array([3600-dl4k, 3600, 4140, 4140+dl4k])
# limits for th eUV luminosities fits
#intLimUV=n.array([2000,2200,3000,3200,3400,3600,4100,4300,4500,4700])
# system at 2360
# cmin1,cmax1=2080.,2240.
#em1=2326.7
#abs1=2343.7
#em2=2365.3
#aTR=2370.
#abs2=2374.3
#abs3=2382.2
#em3=2396.2
# cmin2,cmax2=2400.,2550.
#a0s2360=n.array([em1,abs1,em2,abs2,abs3,em3])
# system at 2600
#em1=2586.1
#em2=2599.6
#aTR=2606.
#abs1=2612.5
#abs2=2626.3
#cmin1,cmax1=2400.,2550.
#cmin2,cmax2=2650.,2770.
#a0s2600=n.array([em1,em2,abs1,abs2])
# system at 2800
#Mga=2795.
#Mgb=2802.
#aTR=2798.
#cmin1,cmax1=2650.,2770.
#cmin2,cmax2=2807., 2840.
#a0s2800=n.array([Mga,Mgb])
# abs2852=3851.9
# cmin2,cmax2=2870.,3000.
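# Illustrative sketch, not from the original module: a worked example of the
# vacuum->air conversion defined at the top. For H-alpha, vacuum 6564.6 A
# maps to roughly 6562.8 A in air, and the interpolated inverse recovers the
# vacuum value on its 0.01 A grid.
if __name__ == '__main__':
    assert abs(AIR(6564.6) - 6562.8) < 0.1
    assert abs(VAC(AIR(6564.6)) - 6564.6) < 0.05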
|
cc0-1.0
| -7,262,391,922,114,196,000
| 31.141732
| 101
| 0.715091
| false
| 2
| false
| false
| false
|
JulyJ/MindBot
|
mindbot/router.py
|
1
|
2824
|
"""
Module designed to route messages based on the strategy pattern.
This module defines a command-to-class mapper tuple that correlates a command
received from a Telegram user with the target command class to run. It also
generates the help message from that command list.
"""
from typing import Any, Dict
from .command.help.commands import GreetingsCommand, HelpCommand
from .command.nasa.apod import APODCommand
from .command.nasa.asteroid import AsteroidCommand
from .command.nasa.curiosity import CuriosityCommand
from .command.search.google import GoogleCommand
from .command.search.wiki import WikiCommand, RandomCommand
from .command.search.urban import UrbanDictionaryCommand
from .command.search.dictionary import DictionaryCommand
from .command.weather.weather import WeatherCommand
from .command.weather.forecast import ForecastCommand
from .command.exchange.exchange import ExchangeCommand
from .command.remember.rememberall import RememberAll
from .command.remember.searchtag import SearchTagCommand
from .command.comics.xkcd import XkcdCommand
from .command.tools.qrgenerator import QrCommand
from .command.tools.ocr import OcrCommand
from .command.news.hackernews import LatestNewsCommand, TopNewsCommand, BestNewsCommand
from .command.news.canadanews import CanadaStatsCommand
class CommandRouter:
command_class_mapper = (
('/help', HelpCommand),
('/asteroid', AsteroidCommand),
('/start', GreetingsCommand),
('/canadastat', CanadaStatsCommand),
('/oxford', DictionaryCommand),
('/exchange', ExchangeCommand),
('/forecast', ForecastCommand),
('/google', GoogleCommand),
('/search', SearchTagCommand),
('/urban', UrbanDictionaryCommand),
('/weather', WeatherCommand),
('/curiosity', CuriosityCommand),
('/qr', QrCommand),
('/ocr', OcrCommand),
('/apod', APODCommand),
('/wiki', WikiCommand),
('/random', RandomCommand),
('/xkcd', XkcdCommand),
('/latestnews', LatestNewsCommand),
('/topnews', TopNewsCommand),
('/bestnews', BestNewsCommand),
('/remember', RememberAll),
)
@classmethod
def route(cls, message: Dict[str, Any]):
command, _, query = message['text'].partition(' ')
command = command.lower()
if command not in dict(cls.command_class_mapper):
return
command_class = dict(cls.command_class_mapper).get(command, None)
command_instance = command_class(cls, query, message)
return command_instance()
@classmethod
def get_commands_help(cls):
return (
(command, command_class.help_text)
for command, command_class in cls.command_class_mapper
if command_class.help_text is not None
)
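# Illustrative sketch, not from the original module: CommandRouter.route only
# inspects the "text" key of the Telegram message dict; it splits off the
# command word, lower-cases it, and dispatches to the mapped class. The
# payload below is hypothetical, and the command classes talk to external
# APIs, so this only shows the dispatch path.
# message = {'text': '/help', 'chat': {'id': 0}}
# CommandRouter.route(message)  # instantiates HelpCommand and calls it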
|
mit
| -6,548,554,171,890,835,000
| 37.684932
| 87
| 0.695467
| false
| 4.116618
| false
| false
| false
|
althalus/knotcrafters
|
knotdirectory/knotdirectory/knots/models.py
|
1
|
2886
|
from django.db import models
from taggit.managers import TaggableManager
from django.core.urlresolvers import reverse
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
from django.contrib.auth import get_user_model
User = get_user_model()
class Knot(models.Model):
name = models.CharField(max_length=90, help_text="Commonly accepted name for this tie")
other_names = models.TextField(help_text="Is this knot known by other names? One name per line, please", blank=True)
creator_name = models.CharField(max_length=90, help_text="Who should we credit for discovering this tie")
creator = models.ForeignKey('CreatorProfile', blank=True, null=True, editable=False)
notes = models.TextField(help_text="Any other information? Markdown text enabled.", blank=True)
tags = TaggableManager()
date_added = models.DateTimeField(auto_now_add=True)
date_updated = models.DateTimeField(auto_now=True)
photo = models.ImageField(upload_to="knotsimages/%Y/%m/", help_text="A photo of the completed tie.")
def save(self):
if not self.creator:
try:
self.creator = CreatorProfile.objects.get(name=self.creator_name)
except CreatorProfile.DoesNotExist:
cp = CreatorProfile()
cp.name = self.creator_name
cp.save()
self.creator = cp
super(Knot, self).save()
def get_absolute_url(self):
return reverse("knots.detail", args=[self.pk, ])
def __unicode__(self):
return u'%s' % self.name
class Link(models.Model):
knot = models.ForeignKey(Knot)
link = models.URLField(help_text="Link ot the guide")
name = models.CharField(max_length=90, help_text="A descriptive name for this guide")
def __unicode__(self):
return u'Link %s on knot %s' % (self.name, self.knot.name)
class Action(models.Model):
content_type = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField()
content_object = generic.GenericForeignKey('content_type', 'object_id')
user = models.ForeignKey(User)
when = models.DateTimeField(auto_now=True)
what = models.TextField()
def __unicode__(self):
return u'%s: %s %s %s' % (self.when, self.user, self.what, self.content_object)
class CreatorProfile(models.Model):
name = models.CharField(max_length=90)
link_facebook_profile = models.URLField(blank=True)
link_youtube_channel = models.URLField(blank=True)
link_website = models.URLField(blank=True)
email = models.EmailField(blank=True)
user = models.ForeignKey(User, blank=True, null=True)
bio = models.TextField(blank=True, null=True)
def __unicode__(self):
return u'%s' % self.name
def get_absolute_url(self):
return reverse("creators.detail", args=[self.pk, ])
|
mit
| -6,894,886,796,592,600,000
| 37.48
| 120
| 0.682259
| false
| 3.714286
| false
| false
| false
|
LogikSim/LogikSimPython
|
src/debug/pyside_bugs/nonimplemented_virtual_methods.py
|
1
|
1739
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Copyright 2014 The LogikSim Authors. All rights reserved.
Use of this source code is governed by the GNU GPL license that can
be found in the LICENSE.txt file.
Non-implemented virtual methods can lead to event handling problems.
Run the script as it is and you will observe the following issues:
- The frame cannot be closed normally, only by killing the process
- Resizing the program leads to strange error messages:
QPainter::begin: Paint device returned engine == 0, type: 0
Proposed changes:
- Detect unresolvable virtual methods and print appropriate error message
Workaround for this example:
Uncomment:
def sizeHint(self, *args):
return self.rect().size()
"""
import sys
from PySide import QtGui
class TestRect(QtGui.QGraphicsRectItem, QtGui.QGraphicsLayoutItem):
def __init__(self, *args, **kargs):
QtGui.QGraphicsRectItem.__init__(self, *args, **kargs)
QtGui.QGraphicsLayoutItem.__init__(self, *args, **kargs)
self.setRect(0, 0, 200, 100)
def setGeometry(self, rect):
self.setRect(rect)
# def sizeHint(self, *args):
# return self.rect().size()
def add_rect_with_layout(scene):
item1 = TestRect()
item2 = TestRect()
scene.addItem(item1)
scene.addItem(item2)
layout = QtGui.QGraphicsGridLayout()
layout.addItem(item1, 0, 0)
layout.addItem(item2, 0, 1)
form = QtGui.QGraphicsWidget()
form.setLayout(layout)
scene.addItem(form)
if __name__ == '__main__':
app = QtGui.QApplication(sys.argv)
scene = QtGui.QGraphicsScene()
add_rect_with_layout(scene)
view = QtGui.QGraphicsView()
view.setScene(scene)
view.show()
app.exec_()
|
gpl-3.0
| 7,154,451,987,703,996,000
| 22.186667
| 77
| 0.677976
| false
| 3.570842
| false
| false
| false
|
JMMolenaar/cadnano2.5
|
cadnano/document.py
|
1
|
19668
|
#!/usr/bin/env python
# encoding: utf-8
from operator import itemgetter
import cadnano.util as util
import cadnano.preferences as prefs
from cadnano.cnproxy import ProxyObject, ProxySignal
from cadnano.cnproxy import UndoStack, UndoCommand
from cadnano.strand import Strand
from cadnano.oligo import Oligo
from cadnano.strandset import StrandSet
from cadnano.virtualhelix import VirtualHelix
from cadnano.part import Part
from cadnano.part import HoneycombPart
from cadnano.part import SquarePart
from cadnano import app
class Document(ProxyObject):
"""
The Document class is the root of the model. It has two main purposes:
1. Serve as the parent all Part objects within the model.
2. Track all sub-model actions on its undoStack.
"""
def __init__(self, parent=None):
super(Document, self).__init__(parent)
self._undostack = UndoStack()
self._parts = []
self._assemblies = []
self._controller = None
self._selected_part = None
# the dictionary maintains what is selected
self._selection_dict = {}
# the added list is what was recently selected or deselected
self._selected_changed_dict = {}
app().documentWasCreatedSignal.emit(self)
# end def
### SIGNALS ###
documentPartAddedSignal = ProxySignal(object,
ProxyObject,
name='documentPartAddedSignal') # doc, part
# dict of tuples of objects using the reference as the key,
# and the value is a tuple with meta data
# in the case of strands the metadata would be which endpoints of selected
# e.g. { objectRef: (value0, value1), ...}
documentSelectedChangedSignal = ProxySignal(dict,
name='documentSelectedChangedSignal') # tuples of items + data
documentSelectionFilterChangedSignal = ProxySignal(list,
name='documentSelectionFilterChangedSignal')
documentViewResetSignal = ProxySignal(ProxyObject,
name='documentViewResetSignal')
documentClearSelectionsSignal = ProxySignal(ProxyObject,
name='documentClearSelectionsSignal')
### SLOTS ###
### ACCESSORS ###
def undoStack(self):
"""
This is the actual undoStack to use for all commands. Any children
needing to perform commands should just ask their parent for the
undoStack, and eventually the request will get here.
"""
return self._undostack
def parts(self):
"""Returns a list of parts associated with the document."""
return self._parts
def assemblies(self):
"""Returns a list of assemblies associated with the document."""
return self._assemblies
### PUBLIC METHODS FOR QUERYING THE MODEL ###
def selectedPart(self):
return self._selected_part
def addToSelection(self, obj, value):
self._selection_dict[obj] = value
self._selected_changed_dict[obj] = value
# end def
def removeFromSelection(self, obj):
if obj in self._selection_dict:
del self._selection_dict[obj]
self._selected_changed_dict[obj] = (False, False)
return True
else:
return False
# end def
def clearSelections(self):
"""
Only clear the dictionary
"""
self._selection_dict = {}
# end def
def addStrandToSelection(self, strand, value):
ss = strand.strandSet()
if ss in self._selection_dict:
self._selection_dict[ss][strand] = value
else:
self._selection_dict[ss] = {strand: value}
self._selected_changed_dict[strand] = value
# end def
def removeStrandFromSelection(self, strand):
ss = strand.strandSet()
if ss in self._selection_dict:
temp = self._selection_dict[ss]
if strand in temp:
del temp[strand]
if len(temp) == 0:
del self._selection_dict[ss]
self._selected_changed_dict[strand] = (False, False)
return True
else:
return False
else:
return False
# end def
def selectionDict(self):
return self._selection_dict
# end def
def selectedOligos(self):
"""
        if at least one endpoint of a strand is in the selection, the
        oligo is considered selected
"""
s_dict = self._selection_dict
selected_oligos = set()
for ss in s_dict.keys():
for strand in ss:
selected_oligos.add(strand.oligo())
# end for
# end for
return selected_oligos if len(selected_oligos) > 0 else None
#end def
def clearAllSelected(self):
self._selection_dict = {}
# the added list is what was recently selected or deselected
self._selected_changed_dict = {}
self.documentClearSelectionsSignal.emit(self)
# end def
def isModelSelected(self, obj):
return obj in self._selection_dict
# end def
def isModelStrandSelected(self, strand):
ss = strand.strandSet()
if ss in self._selection_dict:
if strand in self._selection_dict[ss]:
return True
else:
return False
else:
return False
# end def
def getSelectedValue(self, obj):
"""
        obj is an object to look up;
        it is pre-vetted to be in the dictionary
"""
return self._selection_dict[obj]
def getSelectedStrandValue(self, strand):
"""
        strand is an object to look up;
        it is pre-vetted to be in the dictionary
"""
return self._selection_dict[strand.strandSet()][strand]
# end def
def sortedSelectedStrands(self, strandset):
# outList = self._selection_dict[strandset].keys()
# outList.sort(key=Strand.lowIdx)
out_list = [x for x in self._selection_dict[strandset].items()]
getLowIdx = lambda x: Strand.lowIdx(itemgetter(0)(x))
out_list.sort(key=getLowIdx)
return out_list
# end def
def determineStrandSetBounds(self, selected_strand_list, strandset):
min_low_delta = strandset.partMaxBaseIdx()
min_high_delta = strandset.partMaxBaseIdx() # init the return values
ss_dict = self._selection_dict[strandset]
# get the StrandSet index of the first item in the list
ss_idx = strandset._findIndexOfRangeFor(selected_strand_list[0][0])[2]
ss_list = strandset._strand_list
len_ss_list = len(ss_list)
max_ss_idx = len_ss_list - 1
i = 0
for strand, value in selected_strand_list:
while strand != ss_list[ss_idx]:
                # in case there are gaps due to double xovers
ss_idx += 1
# end while
idxL, idxH = strand.idxs()
if value[0]: # the end is selected
if ss_idx > 0:
low_neighbor = ss_list[ss_idx - 1]
if low_neighbor in ss_dict:
valueN = ss_dict[low_neighbor]
# we only care if the low neighbor is not selected
temp = min_low_delta if valueN[1] \
else idxL - low_neighbor.highIdx() - 1
# end if
else: # not selected
temp = idxL - low_neighbor.highIdx() - 1
# end else
else:
temp = idxL - 0
# end else
if temp < min_low_delta:
min_low_delta = temp
# end if
# check the other end of the strand
if not value[1]:
temp = idxH - idxL - 1
if temp < min_high_delta:
min_high_delta = temp
# end if
if value[1]:
if ss_idx < max_ss_idx:
high_neighbor = ss_list[ss_idx + 1]
if high_neighbor in ss_dict:
valueN = ss_dict[high_neighbor]
# we only care if the low neighbor is not selected
temp = min_high_delta if valueN[0] \
else high_neighbor.lowIdx() - idxH - 1
# end if
else: # not selected
temp = high_neighbor.lowIdx() - idxH - 1
# end else
else:
temp = strandset.partMaxBaseIdx() - idxH
# end else
if temp < min_high_delta:
min_high_delta = temp
# end if
# check the other end of the strand
if not value[0]:
temp = idxH - idxL - 1
if temp < min_low_delta:
min_low_delta = temp
# end if
# increment counter
ss_idx += 1
# end for
return (min_low_delta, min_high_delta)
# end def
def getSelectionBounds(self):
min_low_delta = -1
min_high_delta = -1
for strandset in self._selection_dict.keys():
selected_list = self.sortedSelectedStrands(strandset)
temp_low, temp_high = self.determineStrandSetBounds(
selected_list, strandset)
if temp_low < min_low_delta or min_low_delta < 0:
min_low_delta = temp_low
if temp_high < min_high_delta or min_high_delta < 0:
min_high_delta = temp_high
        # end for
return (min_low_delta, min_high_delta)
# end def
# def operateOnStrandSelection(self, method, arg, both=False):
# pass
# # end def
def deleteSelection(self, use_undostack=True):
"""
Delete selected strands. First iterates through all selected strands
and extracts refs to xovers and strands. Next, calls removeXover
        on xoverlist as part of its own macroed command for isolation
purposes. Finally, calls removeStrand on all strands that were
fully selected (low and high), or had at least one non-xover
endpoint selected.
"""
xoList = []
strand_dict = {}
for strandset_dict in self._selection_dict.values():
for strand, selected in strandset_dict.items():
part = strand.virtualHelix().part()
idxL, idxH = strand.idxs()
strand5p = strand.connection5p()
strand3p = strand.connection3p()
# both ends are selected
strand_dict[strand] = selected[0] and selected[1]
# only look at 3' ends to handle xover deletion
sel3p = selected[0] if idxL == strand.idx3Prime() else selected[1]
if sel3p: # is idx3p selected?
if strand3p: # is there an xover
xoList.append((part, strand, strand3p, use_undostack))
else: # idx3p is a selected endpoint
strand_dict[strand] = True
else:
if not strand5p: # idx5p is a selected endpoint
strand_dict[strand] = True
if use_undostack and xoList:
self.undoStack().beginMacro("Delete xovers")
for part, strand, strand3p, useUndo in xoList:
Part.removeXover(part, strand, strand3p, useUndo)
self.removeStrandFromSelection(strand)
self.removeStrandFromSelection(strand3p)
self._selection_dict = {}
self.documentClearSelectionsSignal.emit(self)
if use_undostack:
if xoList: # end xover macro if it was started
self.undoStack().endMacro()
if True in strand_dict.values():
self.undoStack().beginMacro("Delete selection")
else:
return # nothing left to do
for strand, delete in strand_dict.items():
if delete:
strand.strandSet().removeStrand(strand)
if use_undostack:
self.undoStack().endMacro()
def paintSelection(self, scafColor, stapColor, use_undostack=True):
"""Delete xovers if present. Otherwise delete everything."""
scaf_oligos = {}
stap_oligos = {}
for strandset_dict in self._selection_dict.values():
for strand, value in strandset_dict.items():
if strand.isScaffold():
scaf_oligos[strand.oligo()] = True
else:
stap_oligos[strand.oligo()] = True
if use_undostack:
self.undoStack().beginMacro("Paint strands")
for olg in scaf_oligos.keys():
olg.applyColor(scafColor)
for olg in stap_oligos.keys():
olg.applyColor(stapColor)
if use_undostack:
self.undoStack().endMacro()
def resizeSelection(self, delta, use_undostack=True):
"""
Moves the selected idxs by delta by first iterating over all strands
to calculate new idxs (method will return if snap-to behavior would
create illegal state), then applying a resize command to each strand.
"""
resize_list = []
# calculate new idxs
for strandset_dict in self._selection_dict.values():
for strand, selected in strandset_dict.items():
part = strand.virtualHelix().part()
idxL, idxH = strand.idxs()
newL, newH = strand.idxs()
deltaL = deltaH = delta
# process xovers to get revised delta
if selected[0] and strand.connectionLow():
newL = part.xoverSnapTo(strand, idxL, delta)
if newL == None:
return
deltaH = newL-idxL
if selected[1] and strand.connectionHigh():
newH = part.xoverSnapTo(strand, idxH, delta)
if newH == None:
return
deltaL = newH-idxH
# process endpoints
if selected[0] and not strand.connectionLow():
newL = idxL + deltaL
if selected[1] and not strand.connectionHigh():
newH = idxH + deltaH
if newL > newH: # check for illegal state
return
resize_list.append((strand, newL, newH))
# end for
# end for
# execute the resize commands
if use_undostack:
self.undoStack().beginMacro("Resize Selection")
for strand, idxL, idxH in resize_list:
Strand.resize(strand, (idxL, idxH), use_undostack)
if use_undostack:
self.undoStack().endMacro()
# end def
def updateSelection(self):
"""
do it this way in the future when we have
a better signaling architecture between views
"""
# self.documentSelectedChangedSignal.emit(self._selected_changed_dict)
"""
For now, individual objects need to emit signals
"""
for obj, value in self._selected_changed_dict.items():
obj.selectedChangedSignal.emit(obj, value)
# end for
self._selected_changed_dict = {}
# for ss in self._selection_dict:
# print self.sortedSelectedStrands(ss)
# end def
def resetViews(self):
# This is a fast way to clear selections and the views.
# We could manually deselect each item from the Dict, but we'll just
# let them be garbage collect
# the dictionary maintains what is selected
self._selection_dict = {}
# the added list is what was recently selected or deselected
self._selected_changed_dict = {}
self.documentViewResetSignal.emit(self)
# end def
### PUBLIC METHODS FOR EDITING THE MODEL ###
def addHoneycombPart(self, max_row=prefs.HONEYCOMB_PART_MAXROWS,
max_col=prefs.HONEYCOMB_PART_MAXCOLS,
max_steps=prefs.HONEYCOMB_PART_MAXSTEPS):
"""
Create and store a new DNAPart and instance, and return the instance.
"""
dnapart = None
if len(self._parts) == 0:
dnapart = HoneycombPart(document=self, max_row=max_row,
max_col=max_col, max_steps=max_steps)
self._addPart(dnapart)
return dnapart
def addSquarePart(self, max_row=prefs.SQUARE_PART_MAXROWS,
max_col=prefs.SQUARE_PART_MAXCOLS,
max_steps=prefs.SQUARE_PART_MAXSTEPS):
"""
Create and store a new DNAPart and instance, and return the instance.
"""
dnapart = None
if len(self._parts) == 0:
dnapart = SquarePart(document=self, max_row=max_row,
max_col=max_col, max_steps=max_steps)
self._addPart(dnapart)
return dnapart
def removeAllParts(self):
"""Used to reset the document. Not undoable."""
self.documentClearSelectionsSignal.emit(self)
for part in self._parts:
part.remove(use_undostack=False)
# end def
def removePart(self, part):
self.documentClearSelectionsSignal.emit(self)
self._parts.remove(part)
### PUBLIC SUPPORT METHODS ###
def controller(self):
return self._controller
def setController(self, controller):
"""Called by DocumentController setDocument method."""
self._controller = controller
# end def
def setSelectedPart(self, newPart):
if self._selected_part == newPart:
return
self._selected_part = newPart
# end def
### PRIVATE SUPPORT METHODS ###
def _addPart(self, part, use_undostack=True):
"""Add part to the document via AddPartCommand."""
c = self.AddPartCommand(self, part)
util.execCommandList(
self, [c], desc="Add part", use_undostack=use_undostack)
return c.part()
# end def
### COMMANDS ###
class AddPartCommand(UndoCommand):
"""
Undo ready command for deleting a part.
"""
def __init__(self, document, part):
super(Document.AddPartCommand, self).__init__("add part")
self._doc = document
self._part = part
# end def
def part(self):
return self._part
# end def
def redo(self):
if len(self._doc._parts) == 0:
self._doc._parts.append(self._part)
self._part.setDocument(self._doc)
self._doc.setSelectedPart(self._part)
self._doc.documentPartAddedSignal.emit(self._doc, self._part)
# end def
def undo(self):
self._doc.removePart(self._part)
self._part.setDocument(None)
self._doc.setSelectedPart(None)
self._part.partRemovedSignal.emit(self._part)
# self._doc.documentPartAddedSignal.emit(self._doc, self._part)
# end def
# end class
# end class
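# Illustrative sketch, not from the original module: the document owns at
# most one part at a time (note the len(self._parts) == 0 guards above), and
# part creation goes through the undo stack. Assuming a running cadnano app()
# context:
# doc = Document()
# part = doc.addHoneycombPart()  # creates, selects and returns the part
# doc.addHoneycombPart()         # -> None, a part already exists
# doc.undoStack().undo()         # removes the part again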
|
mit
| 5,810,336,417,466,848,000
| 35.831461
| 102
| 0.552725
| false
| 4.193603
| false
| false
| false
|
CrystallineEntity/bulbs
|
bulbs/views/home.py
|
1
|
1916
|
from pyramid.view import view_config
from bulbs.components.subcategory import number_of_threads, number_of_posts, last_post
from bulbs.components import db
def catinfo(cat):
keys = "id", "title", "desc", "slug"
keys_values = zip(keys, cat)
return dict(keys_values)
def categories():
"""Return a dict containing all categories."""
cursor = db.con.cursor()
cursor.execute("SELECT id, title, description, slug FROM bulbs_category")
cats = cursor.fetchall()
data = map(catinfo, cats)
return data
def subcatinfo(data):
keys = "id", "title", "category_id", "desc", "slug"
keys_values = zip(keys, data)
id = data[0]
return dict(keys_values,
id=id,
threads=number_of_threads(id),
posts=number_of_posts(id),
last_post=last_post(id)
)
def subcategories(cat_id=None):
"""Return a dict containing information from a specified category or forums for every category."""
cursor = db.con.cursor()
if cat_id is not None:
cursor.execute(
"SELECT id, title, category_id, description, slug FROM bulbs_subcategory \
WHERE category_id = %s", (cat_id, ))
else:
cursor.execute(
"SELECT id, title, category_id, description, slug FROM bulbs_subcategory")
children = cursor.fetchall()
subcategories_ = map(subcatinfo, children)
return subcategories_
@view_config(route_name="home", renderer="home.mako")
def response(request):
cursor = db.con.cursor()
cats = categories()
subcats = list(subcategories())
cursor.execute("SELECT username FROM bulbs_user ORDER BY date DESC LIMIT 1")
newest_user = cursor.fetchone()[0]
return {
"project": request.registry.settings.get("site_name"),
"title": "Home",
"categories": cats,
"subcategories": subcats,
"new_member": newest_user
}
|
mit
| -4,394,526,757,759,784,400
| 32.034483
| 102
| 0.634134
| false
| 3.642586
| false
| false
| false
|
repotvsupertuga/tvsupertuga.repository
|
script.module.cryptolib/lib/Crypto/PublicKey/RSA.py
|
1
|
2753
|
#!/usr/bin/env python
from __future__ import absolute_import
import binascii
import struct
from rsa import PublicKey, PrivateKey
from Crypto.Math.Numbers import Integer
def import_key(extern_key, passphrase=None):
"""Import an RSA key (public or private half), encoded in standard
form.
:Parameter extern_key:
The RSA key to import, encoded as a byte string.
An RSA public key can be in any of the following formats:
- X.509 certificate (binary or PEM format)
- X.509 ``subjectPublicKeyInfo`` DER SEQUENCE (binary or PEM
encoding)
- `PKCS#1`_ ``RSAPublicKey`` DER SEQUENCE (binary or PEM encoding)
- OpenSSH (textual public key only)
An RSA private key can be in any of the following formats:
- PKCS#1 ``RSAPrivateKey`` DER SEQUENCE (binary or PEM encoding)
- `PKCS#8`_ ``PrivateKeyInfo`` or ``EncryptedPrivateKeyInfo``
DER SEQUENCE (binary or PEM encoding)
- OpenSSH (textual public key only)
For details about the PEM encoding, see `RFC1421`_/`RFC1423`_.
The private key may be encrypted by means of a certain pass phrase
either at the PEM level or at the PKCS#8 level.
:Type extern_key: string
:Parameter passphrase:
In case of an encrypted private key, this is the pass phrase from
which the decryption key is derived.
:Type passphrase: string
:Return: An RSA key object (`RsaKey`).
:Raise ValueError/IndexError/TypeError:
When the given key cannot be parsed (possibly because the pass
phrase is wrong).
.. _RFC1421: http://www.ietf.org/rfc/rfc1421.txt
.. _RFC1423: http://www.ietf.org/rfc/rfc1423.txt
.. _`PKCS#1`: http://www.ietf.org/rfc/rfc3447.txt
.. _`PKCS#8`: http://www.ietf.org/rfc/rfc5208.txt
"""
if passphrase is not None:
raise ValueError("RSA key passphrase is not supported")
if extern_key.startswith('ssh-rsa '):
# This is probably an OpenSSH key
keystring = binascii.a2b_base64(extern_key.split(' ')[1])
keyparts = []
while len(keystring) > 4:
l = struct.unpack(">I", keystring[:4])[0]
keyparts.append(keystring[4:4 + l])
keystring = keystring[4 + l:]
e = Integer.from_bytes(keyparts[1])
n = Integer.from_bytes(keyparts[2])
return PublicKey(n._value, e._value)
for fmt in ("PEM", "DER"):
try:
return PrivateKey.load_pkcs1(extern_key, fmt)
except:
try:
return PublicKey.load_pkcs1(extern_key, fmt)
except:
pass
raise ValueError("RSA key format is not supported")
# Backward compatibility
importKey = import_key
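# Illustrative sketch, not from the original module: the OpenSSH branch above
# reads the base64 payload as length-prefixed parts (a 4-byte big-endian
# length, then that many bytes); parts [1] and [2] carry the public exponent
# e and the modulus n. The key path below is hypothetical.
if __name__ == '__main__':
    with open('id_rsa.pub') as f:
        key = import_key(f.read())
    print(key.n, key.e)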
|
gpl-2.0
| 2,940,562,159,866,837,000
| 33.848101
| 74
| 0.631311
| false
| 3.882934
| false
| false
| false
|
fatcloud/PyCV-time
|
experiments/stop_motion_tool/stop_motion_tool.py
|
1
|
1533
|
from cam import OpenCV_Cam
import cv2
import os.path
import time
cam = OpenCV_Cam(0)
cam.size = (1920, 1080)
KEY_ESC = 27
KEY_SPACE = ord(' ')
PAGE_DOWN = 2228224  # This makes the stop motion controllable by a presenter.
prevFrame = None
i = 0
#Make a directory on current working directory with date and time as its name
timestr = time.strftime("%Y%m%d-%H%M%S")
cwd = os.getcwd()
dirName = cwd + "\\"+timestr
os.makedirs(dirName)
fname= cwd + "\\frame_.png"
if os.path.isfile(fname):
prevFrame = cv2.imread(fname)
#Make .avi file from collected frames
fourcc = cv2.cv.CV_FOURCC(*'XVID')
video = cv2.VideoWriter(dirName+"\\"+'output_.avi',fourcc, 3.0, cam.size, isColor =True)
while True:
# Capture frame-by-frame
frame = cam.read()
# image processing functions
    # Show the blended frame in a window; press ESC to exit
if (prevFrame is not None):
showFrame = cv2.addWeighted(frame,0.7,prevFrame,0.3,0)
else:
showFrame = frame
resizeShowFrame = cv2.resize(showFrame, (0,0), fx = 0.5, fy = 0.5 )
cv2.imshow('Press ESC to exit', resizeShowFrame)
# wait for the key
key_code = cv2.waitKey(10)
    if key_code == KEY_SPACE or key_code == PAGE_DOWN:
cv2.imwrite(dirName+"\\"+'frame'+str(i)+'_.png', frame)
video.write(frame)
prevFrame = frame
i += 1
    elif key_code == KEY_ESC:
cv2.imwrite(cwd + '\\frame_.png', prevFrame)
break
cv2.destroyAllWindows()
cam.release()
video.release()
|
mit
| 5,548,077,770,134,502,000
| 24.131148
| 88
| 0.641879
| false
| 3
| false
| false
| false
|
jtpereyda/boofuzz
|
boofuzz/primitives/bit_field.py
|
1
|
6982
|
import struct
from builtins import range
import six
from past.builtins import map
from .. import helpers
from ..constants import LITTLE_ENDIAN
from ..fuzzable import Fuzzable
def binary_string_to_int(binary):
"""
Convert a binary string to a decimal number.
@type binary: str
@param binary: Binary string
@rtype: int
@return: Converted bit string
"""
return int(binary, 2)
def int_to_binary_string(number, bit_width):
"""
Convert a number to a binary string.
@type number: int
    @param number: Number to convert
@type bit_width: int
    @param bit_width: Width of bit string
@rtype: str
@return: Bit string
"""
return "".join(map(lambda x: str((number >> x) & 1), range(bit_width - 1, -1, -1)))
class BitField(Fuzzable):
"""
The bit field primitive represents a number of variable length and is used to define all other integer types.
:type name: str, optional
:param name: Name, for referencing later. Names should always be provided, but if not, a default name will be given,
defaults to None
:type default_value: int, optional
:param default_value: Default integer value, defaults to 0
:type width: int, optional
:param width: Width in bits, defaults to 8
:type max_num: int, optional
:param max_num: Maximum number to iterate up to, defaults to None
:type endian: char, optional
:param endian: Endianness of the bit field (LITTLE_ENDIAN: <, BIG_ENDIAN: >), defaults to LITTLE_ENDIAN
:type output_format: str, optional
:param output_format: Output format, "binary" or "ascii", defaults to binary
:type signed: bool, optional
:param signed: Make size signed vs. unsigned (applicable only with format="ascii"), defaults to False
:type full_range: bool, optional
:param full_range: If enabled the field mutates through *all* possible values, defaults to False
:type fuzz_values: list, optional
:param fuzz_values: List of custom fuzz values to add to the normal mutations, defaults to None
:type fuzzable: bool, optional
:param fuzzable: Enable/disable fuzzing of this primitive, defaults to true
"""
def __init__(
self,
name=None,
default_value=0,
width=8,
max_num=None,
endian=LITTLE_ENDIAN,
output_format="binary",
signed=False,
full_range=False,
*args,
**kwargs
):
super(BitField, self).__init__(name=name, default_value=default_value, *args, **kwargs)
assert isinstance(width, six.integer_types), "width must be an integer!"
self.width = width
self.max_num = max_num
self.endian = endian
self.format = output_format
self.signed = signed
self.full_range = full_range
if not self.max_num:
self.max_num = binary_string_to_int("1" + "0" * width)
assert isinstance(self.max_num, six.integer_types), "max_num must be an integer!"
def _iterate_fuzz_lib(self):
if self.full_range:
for i in range(0, self.max_num):
yield i
else:
# try only "smart" values.
interesting_boundaries = [
0,
self.max_num // 2,
self.max_num // 3,
self.max_num // 4,
self.max_num // 8,
self.max_num // 16,
self.max_num // 32,
self.max_num,
]
for boundary in interesting_boundaries:
for v in self._yield_integer_boundaries(boundary):
yield v
# TODO Add a way to inject a list of fuzz values
# elif isinstance(default_value, (list, tuple)):
# for val in iter(default_value):
# yield val
# TODO: Add injectable arbitrary bit fields
def _yield_integer_boundaries(self, integer):
"""
Add the supplied integer and border cases to the integer fuzz heuristics library.
@type integer: int
@param integer: int to append to fuzz heuristics
"""
for i in range(-10, 10):
case = integer + i
if 0 <= case < self.max_num:
# some day: if case not in self._user_provided_values
yield case
def encode(self, value, mutation_context):
temp = self._render_int(
value, output_format=self.format, bit_width=self.width, endian=self.endian, signed=self.signed
)
return helpers.str_to_bytes(temp)
def mutations(self, default_value):
for val in self._iterate_fuzz_lib():
yield val
@staticmethod
def _render_int(value, output_format, bit_width, endian, signed):
"""
Convert value to a bit or byte string.
Args:
value (int): Value to convert to a byte string.
output_format (str): "binary" or "ascii"
bit_width (int): Width of output in bits.
endian: BIG_ENDIAN or LITTLE_ENDIAN
signed (bool):
Returns:
str: value converted to a byte string
"""
if output_format == "binary":
bit_stream = ""
rendered = b""
# pad the bit stream to the next byte boundary.
if bit_width % 8 == 0:
bit_stream += int_to_binary_string(value, bit_width)
else:
bit_stream = "0" * (8 - (bit_width % 8))
bit_stream += int_to_binary_string(value, bit_width)
# convert the bit stream from a string of bits into raw bytes.
for i in range(len(bit_stream) // 8):
chunk_min = 8 * i
chunk_max = chunk_min + 8
chunk = bit_stream[chunk_min:chunk_max]
rendered += struct.pack("B", binary_string_to_int(chunk))
# if necessary, convert the endianness of the raw bytes.
if endian == LITTLE_ENDIAN:
# reverse the bytes
rendered = rendered[::-1]
_rendered = rendered
else:
# Otherwise we have ascii/something else
# if the sign flag is raised and we are dealing with a signed integer (first bit is 1).
if signed and int_to_binary_string(value, bit_width)[0] == "1":
max_num = binary_string_to_int("1" + "0" * (bit_width - 1))
# chop off the sign bit.
val = value & binary_string_to_int("1" * (bit_width - 1))
# account for the fact that the negative scale works backwards.
val = max_num - val - 1
# toss in the negative sign.
_rendered = "%d" % ~val
# unsigned integer or positive signed integer.
else:
_rendered = "%d" % value
return _rendered
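# Illustrative sketch, not from the original module: a round trip through the
# module-level helpers and a little-endian render. Assumes boofuzz's Fuzzable
# base accepts the keyword arguments shown in BitField's signature above.
if __name__ == "__main__":
    assert int_to_binary_string(5, 8) == "00000101"  # MSB-first bit string
    assert binary_string_to_int("00000101") == 5
    field = BitField(name="len", default_value=0, width=16)
    # A 16-bit little-endian field renders low byte first: 0x0102 -> b"\x02\x01"
    assert field.encode(0x0102, None) == b"\x02\x01"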
|
gpl-2.0
| 3,794,698,004,206,549,500
| 33.564356
| 120
| 0.574048
| false
| 4.049884
| false
| false
| false
|
mtils/ems
|
ems/qt/richtext/char_format_actions.py
|
1
|
6238
|
from ems.qt import QtWidgets, QtCore, QtGui
from ems.qt.richtext.char_format_proxy import CharFormatProxy
Qt = QtCore.Qt
QObject = QtCore.QObject
QColor = QtGui.QColor
QAction = QtWidgets.QAction
QKeySequence = QtGui.QKeySequence
QFont = QtGui.QFont
QIcon = QtGui.QIcon
QPixmap = QtGui.QPixmap
ThemeIcon = QIcon.fromTheme
QApplication = QtWidgets.QApplication
QColorDialog = QtWidgets.QColorDialog
QFontComboBox = QtWidgets.QFontComboBox
QComboBox = QtWidgets.QComboBox
QFontDatabase = QtGui.QFontDatabase
QTextDocument = QtGui.QTextDocument
QTextCharFormat = QtGui.QTextCharFormat
pyqtSignal = QtCore.pyqtSignal
pyqtSlot = QtCore.pyqtSlot
pyqtProperty = QtCore.pyqtProperty
class CharFormatActions(QObject):
documentChanged = pyqtSignal(QTextDocument)
currentBlockFormatChanged = pyqtSignal(QTextCharFormat)
def __init__(self, parentWidget, signalProxy=None, resourcePath=':/text-editor'):
super(CharFormatActions, self).__init__(parentWidget)
self.resourcePath = resourcePath
self.actions = []
self.widgets = []
self.signals = CharFormatProxy(self) if signalProxy is None else signalProxy
self._addActions(self.parent())
self._document = QTextDocument()
self._lastBlockFormat = None
def getDocument(self):
return self._document
@pyqtSlot(QTextDocument)
def setDocument(self, document):
if self._document is document:
return
if self._document:
self._disconnectFromDocument(self._document)
self._document = document
self.documentChanged.emit(self._document)
document = pyqtProperty(QTextDocument, getDocument, setDocument)
def _disconnectFromDocument(self, document):
return
def _addActions(self, parent):
self.actionTextBold = QAction(
ThemeIcon('format-text-bold', self._icon('bold.png')),
"&Bold", parent, priority=QAction.LowPriority,
shortcut=Qt.CTRL + Qt.Key_B,
triggered=self.signals.setBold, checkable=True)
bold = QFont()
bold.setBold(True)
self.actionTextBold.setFont(bold)
self.signals.boldChanged.connect(self.actionTextBold.setChecked)
self.actions.append(self.actionTextBold)
self.actionTextItalic = QAction(
ThemeIcon('format-text-italic', self._icon('italic.png')),
"&Italic", self, priority=QAction.LowPriority,
shortcut=Qt.CTRL + Qt.Key_I,
triggered=self.signals.setItalic, checkable=True)
italic = QFont()
italic.setItalic(True)
self.actionTextItalic.setFont(italic)
self.signals.italicChanged.connect(self.actionTextItalic.setChecked)
self.actions.append(self.actionTextItalic)
self.actionTextUnderline = QAction(
ThemeIcon('format-text-underline', self._icon('underline.png')),
"&Underline", self, priority=QAction.LowPriority,
shortcut=Qt.CTRL + Qt.Key_U,
triggered=self.signals.setUnderline, checkable=True)
underline = QFont()
underline.setUnderline(True)
self.actionTextUnderline.setFont(underline)
self.actions.append(self.actionTextUnderline)
self.signals.underlineChanged.connect(self.actionTextUnderline.setChecked)
pix = QPixmap(16, 16)
pix.fill(Qt.black)
self.actionTextColor = QAction(QIcon(pix), "&Color...",
self, triggered=self._textColor)
self.signals.foregroundColorChanged.connect(self._colorChanged)
self.actions.append(self.actionTextColor)
self.actionClearFormat = QAction(ThemeIcon('format-text-clear', self._icon('magic.png')),
"&Remove Format", self, priority=QAction.LowPriority,
shortcut=Qt.CTRL + Qt.Key_E,
triggered=self.signals.clearFormat)
self.actions.append(self.actionClearFormat)
self.fontCombo = QFontComboBox()
self.fontCombo.activated[str].connect(self.signals.setFontFamily)
self.signals.fontFamilyChanged.connect(self.setFontFamily)
self.widgets.append(self.fontCombo)
self.sizeCombo = QComboBox()
self.sizeCombo.setObjectName("sizeCombo")
self.sizeCombo.setEditable(True)
self.signals.pointSizeChanged.connect(self.setFontPointSize)
self.widgets.append(self.sizeCombo)
db = QFontDatabase()
for size in db.standardSizes():
self.sizeCombo.addItem("{}".format(size))
self.sizeCombo.activated[str].connect(self._textSize)
self.sizeCombo.setCurrentIndex(
self.sizeCombo.findText(
"{}".format(QApplication.font().pointSize())
)
)
def _textColor(self):
color = self.signals.getForegroundColor()
if not color:
color = QColor(0,0,0)
col = QColorDialog.getColor(color, self.parent())
if not col.isValid():
return
self.signals.setForegroundColor(col)
def _colorChanged(self, color):
pix = QPixmap(16, 16)
pix.fill(color)
self.actionTextColor.setIcon(QIcon(pix))
def _textSize(self, pointSize):
pointSize = float(pointSize)
if pointSize < 0:
return
self.signals.setPointSize(pointSize)
def addToToolbar(self, toolbar, addActions=True, addWidgets=True):
if addActions:
for action in self.actions:
toolbar.addAction(action)
if not addWidgets:
return
for widget in self.widgets:
widget.setParent(toolbar)
toolbar.addWidget(widget)
def setFontFamily(self, family):
self.fontCombo.setCurrentIndex(self.fontCombo.findText(family))
def setFontPointSize(self, pointSize):
self.sizeCombo.setCurrentIndex(self.sizeCombo.findText("{}".format(int(pointSize))))
def iconPath(self, fileName):
return self.resourcePath + '/' + fileName
def _icon(self, fileName):
return QIcon(self.iconPath(fileName))
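# Illustrative sketch, not from the original module: typical wiring inside a
# running QApplication. Build the actions against a parent widget, then put
# both the QActions and the font/size combos on a toolbar.
# window = QtWidgets.QMainWindow()
# actions = CharFormatActions(window)
# toolbar = window.addToolBar("Format")
# actions.addToToolbar(toolbar)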
|
mit
| 4,689,382,300,073,918,000
| 32.543011
| 97
| 0.648124
| false
| 4.14485
| false
| false
| false
|
Murano/microqa
|
app/forms.py
|
1
|
1790
|
# -*- coding: utf-8 -*-
from flask import flash
from flask_wtf import Form
from wtforms import StringField, TextAreaField, PasswordField, TextField
from model import User
from wtforms.validators import DataRequired, ValidationError
class QuestionForm(Form):
title = StringField(u"Заголовок вопроса", validators=[DataRequired()])
body = TextAreaField(u"Тело вопроса", validators=[DataRequired()])
tags = StringField(u"Тэги (через запятую)", validators=[DataRequired()])
class CommentForm(Form):
body = TextAreaField(u"Ответ", validators=[DataRequired()])
class LoginForm(Form):
username = StringField(u"Логин", validators=[DataRequired()])
password = PasswordField(u"Пароль", validators=[DataRequired()])
def validate_username(self, field):
user = self.get_user()
if user is None:
raise ValidationError(u'Неверное имя пользователя')
if user.password != self.password.data:
raise ValidationError(u'Неверный пароль')
def get_user(self):
return User.objects(username=self.username.data).first()
class RegistrationForm(Form):
username = TextField(u"Логин", validators=[DataRequired()])
email = TextField(u"E-mail", validators=[DataRequired()] ) # TODO: validate
password = PasswordField(u"Пароль", validators=[DataRequired()])
def validate_username(self, field):
if User.objects(username=self.username.data):
raise ValidationError(u'Такой логин уже занят')
def validate_email(self, field):
if User.objects(email=self.email.data):
raise ValidationError(u'Такой email адрес уже существует')
|
mit
| 4,633,618,319,600,694,000
| 36.340909
| 79
| 0.704019
| false
| 3.232283
| false
| false
| false
|
OSVR/UIforETWbins
|
bin/StripChromeSymbols.py
|
1
|
13713
|
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This script exists to work around severe performance problems when WPA or other
Windows Performance Toolkit programs try to load the symbols for the Chrome
web browser. Some combination of the enormous size of the symbols or the
enhanced debug information generated by /Zo causes WPA to take about twenty
minutes to process the symbols for chrome.dll and chrome_child.dll. When
profiling Chrome this delay happens with every new set of symbols, so with
every new version of Chrome.
This script uses xperf actions to dump a list of the symbols referenced in
an ETW trace. If chrome.dll, chrome_child.dll, content.dll, or blink_web.dll are
detected and if decoded symbols are not found in %_NT_SYMCACHE_PATH% (default is
c:\symcache) then RetrieveSymbols.exe is used to download the symbols from the
Chromium symbol server, pdbcopy.exe is used to strip the private symbols, and
then another xperf action is used to load the stripped symbols, thus converting
them to .symcache files that can be efficiently loaded by WPA.
Locally built Chrome symbols are also supported.
More details on the discovery of this slowness and the evolution of the fix
can be found here:
https://randomascii.wordpress.com/2014/11/04/slow-symbol-loading-in-microsofts-profiler-take-two/
Discussion can be found here:
https://randomascii.wordpress.com/2013/03/09/symbols-the-microsoft-way/
Source code for RetrieveSymbols.exe can be found here:
https://github.com/google/UIforETW/tree/master/RetrieveSymbols
If "chromium-browser-symsrv" is not found in _NT_SYMBOL_PATH or RetrieveSymbols.exe
and pdbcopy.exe are not found then this script will exit early.
With the 10.0.14393 version of WPA the symbol translation problems have largely
been eliminated, which seems like it would make this script unnecessary, but the
symbol translation slowdowns have been replaced by a bug in downloading symbols from
Chrome's symbol server.
"""
from __future__ import print_function
import os
import sys
import re
import tempfile
import shutil
import subprocess
# Set to true to do symbol translation as well as downloading. Set to
# false to just download symbols and let WPA translate them.
strip_and_translate = True
def main():
if len(sys.argv) < 2:
print("Usage: %s trace.etl" % sys.argv[0])
sys.exit(0)
# Our usage of subprocess seems to require Python 2.7+
if sys.version_info.major == 2 and sys.version_info.minor < 7:
print("Your python version is too old - 2.7 or higher required.")
print("Python version is %s" % sys.version)
sys.exit(0)
symbol_path = os.environ.get("_NT_SYMBOL_PATH", "")
if symbol_path.count("chromium-browser-symsrv") == 0:
print("Chromium symbol server is not in _NT_SYMBOL_PATH. No symbol stripping needed.")
sys.exit(0)
script_dir = os.path.split(sys.argv[0])[0]
retrieve_path = os.path.join(script_dir, "RetrieveSymbols.exe")
pdbcopy_path = os.path.join(script_dir, "pdbcopy.exe")
if os.environ.has_key("programfiles(x86)"):
# The UIforETW copy of pdbcopy.exe fails to copy some Chrome PDBs that the
# Windows 10 SDK version can copy - use it if present.
pdbcopy_install = os.path.join(os.environ["programfiles(x86)"], r"Windows kits\10\debuggers\x86\pdbcopy.exe")
if os.path.exists(pdbcopy_install):
pdbcopy_path = pdbcopy_install
# This tool converts PDBs created with /debug:fastlink (VC++ 2015 feature) to
# regular PDBs that contain all of the symbol information directly. This is
# required so that pdbcopy can copy the symbols.
un_fastlink_tool = r"C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\bin\amd64\mspdbcmf.exe"
if not os.path.exists(un_fastlink_tool):
un_fastlink_tool = None
# RetrieveSymbols.exe requires some support files. dbghelp.dll and symsrv.dll
# have to be in the same directory as RetrieveSymbols.exe and pdbcopy.exe must
# be in the path, so copy them all to the script directory.
for third_party in ["pdbcopy.exe", "dbghelp.dll", "symsrv.dll"]:
if not os.path.exists(third_party):
source = os.path.normpath(os.path.join(script_dir, r"..\third_party", \
third_party))
dest = os.path.normpath(os.path.join(script_dir, third_party))
shutil.copy2(source, dest)
if not os.path.exists(pdbcopy_path):
print("pdbcopy.exe not found. No symbol stripping is possible.")
sys.exit(0)
if not os.path.exists(retrieve_path):
print("RetrieveSymbols.exe not found. No symbol retrieval is possible.")
sys.exit(0)
tracename = sys.argv[1]
# Each symbol file that we pdbcopy gets copied to a separate directory so
# that we can support decoding symbols for multiple chrome versions without
# filename collisions.
tempdirs = []
# Typical output looks like:
# "[RSDS] PdbSig: {0e7712be-af06-4421-884b-496f833c8ec1}; Age: 33; Pdb: D:\src\chromium2\src\out\Release\initial\chrome.dll.pdb"
# Note that this output implies a .symcache filename like this:
# chrome.dll-0e7712beaf064421884b496f833c8ec121v2.symcache
# In particular, note that the xperf action prints the age in decimal, but the
# symcache names use the age in hexadecimal!
pdb_re = re.compile(r'"\[RSDS\] PdbSig: {(.*-.*-.*-.*-.*)}; Age: (.*); Pdb: (.*)"')
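  # Applied to the sample line above, pdb_re yields the groups
  # ("0e7712be-af06-4421-884b-496f833c8ec1", "33",
  #  r"D:\src\chromium2\src\out\Release\initial\chrome.dll.pdb").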
pdb_cached_re = re.compile(r"Found .*file - placed it in (.*)")
print("Pre-translating chrome symbols from stripped PDBs to avoid 10-15 minute translation times "
"and to work around WPA symbol download bugs.")
symcache_files = []
# Keep track of the local symbol files so that we can temporarily rename them
# to stop xperf from using -- rename them from .pdb to .pdbx
local_symbol_files = []
#-tle = tolerate lost events
  #-tti = tolerate time inversions
#-a symcache = show image and symbol identification (see xperf -help processing)
#-dbgid = show symbol identification information (see xperf -help symcache)
command = 'xperf -i "%s" -tle -tti -a symcache -dbgid' % tracename
print("> %s" % command)
found_uncached = False
raw_command_output = subprocess.check_output(command, stderr=subprocess.STDOUT)
command_output = str(raw_command_output).splitlines()
for line in command_output:
dllMatch = None # This is the name to use when generating the .symcache files
if line.count("chrome_child.dll") > 0:
# The symcache files for chrome_child.dll use the name chrome.dll for some reason
dllMatch = "chrome.dll"
# Complete list of Chrome executables and binaries. Some are only used in internal builds.
# Note that case matters for downloading PDBs.
for dllName in ["chrome.exe", "chrome.dll", "blink_web.dll", "content.dll", "chrome_elf.dll", "chrome_watcher.dll", "libEGL.dll", "libGLESv2.dll"]:
if line.count("\\" + dllName) > 0:
dllMatch = dllName
if dllMatch:
match = pdb_re.match(line)
if match:
guid, age, path = match.groups()
guid = guid.replace("-", "")
age = int(age) # Prepare for printing as hex
filepart = os.path.split(path)[1]
symcache_file = r"c:\symcache\%s-%s%xv2.symcache" % (dllMatch, guid, age)
if os.path.exists(symcache_file):
#print("Symcache file %s already exists. Skipping." % symcache_file)
continue
# Only print messages for chrome PDBs that aren't in the symcache
found_uncached = True
print("Found uncached reference to %s: %s - %s" % (filepart, guid, age, ))
symcache_files.append(symcache_file)
pdb_cache_path = None
retrieve_command = "%s %s %s %s" % (retrieve_path, guid, age, filepart)
print(" > %s" % retrieve_command)
for subline in os.popen(retrieve_command):
cache_match = pdb_cached_re.match(subline.strip())
if cache_match:
pdb_cache_path = cache_match.groups()[0]
# RetrieveSymbols puts a period at the end of the output, so strip that.
if pdb_cache_path.endswith("."):
pdb_cache_path = pdb_cache_path[:-1]
if strip_and_translate and not pdb_cache_path:
# Look for locally built symbols
if os.path.exists(path):
pdb_cache_path = path
local_symbol_files.append(path)
if pdb_cache_path:
if strip_and_translate:
tempdir = tempfile.mkdtemp()
tempdirs.append(tempdir)
dest_path = os.path.join(tempdir, os.path.basename(pdb_cache_path))
print(" Copying PDB to %s" % dest_path)
# For some reason putting quotes around the command to be run causes
# it to fail. So don't do that.
copy_command = '%s "%s" "%s" -p' % (pdbcopy_path, pdb_cache_path, dest_path)
print(" > %s" % copy_command)
if un_fastlink_tool:
# If the un_fastlink_tool is available then run the pdbcopy command in a
# try block. If pdbcopy fails then run the un_fastlink_tool and try again.
try:
output = str(subprocess.check_output(copy_command, stderr=subprocess.STDOUT))
if output:
print(" %s" % output, end="")
              except subprocess.CalledProcessError:
convert_command = '%s "%s"' % (un_fastlink_tool, pdb_cache_path)
print("Attempting to un-fastlink PDB so that pdbcopy can strip it. This may be slow.")
print(" > %s" % convert_command)
subprocess.check_output(convert_command)
output = str(subprocess.check_output(copy_command, stderr=subprocess.STDOUT))
if output:
print(" %s" % output, end="")
else:
output = str(subprocess.check_output(copy_command, stderr=subprocess.STDOUT))
if output:
print(" %s" % output, end="")
if not os.path.exists(dest_path):
print("Aborting symbol generation because stripped PDB '%s' does not exist. WPA symbol loading may be slow." % dest_path)
sys.exit(0)
else:
print(" Symbols retrieved.")
else:
print(" Failed to retrieve symbols.")
if tempdirs:
symbol_path = ";".join(tempdirs)
print("Stripped PDBs are in %s. Converting to symcache files now." % symbol_path)
os.environ["_NT_SYMBOL_PATH"] = symbol_path
# Create a list of to/from renamed tuples
renames = []
error = False
try:
rename_errors = False
for local_pdb in local_symbol_files:
temp_name = local_pdb + "x"
print("Renaming %s to %s to stop unstripped PDBs from being used." % (local_pdb, temp_name))
try:
# If the destination file exists we have to rename it or else the
# rename will fail.
if os.path.exists(temp_name):
os.remove(temp_name)
os.rename(local_pdb, temp_name)
      except Exception as e:
        # Rename can and does throw exceptions. We must catch and continue.
        print("Hit exception while renaming %s to %s. Continuing.\n%s" % (local_pdb, temp_name, e))
rename_errors = True
else:
renames.append((local_pdb, temp_name))
#-build = build the symcache store for this trace (see xperf -help symcache)
if rename_errors:
print("Skipping symbol generation due to PDB rename errors. WPA symbol loading may be slow.")
else:
gen_command = 'xperf -i "%s" -symbols -tle -tti -a symcache -build' % tracename
print("> %s" % gen_command)
for line in os.popen(gen_command).readlines():
pass # Don't print line
except KeyboardInterrupt:
# Catch Ctrl+C exception so that PDBs will get renamed back.
if renames:
print("Ctrl+C detected. Renaming PDBs back.")
error = True
for rename_names in renames:
try:
os.rename(rename_names[1], rename_names[0])
      except Exception as e:
        # Rename can and does throw exceptions. We must catch and continue.
        print("Hit exception while renaming %s back. Continuing.\n%s" % (rename_names[1], e))
for symcache_file in symcache_files:
if os.path.exists(symcache_file):
print("%s generated." % symcache_file)
else:
print("Error: %s not generated." % symcache_file)
error = True
# Delete the stripped PDB files
if error:
print("Retaining PDBs to allow rerunning xperf command-line.")
print("If re-running the command be sure to go:")
print("set _NT_SYMBOL_PATH=%s" % symbol_path)
else:
for directory in tempdirs:
shutil.rmtree(directory, ignore_errors=True)
elif strip_and_translate:
if found_uncached:
print("No PDBs copied, nothing to do.")
else:
print("No uncached PDBS found, nothing to do.")
if __name__ == "__main__":
main()
|
apache-2.0
| 8,157,729,412,689,827,000
| 44.795222
| 151
| 0.651448
| false
| 3.702674
| false
| false
| false
|
Loreton/MP3Catalog
|
Source/Project/ExcelDB/ExcelCatalog.py
|
1
|
1400
|
#!/usr/bin/python
# -*- coding: iso-8859-1 -*-
#
# Scope: Program for ...........
# by Loreto Notarantonio 2013, February
# ######################################################################################
import os, sys
import ast
################################################################################
# - M A I N
# - Provides:
# -     2 - Input parameter checking
# -     5 - Call to the project's main program
################################################################################
def ReadExcelDB(gv, xlsFile, rangeToProcess):
logger = gv.Ln.SetLogger(package=__name__)
C = gv.Ln.LnColor()
    csvFileInput = xlsFile.rsplit('.', 1)[0] + '.csv'  # strip only the extension
logger.debug('XLS file name: {0}'.format(xlsFile))
logger.debug('CSV file name: {0}'.format(csvFileInput))
    # - If the CSV is older than the XLS, perform the export
if gv.Ln.Fmtime(xlsFile) > gv.Ln.Fmtime(csvFileInput):
msg= 'range To process: {0}'.format(rangeToProcess)
logger.debug(msg); print(msg)
mydata = gv.Ln.Excel(xlsFile)
mydata.exportCSV('Catalog', outFname=csvFileInput, rangeString=rangeToProcess, colNames=4, fPRINT=True)
else:
msg = 'excel file is older than CSV file. No export will take place.'
logger.debug(msg); print(msg)
return csvFileInput
|
unlicense
| -2,451,188,132,575,044,000
| 32.333333
| 111
| 0.502143
| false
| 3.655352
| false
| false
| false
|
psi4/mongo_qcdb
|
qcfractal/storage_sockets/db_queries.py
|
1
|
14717
|
from typing import List, Optional, Set, Union
from sqlalchemy import Integer, inspect
from sqlalchemy.sql import bindparam, text
from qcfractal.interface.models import Molecule, ResultRecord
from qcfractal.storage_sockets.models import MoleculeORM, ResultORM
QUERY_CLASSES = set()
class QueryBase:
# The name/alias used by the REST APIs to access this class
_class_name = None
_available_groupby = set()
# Mapping of the requested feature and the internal query method
_query_method_map = {}
def __init__(self, database_name, max_limit=1000):
self.database_name = database_name
self.max_limit = max_limit
def __init_subclass__(cls, **kwargs):
if cls not in QUERY_CLASSES:
QUERY_CLASSES.add(cls)
super().__init_subclass__(**kwargs)
def query(self, session, query_key, limit=0, skip=0, include=None, exclude=None, **kwargs):
if query_key not in self._query_method_map:
raise TypeError(f"Query type {query_key} is unimplemented for class {self._class_name}")
self.session = session
return getattr(self, self._query_method_map[query_key])(**kwargs)
def execute_query(self, sql_statement, with_keys=True, **kwargs):
"""Execute sql statemet, apply limit, and return results as dict if needed"""
# TODO: check count first, way to iterate
# sql_statement += f' LIMIT {self.max_limit}'
result = self.session.execute(sql_statement, kwargs)
keys = result.keys() # get keys before fetching
result = result.fetchall()
self.session.commit()
# create a list of dict with the keys and values of the results (instead of tuples)
if with_keys:
result = [dict(zip(keys, res)) for res in result]
return result
def _base_count(self, table_name: str, available_groupbys: Set[str], groupby: Optional[List[str]] = None):
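        # Builds "select [groupby cols,] count(*) from <table_name>" with optional
        # GROUP BY/ORDER BY. Illustrative use: _base_count("base_result", {"status"},
        # groupby=["status"]) returns one {"status": ..., "count": ...} row per status.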
if groupby:
bad_groups = set(groupby) - available_groupbys
if bad_groups:
raise AttributeError(f"The following groups are not permissible: {missing}")
global_str = ", ".join(groupby)
select_str = global_str + ", "
extra_str = f"""GROUP BY {global_str}\nORDER BY {global_str}"""
else:
select_str = ""
extra_str = ""
sql_statement = f"""
select {select_str}count(*) from {table_name}
{extra_str}
"""
ret = self.execute_query(sql_statement, with_keys=True)
if groupby:
return ret
else:
return ret[0]["count"]
    def _raise_missing_attribute(self, query_key, missing_attribute, amend_msg=""):
        """Raises an error for a missing attribute in a message suitable for the REST user"""
        raise AttributeError(f"To query {self._class_name} for {query_key} " f"you must provide {missing_attribute}.")
# ----------------------------------------------------------------------------
class TaskQueries(QueryBase):
_class_name = "task"
_query_method_map = {"counts": "_task_counts"}
def _task_counts(self):
sql_statement = f"""
SELECT tag, priority, status, count(*)
FROM task_queue
WHERE True
group by tag, priority, status
order by tag, priority, status
"""
return self.execute_query(sql_statement, with_keys=True)
# ----------------------------------------------------------------------------
class DatabaseStatQueries(QueryBase):
_class_name = "database_stats"
_query_method_map = {
"table_count": "_table_count",
"database_size": "_database_size",
"table_information": "_table_information",
}
def _table_count(self, table_name=None):
if table_name is None:
self._raise_missing_attribute("table_name", "table name")
sql_statement = f"SELECT count(*) from {table_name}"
return self.execute_query(sql_statement, with_keys=False)[0]
def _database_size(self):
sql_statement = f"SELECT pg_database_size('{self.database_name}')"
return self.execute_query(sql_statement, with_keys=True)[0]["pg_database_size"]
def _table_information(self):
sql_statement = f"""
SELECT relname AS table_name
, c.reltuples::BIGINT AS row_estimate
, pg_total_relation_size(c.oid) AS total_bytes
, pg_indexes_size(c.oid) AS index_bytes
, pg_total_relation_size(reltoastrelid) AS toast_bytes
FROM pg_class c
LEFT JOIN pg_namespace n ON n.oid = c.relnamespace
WHERE relkind = 'r';
"""
result = self.execute_query(sql_statement, with_keys=False)
ret = []
for row in result:
if ("pg_" in row[0]) or ("sql_" in row[0]):
continue
ret.append(list(row))
ret = {"columns": ["table_name", "row_estimate", "total_bytes", "index_bytes", "toast_bytes"], "rows": ret}
return ret
class ResultQueries(QueryBase):
_class_name = "result"
_query_method_map = {"count": "_count"}
def _count(self, groupby: Optional[List[str]] = None):
available_groupbys = {"result_type", "status"}
return self._base_count("base_result", available_groupbys, groupby=groupby)
class MoleculeQueries(QueryBase):
_class_name = "molecule"
_query_method_map = {"count": "_count"}
def _count(self, groupby: Optional[List[str]] = None):
available_groupbys = set()
return self._base_count("molecule", available_groupbys, groupby=groupby)
# ----------------------------------------------------------------------------
class TorsionDriveQueries(QueryBase):
_class_name = "torsiondrive"
_query_method_map = {
"initial_molecules": "_get_initial_molecules",
"initial_molecules_ids": "_get_initial_molecules_ids",
"final_molecules": "_get_final_molecules",
"final_molecules_ids": "_get_final_molecules_ids",
"return_results": "_get_return_results",
}
def _get_initial_molecules_ids(self, torsion_id=None):
if torsion_id is None:
self._raise_missing_attribute("initial_molecules_ids", "torsion drive id")
sql_statement = f"""
select initial_molecule from optimization_procedure as opt where opt.id in
(
select opt_id from optimization_history where torsion_id = {torsion_id}
)
order by opt.id
"""
return self.execute_query(sql_statement, with_keys=False)
def _get_initial_molecules(self, torsion_id=None):
if torsion_id is None:
self._raise_missing_attribute("initial_molecules", "torsion drive id")
sql_statement = f"""
select molecule.* from molecule
join optimization_procedure as opt
on molecule.id = opt.initial_molecule
where opt.id in
(select opt_id from optimization_history where torsion_id = {torsion_id})
"""
return self.execute_query(sql_statement, with_keys=True)
def _get_final_molecules_ids(self, torsion_id=None):
if torsion_id is None:
self._raise_missing_attribute("final_molecules_ids", "torsion drive id")
sql_statement = f"""
select final_molecule from optimization_procedure as opt where opt.id in
(
select opt_id from optimization_history where torsion_id = {torsion_id}
)
order by opt.id
"""
return self.execute_query(sql_statement, with_keys=False)
def _get_final_molecules(self, torsion_id=None):
if torsion_id is None:
self._raise_missing_attribute("final_molecules", "torsion drive id")
sql_statement = f"""
select molecule.* from molecule
join optimization_procedure as opt
on molecule.id = opt.final_molecule
where opt.id in
(select opt_id from optimization_history where torsion_id = {torsion_id})
"""
return self.execute_query(sql_statement, with_keys=True)
def _get_return_results(self, torsion_id=None):
"""All return results ids of a torsion drive"""
if torsion_id is None:
self._raise_missing_attribute("return_results", "torsion drive id")
sql_statement = f"""
select opt_res.opt_id, result.id as result_id, result.return_result from result
join opt_result_association as opt_res
on result.id = opt_res.result_id
where opt_res.opt_id in
(
select opt_id from optimization_history where torsion_id = {torsion_id}
)
"""
return self.execute_query(sql_statement, with_keys=False)
class OptimizationQueries(QueryBase):
_class_name = "optimization"
_exclude = ["molecule_hash", "molecular_formula", "result_type"]
_query_method_map = {
"all_results": "_get_all_results",
"final_result": "_get_final_results",
"initial_molecule": "_get_initial_molecules",
"final_molecule": "_get_final_molecules",
}
def _remove_excluded_keys(self, data):
for key in self._exclude:
data.pop(key, None)
def _get_all_results(self, optimization_ids: List[Union[int, str]] = None):
"""Returns all the results objects (trajectory) of each optmization
Returns list(list) """
if optimization_ids is None:
self._raise_missing_attribute("all_results", "List of optimizations ids")
# row_to_json(result.*)
sql_statement = text(
"""
select * from base_result
join (
select opt_id, result.* from result
join opt_result_association as traj
on result.id = traj.result_id
where traj.opt_id in :optimization_ids
) result
on base_result.id = result.id
"""
)
# bind and expand ids list
sql_statement = sql_statement.bindparams(bindparam("optimization_ids", expanding=True))
# column types:
columns = inspect(ResultORM).columns
sql_statement = sql_statement.columns(opt_id=Integer, *columns)
query_result = self.execute_query(sql_statement, optimization_ids=list(optimization_ids))
ret = {}
for rec in query_result:
self._remove_excluded_keys(rec)
key = rec.pop("opt_id")
if key not in ret:
ret[key] = []
ret[key].append(ResultRecord(**rec))
return ret
def _get_final_results(self, optimization_ids: List[Union[int, str]] = None):
"""Return the actual results objects of the best result in each optimization"""
if optimization_ids is None:
self._raise_missing_attribute("final_result", "List of optimizations ids")
sql_statement = text(
"""
select * from base_result
join (
select opt_id, result.* from result
join (
select opt.opt_id, opt.result_id, max_pos from opt_result_association as opt
inner join (
select opt_id, max(position) as max_pos from opt_result_association
where opt_id in :optimization_ids
group by opt_id
) opt2
on opt.opt_id = opt2.opt_id and opt.position = opt2.max_pos
) traj
on result.id = traj.result_id
) result
on base_result.id = result.id
"""
)
# bind and expand ids list
sql_statement = sql_statement.bindparams(bindparam("optimization_ids", expanding=True))
# column types:
columns = inspect(ResultORM).columns
sql_statement = sql_statement.columns(opt_id=Integer, *columns)
query_result = self.execute_query(sql_statement, optimization_ids=list(optimization_ids))
ret = {}
for rec in query_result:
self._remove_excluded_keys(rec)
key = rec.pop("opt_id")
ret[key] = ResultRecord(**rec)
return ret
def _get_initial_molecules(self, optimization_ids=None):
if optimization_ids is None:
self._raise_missing_attribute("initial_molecule", "List of optimizations ids")
sql_statement = text(
"""
select opt.id as opt_id, molecule.* from molecule
join optimization_procedure as opt
on molecule.id = opt.initial_molecule
where opt.id in :optimization_ids
"""
)
# bind and expand ids list
sql_statement = sql_statement.bindparams(bindparam("optimization_ids", expanding=True))
# column types:
columns = inspect(MoleculeORM).columns
sql_statement = sql_statement.columns(opt_id=Integer, *columns)
query_result = self.execute_query(sql_statement, optimization_ids=list(optimization_ids))
ret = {}
for rec in query_result:
self._remove_excluded_keys(rec)
key = rec.pop("opt_id")
rec = {k: v for k, v in rec.items() if v is not None}
ret[key] = Molecule(**rec)
return ret
def _get_final_molecules(self, optimization_ids=None):
if optimization_ids is None:
self._raise_missing_attribute("final_molecule", "List of optimizations ids")
sql_statement = text(
"""
select opt.id as opt_id, molecule.* from molecule
join optimization_procedure as opt
on molecule.id = opt.final_molecule
where opt.id in :optimization_ids
"""
)
# bind and expand ids list
sql_statement = sql_statement.bindparams(bindparam("optimization_ids", expanding=True))
# column types:
columns = inspect(MoleculeORM).columns
sql_statement = sql_statement.columns(opt_id=Integer, *columns)
query_result = self.execute_query(sql_statement, optimization_ids=list(optimization_ids))
ret = {}
for rec in query_result:
self._remove_excluded_keys(rec)
key = rec.pop("opt_id")
rec = {k: v for k, v in rec.items() if v is not None}
ret[key] = Molecule(**rec)
return ret
|
bsd-3-clause
| -2,396,430,855,391,345,700
| 32.523918
| 117
| 0.575865
| false
| 4.10516
| false
| false
| false
|
wojtex/cantionale
|
title_index.py
|
1
|
1914
|
class TitleIndex:
def __init__(self, songbook, params):
self.title = ''
self.filter = lambda x : True
if 'title' in params: self.title = params['title']
if 'filter' in params: self.filter = params['filter']
def draw(self, canvas, songbook):
sb = songbook
st = sb.style
c = canvas
wdt = sb.width
position = sb.height - st.title_index_margin_top
c.setFont(st.title_index_title_font_name, st.title_index_title_font_size)
for line in self.title.strip().split(sep='\n'):
position -= st.title_index_title_line_height
c.drawCentredString(wdt/2, position, line)
position -= st.title_index_title_song_spacing
songs = []
for section in songbook.sections:
for no, song in enumerate(section.songs):
if self.filter((no,song)):
songs.append((song.title, section.index(no+1)))
songs.sort()
if sb.is_left_page(c):
margin_left = st.title_index_margin_outer
margin_right = st.title_index_margin_inner
else:
            margin_left = st.title_index_margin_inner
            margin_right = st.title_index_margin_outer
lh = st.title_index_song_line_height
for title, index in songs:
if lh + st.title_index_margin_bottom > position:
c.showPage()
position = sb.height - st.title_index_margin_top
if sb.is_left_page(c):
margin_left = st.title_index_margin_outer
margin_right = st.title_index_margin_inner
else:
margin_left = st.title_index_margin_inner
margin_right = st.title_index_margin_outer
position -= st.title_index_song_song_spacing
position -= lh
c.setFont(st.title_index_song_number_font_name, st.title_index_song_number_font_size)
c.drawRightString(st.title_index_song_number_indent + margin_left, position, index)
c.setFont(st.title_index_song_title_font_name, st.title_index_song_title_font_size)
c.drawString(st.title_index_song_title_indent + margin_left, position, title)
c.showPage()
if sb.is_left_page(c):
c.showPage()
|
mit
| -7,055,911,410,535,979,000
| 33.178571
| 88
| 0.692268
| false
| 2.835556
| false
| false
| false
|
mvaled/sentry
|
src/debug_toolbar/panels/sql/forms.py
|
1
|
2785
|
from __future__ import absolute_import, unicode_literals
import json
import hashlib
from django import forms
from django.conf import settings
from django.db import connections
from django.utils.encoding import force_text
from django.utils.functional import cached_property
from django.core.exceptions import ValidationError
from debug_toolbar.panels.sql.utils import reformat_sql
class SQLSelectForm(forms.Form):
"""
Validate params
sql: The sql statement with interpolated params
raw_sql: The sql statement with placeholders
params: JSON encoded parameter values
duration: time for SQL to execute passed in from toolbar just for redisplay
hash: the hash of (secret + sql + params) for tamper checking
"""
sql = forms.CharField()
raw_sql = forms.CharField()
params = forms.CharField()
alias = forms.CharField(required=False, initial="default")
duration = forms.FloatField()
hash = forms.CharField()
def __init__(self, *args, **kwargs):
initial = kwargs.get("initial", None)
if initial is not None:
initial["hash"] = self.make_hash(initial)
super(SQLSelectForm, self).__init__(*args, **kwargs)
for name in self.fields:
self.fields[name].widget = forms.HiddenInput()
def clean_raw_sql(self):
value = self.cleaned_data["raw_sql"]
if not value.lower().strip().startswith("select"):
raise ValidationError("Only 'select' queries are allowed.")
return value
def clean_params(self):
value = self.cleaned_data["params"]
try:
return json.loads(value)
except ValueError:
raise ValidationError("Is not valid JSON")
def clean_alias(self):
value = self.cleaned_data["alias"]
if value not in connections:
raise ValidationError("Database alias '%s' not found" % value)
return value
def clean_hash(self):
hash = self.cleaned_data["hash"]
if hash != self.make_hash(self.data):
raise ValidationError("Tamper alert")
return hash
def reformat_sql(self):
return reformat_sql(self.cleaned_data["sql"])
def make_hash(self, data):
items = [settings.SECRET_KEY, data["sql"], data["params"]]
# Replace lines endings with spaces to preserve the hash value
# even when the browser normalizes \r\n to \n in inputs.
items = [" ".join(force_text(item).splitlines()) for item in items]
return hashlib.sha1("".join(items).encode("utf-8")).hexdigest()
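    # Note (illustrative): the digest is a 40-character sha1 hex string; because
    # SECRET_KEY is mixed in, clean_hash() rejects any client-side tampering.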
@property
def connection(self):
return connections[self.cleaned_data["alias"]]
@cached_property
def cursor(self):
return self.connection.cursor()
|
bsd-3-clause
| -6,826,950,877,515,031,000
| 28.946237
| 83
| 0.647038
| false
| 4.311146
| false
| false
| false
|
manusev/plugin.video.kuchitv
|
resources/regex/freebroadcast.py
|
1
|
5105
|
# -*- coding: utf-8 -*-
#------------------------------------------------------------
# MonsterTV - XBMC Add-on by Juarrox (juarrox@gmail.com)
# Version 0.2.9 (18.07.2014)
#------------------------------------------------------------
# License: GPL (http://www.gnu.org/licenses/gpl-3.0.html)
# Thanks to the plugintools library by Jesús (www.mimediacenter.info)
import os
import sys
import urllib
import urllib2
import re
import shutil
import zipfile
import time
import xbmc
import xbmcgui
import xbmcaddon
import xbmcplugin
import plugintools
import json
addonName = xbmcaddon.Addon().getAddonInfo("name")
addonVersion = xbmcaddon.Addon().getAddonInfo("version")
addonId = xbmcaddon.Addon().getAddonInfo("id")
addonPath = xbmcaddon.Addon().getAddonInfo("path")
# Function that drives the process of building the original URL
def freebroadcast(params):
plugintools.log("[MonsterTV-0.3.0].freebroadcast "+repr(params))
url_user = {}
    # Build the dictionary...
url = params.get("url")
url_extracted = url.split(" ")
for entry in url_extracted:
if entry.startswith("rtmp"):
entry = entry.replace("rtmp=", "")
url_user["rtmp"]=entry
elif entry.startswith("playpath"):
entry = entry.replace("playpath=", "")
url_user["playpath"]=entry
elif entry.startswith("swfUrl"):
entry = entry.replace("swfUrl=", "")
url_user["swfurl"]=entry
elif entry.startswith("pageUrl"):
entry = entry.replace("pageUrl=", "")
url_user["pageurl"]=entry
elif entry.startswith("token"):
entry = entry.replace("token=", "")
url_user["token"]=entry
elif entry.startswith("referer"):
entry = entry.replace("referer=", "")
url_user["referer"]=entry
plugintools.log("URL_user dict= "+repr(url_user))
pageurl = url_user.get("pageurl")
    # Handle both URL cases: a single link (pageUrl) or a full rtmp:// link
if pageurl is None:
pageurl = url_user.get("url")
referer= url_user.get("referer")
if referer is None:
referer = 'http://www.juanin.tv'
# channel_id = re.compile('channel=([^&]*)').findall(pageurl)
# print channel_id
# channel_id = channel_id[0]
pageurl = 'http://freebroadcast.pw/embed/embed.php?n=' + url_user.get("playpath") + '&w=670&h=400'
url_user["pageurl"]=pageurl
print 'pageurl',pageurl
print 'referer',referer
body = gethttp_headers(pageurl, referer)
getparams_freebroadcast(url_user, body)
url = url_user.get("ip") + ' playpath=' + url_user.get("playpath") + ' swfUrl=http://freebroadcast.pw/player/player.swf pageUrl=' + url_user.get("pageurl") + ' live=1 timeout=10'
plugintools.play_resolved_url(url)
# Make a request to the pageUrl
def gethttp_headers(pageurl, referer):
request_headers=[]
request_headers.append(["User-Agent","Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_3) AppleWebKit/537.31 (KHTML, like Gecko) Chrome/26.0.1410.65 Safari/537.31"])
# request_headers.append(["Referer",referer])
body,response_headers = plugintools.read_body_and_headers(pageurl, headers=request_headers)
plugintools.log("body= "+body)
return body
# Start the protocol for building the original URL
# Capture the correct parameters
def getparams_freebroadcast(url_user, body):
plugintools.log("[MonsterTV-0.3.0].getparams_freebroadcast " + repr(url_user) )
    # Build the 9stream dictionary
entry = plugintools.find_single_match(body, 'setStream(token) {(.*?)}')
ip = re.compile("streamer', \'(.*?)\'").findall(body)
url_user["ip"]=str(ip[0])
plugintools.log("IP= "+str(ip[0]))
# Capture the playpath
def getfile_freebroadcast(url_user, decoded, body):
plugintools.log("MonsterTV getfile_freebroadcast( "+repr(url_user))
referer = url_user.get("referer")
req = urllib2.Request(decoded)
req.add_header('User-Agent','Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.154 Safari/537.36')
req.add_header('Referer', referer)
response = urllib2.urlopen(req)
print response
data = response.read()
print data
file = re.compile("file': '([^.]*)").findall(data)
print 'file',file
return file
# Capture fileserver.php (the server token)
def get_fileserver(decoded, url_user):
plugintools.log("MonsterTV fileserver "+repr(url_user))
referer=url_user.get("pageurl")
req = urllib2.Request(decoded)
req.add_header('User-Agent','Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.154 Safari/537.36')
req.add_header('Referer',referer)
response = urllib2.urlopen(req)
print response
data = response.read()
print data
token = re.compile('token":"(.*)"').findall(data)
print 'token',token
return token
|
gpl-2.0
| 6,524,703,570,245,585,000
| 34.395833
| 182
| 0.627232
| false
| 3.400267
| false
| false
| false
|
soybean217/lora-python
|
UServer/admin_server/admin_data_update/model/gateway_locaton_data.py
|
1
|
1618
|
# _*_ coding:utf-8 _*_
from database.db4 import db4, Channel4, ConstDB4
from utils.log import Logger, Action
class Location:
channel_name = Channel4.gis_gateway_location + '*'
def __init__(self):
self.ps = db4.pubsub()
def psubscribe_gis(self):
self.ps.psubscribe(self.channel_name)
return self.ps
def stop_listen(self):
if hasattr(self, 'ps'):
self.ps.punsubscribe()
def listen_gis_gateway_location(self):
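        # Each pmessage payload is expected to be a "lng,lat,alt" string; the
        # gateway id is taken from the second ':'-separated field of the channel name.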
Logger.info(Action.listen, 'psubscribe', self.channel_name, 'Begin listen')
ps_init = self.psubscribe_gis()
for item in ps_init.listen():
if item is not None:
if item['type'] == 'pmessage':
Logger.info(Action.listen, item['channel'].decode(), 'MESSAGE', item['data'].decode())
gateway_id = item['channel'].decode().split(':')[1]
location_data = item['data'].decode().split(',')
if len(location_data) == 3:
lng = float(location_data[0])
lat = float(location_data[1])
alt = int(location_data[2])
msg = self.Object(gateway_id, lat=lat, lng=lng, alt=alt)
yield msg
else:
Logger.info(Action.listen, item['channel'].decode(), item['type'], item['data'])
class Object:
def __init__(self, gw_id, lat, lng, alt):
self.gateway_id = gw_id
self.latitude = lat
self.longitude = lng
self.altitude = alt
|
mit
| -8,514,074,294,372,980,000
| 34.173913
| 106
| 0.520396
| false
| 4.00495
| false
| false
| false
|
geishatokyo-lightning/lightning
|
lightning_core/vg/cssanim.py
|
1
|
14273
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2011 Geisha Tokyo Entertainment, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN
# AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from __future__ import with_statement
import os
import sys
from lxml import etree
from copy import deepcopy
from parser import *
from StringIO import StringIO
import logging
import simplejson as json
import re
from collections import deque
from copy import deepcopy
logging.basicConfig(level=logging.DEBUG,
format='%(asctime)s %(levelname)s %(message)s',
datefmt='%Y/%m/%d %H:%M:%S')
class CssElement(dict):
def __init__(self, title=''):
super(CssElement, self).__init__()
self.common_element = {
'position' : ['position', 'absolute'],
'transform': ['-webkit-transform', None],
'origin' : ['-webkit-transform-origin', '0.0px 0.0px'],
}
self.animation_element = {
'name' : ['-webkit-animation-name', None],
'timing' : ['-webkit-animation-timing-function', 'linear'],
'count' : ['-webkit-animation-iteration-count', 'infinite'],
'duration' : ['-webkit-animation-duration', None],
}
self.shape_element = {
'left' : ['left' , None],
'top' : ['top' , None],
'width' : ['width' , None],
'height' : ['height' , None],
}
self.title = title
self.sp = '\n' # splitter
def __str__(self):
content = self.sp.join(['%s: %s;' % (k,v) for k,v in self.iteritems()])
return '%s {%s%s%s}%s' % (self.title, self.sp, content, self.sp, self.sp)
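    # Illustrative output: a CssElement titled '.obj1_shape' holding {'left': '0px'}
    # renders as ".obj1_shape {\nleft: 0px;\n}\n".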
def add_anims_element(self, key, anim_length, has_anim_name):
self.animation_element['name'][1] = key
self.animation_element['duration'][1] = '%fs'%(float(anim_length)/12.0)
if not has_anim_name:
del self.animation_element['name']
self.update(self.animation_element.values())
self.update(self.common_element.values())
def add_shape_element(self, shape_key, shape_table):
def calc_twips_to_pixel(twips):
return '%dpx' % int(round(float(twips)/20))
shape = shape_table[shape_key]
self.shape_element['left'][1] = calc_twips_to_pixel(shape.left)
self.shape_element['top'][1] = calc_twips_to_pixel(shape.top)
self.shape_element['width'][1] = calc_twips_to_pixel(shape.width)
self.shape_element['height'][1] = calc_twips_to_pixel(shape.height)
self.update(self.shape_element.values())
del self.common_element['origin']
def add_origin_element(self, matrix):
self.common_element['transform'][1] = matrix
self.update(self.common_element.values())
class SvgShape(object):
def __init__(self, elem):
self.obj = elem[0]
self.hash = elem[1]
self.left = int(elem[2])
self.top = int(elem[3])
self.width = int(elem[4])
self.height = int(elem[5])
self.symbol = ''
self.edges = []
self.defs =[]
def filename(self, dir_path='.'):
return os.path.join(dir_path, '%s_%s.svg' % (self.obj, self.hash))
class SvgTransform(Transform):
def __init__(self, attrib):
super(SvgTransform,self).__init__()
values = dict([(k,float(attrib[k])) if k in attrib else (k,None) for k in self.MATRIX])
self.set_items(values)
if 'depth' in attrib:
self.depth = int(attrib['depth'])
if 'ctf' in attrib:
self.ctf = ColorTransform([int(ctf.strip()) for ctf in attrib['ctf'].strip('[]').split(',') if ctf.strip().lstrip('-').isdigit()])
if 'clipDepth' in attrib:
self.clipDepth = int(attrib['clipDepth'])
if 'visible' in attrib and attrib['visible'] == 'False':
self.visible = False
def __eq__(self, other):
return [self.sx, self.sy, self.wx, self.wy, self.tx, self.ty, self.get_opacity()]==other
def write_matrix(self):
return self._shorten('matrix(%.6f,%.6f,%.6f,%.6f,%.6f,%.6f)' % self.get_matrix())
def write_matrix3d(self):
return self._shorten('matrix3d(%.6f,%.6f,0,0,%.6f,%.6f,0,0,0,0,1,0,%.6f,%.6f,0,1)' % (self.sx, self.wx, self.wy, self.sy, self.tx/20, self.ty/20))
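    # The 2D affine terms are embedded into a CSS matrix3d; tx and ty are divided
    # by 20 because the SWF values are in twips (20 twips = 1 px).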
def write_webkit_transform(self):
return self._shorten('-webkit-transform: %s;' % self.write_matrix3d())
def _shorten(self, str):
return str.replace('.000000', '.0')
def get_opacity(self):
opacity = 1.0
if not self.visible:
opacity = 0.0
else:
if len(self.ctf) == 8:
c = Color([0,0,0,256])
c.transform(self.ctf)
opacity = (float(c.a) / 256.0)
return opacity
def write_visible(self):
return self._shorten('opacity: %.6f;' % self.get_opacity())
class AnimationManager(object):
def __init__(self, dir_path, basefilename):
self.dir_path = dir_path
self.shapes_filepath = self._get_path('shapes')
self.animation_filepath = self._get_path('animation.xml')
self.structure_filepath = self._get_path('structure.xml')
self.cssfilepath = os.path.join('.', basefilename + '.css')
self.htmlfilepath = os.path.join('.', basefilename + '.html')
        self.xmlfilename = os.path.basename(basefilename.replace('.svg', ''))
def _get_path(self, filename):
return os.path.join(self.dir_path, filename)
def load_shapes(self):
with open(self.shapes_filepath, 'r') as f:
return self.get_shapes(f.readlines())
def get_shapes(self, lines):
shape_table = {}
for line in lines:
elems = line.split(' ')
if len(elems) == 6: # 'shapes'
shape_table[elems[0]] = SvgShape(elems)
return shape_table
def load_animation(self):
root = self._parse_xml(self.animation_filepath)
return self.get_animation(root)
def get_animation(self, root):
anim_table = {}
for anim in root.xpath('//animation'):
key = anim.attrib['key'][:-2]
frames = anim.findall('frame')
anim_table[key] = [SvgTransform(frame.attrib) for frame in frames]
return anim_table
def load_structure(self, shape_table, parser_shapes):
root = self._parse_xml(self.structure_filepath)
return self.get_structure(root, shape_table, parser_shapes)
def get_structure(self, root, shape_table, anim_table, ctfsArray, parser_shapes, mcname=None, key_prefix=""):
def get_parent_key(elem):
parent = elem.getparent()
            if parent is not None and 'class' in parent.attrib:
p_attrib_cls = parent.attrib['class']
s = re.search('obj\d+', p_attrib_cls)
if s is not None:
return s.group()
else:
return ''
def update_elem(elem, key, name, hasClipDepth):
elem.tag = 'div'
elem.attrib.clear()
elem.attrib['class'] = key
if name is not None :
elem.attrib['id'] = name
if hasClipDepth:
elem.attrib['style'] = 'display:none;'
structure_table = {}
if mcname is None:
root_elem = root
else:
r = root.xpath('//part[@name="%s"]'%mcname)
            if not r:
                root_elem = root
            else:
                root_elem = r[0]
for elem in root.xpath('//part'):
if 'key' in elem.attrib:
key = elem.attrib['key']
objId = LUtil.objectID_from_key(key)
depth = elem.attrib['depth']
hasClipDepth = 'clipDepth' in elem.attrib
name = elem.attrib['name'] if 'name' in elem.attrib else None
ctf = json.loads(elem.attrib['ctf'])
if len(ctf) > 1:
ctfsArray.append({key:ctf})
key_depth = LUtil.make_key_string(objId, prefix=key_prefix, suffix=depth)
structure_table[key_depth] = SvgTransform(elem.attrib)
update_elem(elem, key_depth, name, hasClipDepth)
k = objId[3:]
if (len(elem) == 0) and (k in parser_shapes):
shape_key = LUtil.make_key_string(objId, prefix=key_prefix, suffix='shape')
parent_key = get_parent_key(elem)
childdiv = etree.Element('div')
childdiv.set('class', shape_key)
structure_table[shape_key] = SvgTransform(childdiv.attrib)
svgelem = Parser.str_shape_as_svg(parser_shapes[k], ctfsArray, parent_key)
childdiv.append(svgelem)
elem.append(childdiv)
structure_tree = deepcopy(root_elem)
return structure_table, structure_tree
    def _parse_xml(self, filepath):
        with open(filepath, 'r') as f:
            return etree.parse(f)
    def _remove_duplicated_keyframes(self, anim_elements):
anim_buffer = deque()
result = []
for percent, transform in anim_elements:
anim_buffer.append((percent, transform))
if len(anim_buffer) == 3:
if anim_buffer[0][1] == anim_buffer[1][1] and anim_buffer[0][1] == anim_buffer[2][1]:
anim_buffer = deque((anim_buffer[0], anim_buffer[2]))
else:
result.append(anim_buffer.popleft())
result.extend(list(anim_buffer))
return result
def _interpolate_keyframes(self, anim_elements, eps=0.0001):
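        # Adds an extra keyframe eps percent before each visibility flip so the
        # opacity change renders as a step instead of a linear fade between frames.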
result = []
old_transform = None
for i, (percent, transform) in enumerate(anim_elements):
if old_transform is not None:
if (not old_transform.visible and transform.visible):
temp_transform = deepcopy(transform)
temp_transform.visible = old_transform.visible
result.append((percent - eps, temp_transform))
elif (old_transform.visible and not transform.visible):
result.append((percent - eps, old_transform))
result.append((percent, transform))
old_transform = transform
if len(result) > 0:
result.append((100.0, result[0][1])) # 100% animation
return result
def _make_keyframes(self, anim_table, key_prefix='', sp='\n'):
keyframes = []
for key, value in anim_table.iteritems():
anim_length = len(value)
anim_elements = [((float(i*100)/float(anim_length)), a) for i,a in enumerate(value)]
            anim_list = ['%f%% { %s %s }' % (percent, a.write_webkit_transform(), a.write_visible()) for percent, a in self._interpolate_keyframes(self._remove_duplicated_keyframes(anim_elements))]
anim = sp.join(anim_list)
keyframes.append(sp.join(['@-webkit-keyframes %s {'%(key), anim, '}']))
return (sp+sp).join(keyframes)
def _make_transform(self, structure_table, shape_table, anim_table, key_prefix='', has_anim_name=True, sp='\n'):
result = []
for key, structure in structure_table.iteritems():
elem = CssElement(title='.%s'%key)
transform = ('-webkit-transform', structure.write_matrix())
if key in anim_table:
anim_length = len(anim_table[key])
elem.add_anims_element(key, anim_length, has_anim_name)
shape_key = LUtil.objectID_from_key(key)
if key.endswith('shape') and shape_key in shape_table:
elem.add_shape_element(shape_key, shape_table)
elem.add_origin_element(structure.write_matrix())
result.append(str(elem))
return (sp+sp).join(result)
def write_html(self, structure_tree, cssfilepath):
template = '''<?xml version="1.0" encoding="utf-8" ?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" lang="ja" xml:lang="ja">
<head>
<meta http-equiv="Content-Type" content="application/xhtml+xml; charset=utf-8"></meta>
<link href="%s" type="text/css" rel="stylesheet"></link>
<title>lightning</title>
</head>
<body>
%s
</body>
</html>
'''
html = template % (cssfilepath, etree.tostring(structure_tree, pretty_print=True))
return html
def write_div(self, structure_tree):
html = "%s" % (etree.tostring(structure_tree, pretty_print=True))
return html
def write_css(self, structure_table, shape_table, anim_table, key_prefix='', has_anim_name=True, sp='\n\n'):
elem = CssElement(title='div')
css = sp.join([self._make_keyframes(anim_table, key_prefix), self._make_transform(structure_table, shape_table, anim_table, key_prefix, has_anim_name)])
return 'svg { display:block; }\n' + css
def _write(self, filepath, content):
with open(filepath, 'w') as f:
f.write(content)
|
mit
| -3,026,379,057,328,387,000
| 39.092697
| 197
| 0.577664
| false
| 3.682405
| false
| false
| false
|
jayantk/jklol
|
scripts/sequence/generate_emission_features.py
|
1
|
1090
|
#!/usr/local/bin/python2.6
import re
import sys
filename = sys.argv[1]
def generate_string_features(word, label):
dict = {}
'''
patterns = ['\d$', '\d\d$', '\d\d\d+$', '\d?\d?:\d\d$',
'[0-9:]+$', '[A-Z]', '[A-Z]$', '[A-Z][A-Z]$',
'[A-Z]+$', '[^0-9A-Za-z]+$', '[^0-9]+$', '[A-Za-z]+$',
'[a-z]+$']
for pattern in patterns:
if re.match(pattern, word):
dict['regex=' + pattern + '_label=' + label] = 1
'''
dict['bias_label=' + label] = 1
dict['word=' + word.lower() + '_label=' + label] = 1
return dict
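# Illustrative output for word "The" and label "NOUN": features "bias_label=NOUN"
# and "word=the_label=NOUN", each with value 1.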
words = set()
labels = set()
with open(filename, 'r') as f:
for line in f:
chunks = line.strip().split(" ")
for i in range(0, len(chunks), 2):
words.add(chunks[i].strip())
labels.add(chunks[i + 1].strip())
for word in words:
for label in labels:
features = generate_string_features(word, label)
for feature in features.keys():
print "%s@#@#@%s@#@#@%s@#@#@%d" % (word, label, feature, features[feature])
|
bsd-2-clause
| 780,676,244,806,136,700
| 26.25
| 87
| 0.478899
| false
| 3.01105
| false
| false
| false
|
praba230890/PYPOWER
|
pypower/t/t_case_ext.py
|
2
|
4105
|
# Copyright (c) 1996-2015 PSERC. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
"""Case data in external format.
"""
from numpy import array, ones, arange, r_
def t_case_ext():
"""Case data in external format used to test C{ext2int} and C{int2ext}.
"""
ppc = {}
## PYPOWER Case Format : Version 2
ppc['version'] = '2'
##----- Power Flow Data -----##
## system MVA base
ppc['baseMVA'] = 100.0
## bus data
# bus_i type Pd Qd Gs Bs area Vm Va baseKV zone Vmax Vmin
ppc['bus'] = array([
[1, 3, 0, 0, 0, 0, 1, 1, 0, 345, 1, 1.1, 0.9],
[2, 2, 0, 0, 0, 0, 1, 1, 0, 345, 1, 1.1, 0.9],
[30, 2, 0, 0, 0, 0, 1, 1, 0, 345, 1, 1.1, 0.9],
[4, 1, 0, 0, 0, 0, 1, 1, 0, 345, 1, 1.1, 0.9],
[5, 1, 90, 30, 0, 0, 1, 1, 0, 345, 1, 1.1, 0.9],
[20, 4, 0, 0, 0, 0, 1, 1, 0, 345, 1, 1.1, 0.9],
[6, 1, 0, 0, 0, 0, 1, 1, 0, 345, 1, 1.1, 0.9],
[7, 1, 100, 35, 0, 0, 1, 1, 0, 345, 1, 1.1, 0.9],
[8, 1, 0, 0, 0, 0, 1, 1, 0, 345, 1, 1.1, 0.9],
[9, 1, 125, 50, 0, 0, 1, 1, 0, 345, 1, 1.1, 0.9]
])
## generator data
# bus, Pg, Qg, Qmax, Qmin, Vg, mBase, status, Pmax, Pmin, Pc1, Pc2,
# Qc1min, Qc1max, Qc2min, Qc2max, ramp_agc, ramp_10, ramp_30, ramp_q, apf
ppc['gen'] = array([
[30, 85, 0, 300, -300, 1, 100, 1, 270, 10, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[2, 163, 0, 300, -300, 1, 100, 1, 300, 10, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[20, 20, 0, 300, -300, 1, 100, 1, 200, 90, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1, 0, 0, 300, -300, 1, 100, 1, 250, 90, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
], float)
## branch data
# fbus, tbus, r, x, b, rateA, rateB, rateC, ratio, angle, status, angmin, angmax
ppc['branch'] = array([
[1, 4, 0, 0.0576, 0, 0, 250, 250, 0, 0, 1, -360, 360],
[4, 5, 0.017, 0.092, 0.158, 0, 250, 250, 0, 0, 1, -360, 360],
[5, 6, 0.039, 0.17, 0.358, 150, 150, 150, 0, 0, 1, -360, 360],
[30, 6, 0, 0.0586, 0, 0, 300, 300, 0, 0, 1, -360, 360],
[6, 7, 0.0119, 0.1008, 0.209, 40, 150, 150, 0, 0, 1, -360, 360],
[7, 8, 0.0085, 0.072, 0.149, 250, 250, 250, 0, 0, 1, -360, 360],
[8, 20, 0, 0.1, 0, 250, 250, 250, 0, 0, 1, -360, 360],
[8, 2, 0, 0.0625, 0, 250, 250, 250, 0, 0, 1, -360, 360],
[8, 9, 0.032, 0.161, 0.306, 250, 250, 250, 0, 0, 1, -360, 360],
[9, 4, 0.01, 0.085, 0.176, 250, 250, 250, 0, 0, 1, -360, 360]
])
##----- OPF Data -----##
## area data
# area refbus
ppc['areas'] = array([
[2, 20],
[1, 5]
], float)
## generator cost data
# 1 startup shutdown n x1 y1 ... xn yn
# 2 startup shutdown n c(n-1) ... c0
ppc['gencost'] = array([
[2, 0, 0, 2, 15, 0, 0, 0, 0, 0, 0, 0],
[1, 0, 0, 4, 0, 0, 100, 2500, 200, 5500, 250, 7250],
[2, 0, 0, 2, 20, 0, 0, 0, 0, 0, 0, 0],
[1, 0, 0, 4, 0, 0, 100, 2000, 200, 4403.5, 270, 6363.5]
])
ppc['A'] = array([
[1, 2, 3, 4, 5, 0, 7, 8, 9, 10, 11, 12, 13, 14, 15, 0, 17, 18, 19, 20, 21, 22, 0, 24, 25, 26, 0, 28, 29, 30],
[2, 4, 6, 8, 10, 0, 14, 16, 18, 20, 22, 24, 26, 28, 30, 0, 34, 36, 38, 40, 42, 44, 0, 48, 50, 52, 0, 56, 58, 60]
], float)
ppc['N'] = array([
[30, 29, 28, 27, 26, 25, 24, 23, 22, 21, 20, 19, 18, 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1],
[60, 58, 56, 54, 52, 50, 48, 46, 44, 42, 40, 38, 36, 34, 32, 30, 28, 26, 24, 22, 20, 18, 16, 14, 12, 10, 8, 6, 4, 2],
], float)
ppc['xbus'] = arange(100, dtype=float).reshape((10, 10))
ppc['xgen'] = arange(16, dtype=float).reshape((4, 4))
ppc['xbranch'] = ppc['xbus'].copy()
ppc['xrows'] = r_[ppc['xbranch'][:, :4], ppc['xgen'], ppc['xbus'][:, :4], -ones((2, 4))]
ppc['xcols'] = ppc['xrows'].T
ppc['x'] = { 'more': ppc['xgen'] }
return ppc
|
bsd-3-clause
| -4,254,840,702,159,437,000
| 40.887755
| 125
| 0.436541
| false
| 2.18119
| false
| false
| false
|
ellmetha/django-machina
|
machina/core/loading.py
|
1
|
3900
|
import sys
import traceback
from django.conf import settings
class AppNotFoundError(Exception):
pass
class ClassNotFoundError(Exception):
pass
def get_class(module_label, classname):
return get_classes(module_label, [classname, ])[0]
def get_classes(module_label, classnames):
""" Imports a set of classes from a given module.
Usage::
get_classes('forum.models', ['Forum', 'ForumReadTrack', ])
"""
app_label = module_label.split('.')[0]
app_module_path = _get_app_module_path(module_label)
if not app_module_path:
raise AppNotFoundError('No app found matching \'{}\''.format(module_label))
# Determines the full module path by appending the module label
# to the base package path of the considered application.
module_path = app_module_path
if '.' in app_module_path:
base_package = app_module_path.rsplit('.' + app_label, 1)[0]
module_path = '{}.{}'.format(base_package, module_label)
# Try to import this module from the related app that is specified
# in the Django settings.
local_imported_module = _import_module(module_path, classnames)
# If the module we tried to import is not located inside the machina
# vanilla apps, try to import it from the corresponding machina app.
machina_imported_module = None
if not app_module_path.startswith('machina.apps'):
machina_imported_module = _import_module(
'{}.{}'.format('machina.apps', module_label), classnames,
)
if local_imported_module is None and machina_imported_module is None:
raise AppNotFoundError('Error importing \'{}\''.format(module_path))
# Any local module is prioritized over the corresponding machina module
imported_modules = [
m for m in (local_imported_module, machina_imported_module) if m is not None
]
return _pick_up_classes(imported_modules, classnames)
def _import_module(module_path, classnames):
""" Tries to import the given Python module path. """
try:
imported_module = __import__(module_path, fromlist=classnames)
return imported_module
except ImportError:
# In case of an ImportError, the module being loaded generally does not exist. But an
# ImportError can occur if the module being loaded exists and another import located inside
# it failed.
#
        # In order to provide a meaningful traceback, the execution information can be inspected in
        # order to determine which case to consider. If the execution information provides more than
        # a certain number of frames, this means that an ImportError occurred while loading the
# initial Python module.
__, __, exc_traceback = sys.exc_info()
frames = traceback.extract_tb(exc_traceback)
if len(frames) > 1:
raise
def _pick_up_classes(modules, classnames):
""" Given a list of class names to retrieve, try to fetch them from the specified list of
modules and returns the list of the fetched classes.
"""
klasses = []
for classname in classnames:
klass = None
for module in modules:
if hasattr(module, classname):
klass = getattr(module, classname)
break
if not klass:
raise ClassNotFoundError('Error fetching \'{}\' in {}'.format(
classname, str([module.__name__ for module in modules]))
)
klasses.append(klass)
return klasses
def _get_app_module_path(module_label):
""" Given a module label, loop over the apps specified in the INSTALLED_APPS to find the
corresponding application module path.
"""
app_name = module_label.rsplit('.', 1)[0]
for app in settings.INSTALLED_APPS:
if app.endswith('.' + app_name) or app == app_name:
return app
return None
|
bsd-3-clause
| 3,359,601,575,584,082,400
| 34.779817
| 100
| 0.65641
| false
| 4.257642
| false
| false
| false
|
ericdill/PyXRF
|
pyxrf/model/guessparam.py
|
1
|
27631
|
# ######################################################################
# Copyright (c) 2014, Brookhaven Science Associates, Brookhaven #
# National Laboratory. All rights reserved. #
# #
# Redistribution and use in source and binary forms, with or without #
# modification, are permitted provided that the following conditions #
# are met: #
# #
# * Redistributions of source code must retain the above copyright #
# notice, this list of conditions and the following disclaimer. #
# #
# * Redistributions in binary form must reproduce the above copyright #
# notice this list of conditions and the following disclaimer in #
# the documentation and/or other materials provided with the #
# distribution. #
# #
# * Neither the name of the Brookhaven Science Associates, Brookhaven #
# National Laboratory nor the names of its contributors may be used #
# to endorse or promote products derived from this software without #
# specific prior written permission. #
# #
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS #
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT #
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS #
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE #
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, #
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES #
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR #
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) #
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, #
# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OTHERWISE) ARISING #
# IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE #
# POSSIBILITY OF SUCH DAMAGE. #
########################################################################
__author__ = 'Li Li'
import numpy as np
import six
import json
from collections import OrderedDict
import copy
import os
from atom.api import (Atom, Str, observe, Typed,
Int, Dict, List, Float, Enum, Bool)
from skxray.fitting.background import snip_method
from skxray.constants.api import XrfElement as Element
from skxray.fitting.xrf_model import (ModelSpectrum, ParamController,
trim, construct_linear_model, linear_spectrum_fitting)
#from pyxrf.model.fit_spectrum import fit_strategy_list
import logging
logger = logging.getLogger(__name__)
bound_options = ['none', 'lohi', 'fixed', 'lo', 'hi']
fit_strategy_list = ['fit_with_tail', 'free_more',
'e_calibration', 'linear',
'adjust_element1', 'adjust_element2', 'adjust_element3']
class Parameter(Atom):
# todo make sure that these are the only valid bound types
bound_type = Enum(*bound_options)
min = Float(-np.inf)
max = Float(np.inf)
value = Float()
default_value = Float()
fit_with_tail = Enum(*bound_options)
free_more = Enum(*bound_options)
adjust_element1 = Enum(*bound_options)
adjust_element2 = Enum(*bound_options)
adjust_element3 = Enum(*bound_options)
e_calibration = Enum(*bound_options)
linear = Enum(*bound_options)
name = Str()
description = Str()
tool_tip = Str()
@observe('name', 'bound_type', 'min', 'max', 'value', 'default_value')
def update_displayed_name(self, changed):
pass
# print(changed)
def __repr__(self):
return ("Parameter(bound_type={}, min={}, max={}, value={}, "
"default={}, free_more={}, adjust_element1={}, "
"adjust_element2={}, adjust_element3={}, "
"e_calibration={}, linear={}, description={}, "
"toop_tip={}".format(
self.bound_type, self.min, self.max, self.value, self.default_value,
self.free_more, self.adjust_element1, self.adjust_element2,
self.adjust_element3, self.e_calibration,
self.linear, self.description, self.tool_tip))
def to_dict(self):
return {
'bound_type': self.bound_type,
'min': self.min,
'max': self.max,
'value': self.value,
'default_value': self.default_value,
'fit_with_tail': self.fit_with_tail,
'free_more': self.free_more,
'adjust_element1': self.adjust_element1,
'adjust_element2': self.adjust_element2,
'adjust_element3': self.adjust_element3,
'e_calibration': self.e_calibration,
'linear': self.linear,
'name': self.name,
'description': self.description,
'tool_tip': self.tool_tip,
}
def format_dict(parameter_object_dict, element_list):
"""
Format the dictionary that scikit-xray expects.
Parameters
----------
parameter_object_dict : dict
element_list : list
Need to be transferred to str first, then save it to dict
"""
param_dict = {key: value.to_dict() for key, value
in six.iteritems(parameter_object_dict)}
elo = param_dict.pop('energy_bound_low')['value']
ehi = param_dict.pop('energy_bound_high')['value']
non_fitting_values = {'non_fitting_values': {
'energy_bound_low': elo,
'energy_bound_high': ehi,
'element_list': ', '.join(element_list)
}}
param_dict.update(non_fitting_values)
return param_dict
def dict_to_param(param_dict):
"""
Transfer param dict to parameter object.
Parameters
param_dict : dict
fitting parameter
"""
temp_parameters = copy.deepcopy(param_dict)
non_fitting_values = temp_parameters.pop('non_fitting_values')
element_list = non_fitting_values.pop('element_list')
if not isinstance(element_list, list):
element_list = [e.strip(' ') for e in element_list.split(',')]
#self.element_list = element_list
elo = non_fitting_values.pop('energy_bound_low')
ehi = non_fitting_values.pop('energy_bound_high')
param = {
'energy_bound_low': Parameter(value=elo,
default_value=elo,
description='E low limit [keV]'),
'energy_bound_high': Parameter(value=ehi,
default_value=ehi,
description='E high limit [keV]')
}
for param_name, param_dict in six.iteritems(temp_parameters):
if 'default_value' in param_dict:
param.update({param_name: Parameter(**param_dict)})
else:
param.update({
param_name: Parameter(default_value=param_dict['value'],
**param_dict)
})
return element_list, param
class PreFitStatus(Atom):
"""
Data structure for pre fit analysis.
Attributes
----------
z : str
z number of element
spectrum : array
spectrum of given element
status : bool
True as plot is visible
stat_copy : bool
copy of status
maxv : float
max value of a spectrum
norm : float
norm value respect to the strongest peak
lbd_stat : bool
define plotting status under a threshold value
"""
z = Str()
energy = Str()
spectrum = Typed(np.ndarray)
status = Bool(False)
stat_copy = Bool(False)
maxv = Float()
norm = Float()
lbd_stat = Bool(False)
class ElementController(object):
"""
    This class provides basic operations on elements: ranking them,
    toggling their display, calculating normalized intensities, and so on.
"""
def __init__(self):
self.element_dict = OrderedDict()
def delete_item(self, k):
try:
del self.element_dict[k]
self.update_norm()
logger.info('Item {} is deleted.'.format(k))
        except KeyError as e:
logger.info(e)
def order(self, option='z'):
"""
Order dict in different ways.
"""
if option == 'z':
self.element_dict = OrderedDict(sorted(six.iteritems(self.element_dict),
key=lambda t: t[1].z))
elif option == 'energy':
self.element_dict = OrderedDict(sorted(six.iteritems(self.element_dict),
key=lambda t: t[1].energy))
elif option == 'name':
self.element_dict = OrderedDict(sorted(six.iteritems(self.element_dict),
key=lambda t: t[0]))
elif option == 'maxv':
self.element_dict = OrderedDict(sorted(six.iteritems(self.element_dict),
key=lambda t: t[1].maxv, reverse=True))
def add_to_dict(self, dictv):
self.element_dict.update(dictv)
self.update_norm()
def update_norm(self, threshv=0.1):
"""
Calculate the norm intensity for each element peak.
Parameters
----------
threshv : float
            No value is shown when smaller than the threshold value
"""
#max_dict = reduce(max, map(np.max, six.itervalues(self.element_dict)))
max_dict = np.max(np.array([v.maxv for v in six.itervalues(self.element_dict)]))
for v in six.itervalues(self.element_dict):
v.norm = v.maxv/max_dict*100
v.lbd_stat = bool(v.norm > threshv)
def delete_all(self):
self.element_dict.clear()
def get_element_list(self):
current_elements = [v for v in six.iterkeys(self.element_dict) if v.lower() != v]
logger.info('Current Elements for fitting are {}'.format(current_elements))
return current_elements
def update_peak_ratio(self):
"""
In case users change the max value.
"""
for v in six.itervalues(self.element_dict):
v.maxv = np.around(v.maxv, 1)
v.spectrum = v.spectrum*v.maxv/np.max(v.spectrum)
self.update_norm()
def turn_on_all(self, option=True):
"""
Set plotting status on for all lines.
"""
if option:
_plot = option
else:
_plot = False
for v in six.itervalues(self.element_dict):
v.status = _plot
class GuessParamModel(Atom):
"""
    This is the auto-fit model used to guess the initial parameters.
Attributes
----------
parameters : `atom.Dict`
        A dict of `Parameter` objects, subclassed from the `Atom` base class.
These `Parameter` objects hold all relevant xrf information.
data : array
1D array of spectrum
prefit_x : array
        X axis with range defined by the low and high energy limits.
result_dict : dict
Save all the auto fitting results for each element.
It is a dictionary of object PreFitStatus.
param_d : dict
Parameters can be transferred into this dictionary.
param_new : dict
        More information is saved, such as element position and width.
total_y : dict
        Results from K lines
    total_y_l : dict
        Results from L lines
    total_y_m : dict
        Results from M lines
e_list : str
All elements used for fitting.
file_path : str
The path where file is saved.
element_list : list
"""
default_parameters = Dict()
#parameters = Dict() #Typed(OrderedDict) #OrderedDict()
data = Typed(object)
prefit_x = Typed(object)
result_dict = Typed(object) #Typed(OrderedDict)
result_dict_names = List()
#param_d = Dict()
param_new = Dict()
total_y = Dict()
total_y_l = Dict()
total_y_m = Dict()
e_name = Str()
add_element_intensity = Float()
#save_file = Str()
result_folder = Str()
#file_path = Str()
element_list = List()
data_sets = Typed(OrderedDict)
file_opt = Int()
data_all = Typed(np.ndarray)
EC = Typed(object)
def __init__(self, *args, **kwargs):
try:
self.default_parameters = kwargs['default_parameters']
#self.element_list, self.parameters = dict_to_param(self.default_parameters)
self.param_new = copy.deepcopy(self.default_parameters)
self.element_list = get_element(self.param_new)
#self.get_param(default_parameters)
except ValueError:
logger.info('No default parameter files are chosen.')
self.result_folder = kwargs['working_directory']
self.EC = ElementController()
def get_new_param(self, param_path):
"""
Update parameters if new param_path is given.
Parameters
----------
param_path : str
        path of the parameter file to read
"""
with open(param_path, 'r') as json_data:
self.param_new = json.load(json_data)
#self.element_list, self.parameters = dict_to_param(self.param_new)
self.element_list = get_element(self.param_new)
self.EC.delete_all()
self.create_spectrum_from_file(self.param_new, self.element_list)
logger.info('Elements read from file are: {}'.format(self.element_list))
#self.element_list, self.parameters = self.get_param(new_param)
def create_spectrum_from_file(self, param_dict, elemental_lines):
"""
Create spectrum profile with given param dict from file.
Parameters
----------
param_dict : dict
dict obtained from file
elemental_lines : list
e.g., ['Na_K', Mg_K', 'Pt_M'] refers to the
K lines of Sodium, the K lines of Magnesium, and the M
lines of Platinum
"""
self.prefit_x, pre_dict = calculate_profile(self.data,
param_dict, elemental_lines)
#factor_to_area = factor_height2area()
temp_dict = OrderedDict()
for e in six.iterkeys(pre_dict):
ename = e.split('_')[0]
for k, v in six.iteritems(param_dict):
if ename in k and 'area' in k:
energy = float(get_energy(e))
factor_to_area = factor_height2area(energy, self.param_new)
ratio = v['value']/factor_to_area
spectrum = pre_dict[e] #/ np.max(pre_dict[e]) * ratio
elif ename == 'compton' and k == 'compton_amplitude':
# the rest-mass energy of an electron (511 keV)
mc2 = 511
comp_denom = (1 + self.param_new['coherent_sct_energy']['value']
/ mc2 * (1 - np.cos(np.deg2rad(self.param_new['compton_angle']['value']))))
compton_energy = self.param_new['coherent_sct_energy']['value'] / comp_denom
factor_to_area = factor_height2area(compton_energy, self.param_new,
std_correction=self.param_new['compton_fwhm_corr']['value'])
spectrum = pre_dict[e] #/ np.max(pre_dict[e]) * ratio
elif ename == 'elastic' and k == 'coherent_sct_amplitude':
factor_to_area = factor_height2area(self.param_new['coherent_sct_energy']['value'],
self.param_new)
ratio = v['value']/factor_to_area
spectrum = pre_dict[e] #/ np.max(pre_dict[e]) * ratio
elif ename == 'background':
spectrum = pre_dict[e]
else:
continue
ps = PreFitStatus(z=get_Z(ename), energy=get_energy(e), spectrum=spectrum,
maxv=np.around(np.max(spectrum), 1),
norm=-1, lbd_stat=False)
temp_dict.update({e: ps})
self.EC.add_to_dict(temp_dict)
@observe('file_opt')
def choose_file(self, change):
if self.file_opt == 0:
return
        names = list(self.data_sets.keys())
self.data = self.data_sets[names[self.file_opt-1]].get_sum()
self.data_all = self.data_sets[names[self.file_opt-1]].raw_data
def manual_input(self):
default_area = 1e5
logger.info('Element {} is added'.format(self.e_name))
#param_dict = format_dict(self.parameters, self.element_list)
x, data_out = calculate_profile(self.data, self.param_new,
elemental_lines=[self.e_name], default_area=default_area)
ps = PreFitStatus(z=get_Z(self.e_name), energy=get_energy(self.e_name),
spectrum=data_out[self.e_name]/np.max(data_out[self.e_name])*self.add_element_intensity,
maxv=self.add_element_intensity, norm=-1,
lbd_stat=False)
self.EC.add_to_dict({self.e_name: ps})
def update_name_list(self):
"""
        When result_dict_names changes, the looper in enaml will update.
"""
# need to clean list first, in order to refresh the list in GUI
self.result_dict_names = []
        self.result_dict_names = list(self.EC.element_dict.keys())
logger.info('Current element names are {}'.format(self.result_dict_names))
def find_peak(self, threshv=0.1):
"""
Run automatic peak finding, and save results as dict of object.
"""
#param_dict = format_dict(self.parameters, self.element_list)
self.prefit_x, out_dict = linear_spectrum_fitting(self.data,
self.param_new)
logger.info('Energy range: {}, {}'.format(self.param_new['non_fitting_values']['energy_bound_low']['value'],
self.param_new['non_fitting_values']['energy_bound_high']['value']))
#max_dict = reduce(max, map(np.max, six.itervalues(out_dict)))
prefit_dict = OrderedDict()
for k, v in six.iteritems(out_dict):
ps = PreFitStatus(z=get_Z(k), energy=get_energy(k), spectrum=v,
maxv=np.around(np.max(v), 1), norm=-1,
lbd_stat=False)
prefit_dict.update({k: ps})
logger.info('The elements from parameter guess: {}'.format(
prefit_dict.keys()))
self.EC.add_to_dict(prefit_dict)
def create_full_param(self, peak_std=0.07):
"""
Extend the param to full param dict with detailed elements
information, and assign initial values from pre fit.
Parameters
----------
peak_std : float
approximated std for element peak.
"""
self.element_list = self.EC.get_element_list()
self.param_new['non_fitting_values']['element_list'] = ', '.join(self.element_list)
#param_d = format_dict(self.parameters, self.element_list)
self.param_new = param_dict_cleaner(self.param_new, self.element_list)
        logger.info('element list before register: {}'.format(self.element_list))
# create full parameter list including elements
PC = ParamController(self.param_new, self.element_list)
#PC.create_full_param()
self.param_new = PC.params
# to create full param dict, for GUI only
create_full_dict(self.param_new, fit_strategy_list)
logger.info('full dict: {}'.format(self.param_new.keys()))
logger.info('incident energy: {}'.format(self.param_new['coherent_sct_energy']['value']))
# update according to pre fit results
if len(self.EC.element_dict):
for e in self.element_list:
zname = e.split('_')[0]
for k, v in six.iteritems(self.param_new):
if zname in k and 'area' in k:
factor_to_area = factor_height2area(float(self.EC.element_dict[e].energy),
self.param_new)
v['value'] = self.EC.element_dict[e].maxv * factor_to_area
if 'compton' in self.EC.element_dict:
gauss_factor = 1/(1 + self.param_new['compton_f_step']['value']
+ self.param_new['compton_f_tail']['value']
+ self.param_new['compton_hi_f_tail']['value'])
# the rest-mass energy of an electron (511 keV)
mc2 = 511
comp_denom = (1 + self.param_new['coherent_sct_energy']['value']
/ mc2 * (1 - np.cos(np.deg2rad(self.param_new['compton_angle']['value']))))
compton_energy = self.param_new['coherent_sct_energy']['value'] / comp_denom
factor_to_area = factor_height2area(compton_energy, self.param_new,
std_correction=self.param_new['compton_fwhm_corr']['value'])
self.param_new['compton_amplitude']['value'] = \
self.EC.element_dict['compton'].maxv * factor_to_area
if 'coherent_sct_amplitude' in self.EC.element_dict:
self.param_new['coherent_sct_amplitude']['value'] = np.sum(
self.EC.element_dict['elastic'].spectrum)
def data_for_plot(self):
"""
Save data in terms of K, L, M lines for plot.
"""
self.total_y = {}
self.total_y_l = {}
self.total_y_m = {}
new_dict = {k: v for (k, v) in six.iteritems(self.EC.element_dict) if v.status}
for k, v in six.iteritems(new_dict):
if 'K' in k:
self.total_y[k] = self.EC.element_dict[k].spectrum
elif 'L' in k:
self.total_y_l[k] = self.EC.element_dict[k].spectrum
elif 'M' in k:
self.total_y_m[k] = self.EC.element_dict[k].spectrum
else:
self.total_y[k] = self.EC.element_dict[k].spectrum
def save(self, fname='param_default1.json'):
"""
Save full param dict into a file at result directory.
The name of the file is predefined.
Parameters
----------
fname : str, optional
file name to save updated parameters
"""
fpath = os.path.join(self.result_folder, fname)
with open(fpath, 'w') as outfile:
json.dump(self.param_new, outfile,
sort_keys=True, indent=4)
def read_pre_saved(self, fname='param_default1.json'):
"""This is a bad idea."""
fpath = os.path.join(self.result_folder, fname)
with open(fpath, 'r') as infile:
data = json.load(infile)
return data
def save_as(file_path, data):
"""
Save full param dict into a file.
"""
with open(file_path, 'w') as outfile:
json.dump(data, outfile,
sort_keys=True, indent=4)
def calculate_profile(y0, param,
elemental_lines, default_area=1e5):
# Need to use deepcopy here to avoid unexpected change on parameter dict
fitting_parameters = copy.deepcopy(param)
x0 = np.arange(len(y0))
# ratio to transfer energy value back to channel value
approx_ratio = 100
lowv = fitting_parameters['non_fitting_values']['energy_bound_low']['value'] * approx_ratio
highv = fitting_parameters['non_fitting_values']['energy_bound_high']['value'] * approx_ratio
x, y = trim(x0, y0, lowv, highv)
e_select, matv = construct_linear_model(x, fitting_parameters,
elemental_lines,
default_area=default_area)
non_element = ['compton', 'elastic']
total_list = e_select + non_element
total_list = [str(v) for v in total_list]
temp_d = {k: v for (k, v) in zip(total_list, matv.transpose())}
# get background
bg = snip_method(y, fitting_parameters['e_offset']['value'],
fitting_parameters['e_linear']['value'],
fitting_parameters['e_quadratic']['value'])
temp_d.update(background=bg)
#for i in len(total_list):
# temp_d[total_list[i]] = matv[:, i]
x = (fitting_parameters['e_offset']['value']
+ fitting_parameters['e_linear']['value'] * x
+ fitting_parameters['e_quadratic']['value'] * x**2)
return x, temp_d
def create_full_dict(param, name_list):
"""
Create full param dict so each item has same nested dict.
This is for GUI purpose only.
.. warning :: This function mutates the input values.
    Parameters
----------
param : dict
all parameters including element
name_list : list
strategy names
"""
for n in name_list:
for k, v in six.iteritems(param):
if k == 'non_fitting_values':
continue
if n not in v:
v.update({n: v['bound_type']})
def get_Z(ename):
"""
Return element's Z number.
Parameters
----------
ename : str
element name
Returns
-------
    str
        element Z number as a string; '-' for non-element lines
"""
strip_line = lambda ename: ename.split('_')[0]
non_element = ['compton', 'elastic', 'background']
if ename in non_element:
return '-'
else:
e = Element(strip_line(ename))
return str(e.Z)
def get_energy(ename):
    """
    Return the primary emission-line energy in keV, as a string, for an
    elemental line such as 'Fe_K'; return '-' for non-element lines.
    """
strip_line = lambda ename: ename.split('_')[0]
non_element = ['compton', 'elastic', 'background']
if ename in non_element:
return '-'
else:
e = Element(strip_line(ename))
if '_K' in ename:
energy = e.emission_line['ka1']
elif '_L' in ename:
energy = e.emission_line['la1']
elif '_M' in ename:
energy = e.emission_line['ma1']
return str(np.around(energy, 4))
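# Worked example (illustrative): get_energy('Fe_K') looks up the Ka1 line of
# iron and returns roughly '6.4038' (keV, rounded to 4 digits), while
# get_energy('background') returns '-'.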
def get_element(param):
element_list = param['non_fitting_values']['element_list']
return [e.strip(' ') for e in element_list.split(',')]
def factor_height2area(energy, param, std_correction=1):
"""
Factor to transfer peak height to area.
"""
temp_val = 2 * np.sqrt(2 * np.log(2))
epsilon = param['non_fitting_values']['electron_hole_energy']
sigma = np.sqrt((param['fwhm_offset']['value'] / temp_val)**2
+ energy * epsilon * param['fwhm_fanoprime']['value'])
return sigma*std_correction
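# Background for the conversion above (a sketch under the usual Gaussian
# assumptions; not taken from the original source): a Gaussian peak has
# sigma = FWHM / (2*sqrt(2*ln 2)), and its area is height*sigma*sqrt(2*pi).
# This helper returns only sigma (times an optional width correction), so
# any sqrt(2*pi) factor must be absorbed by the calling code.
#
# fwhm = 0.15                                    # hypothetical FWHM in keV
# sigma = fwhm / (2 * np.sqrt(2 * np.log(2)))    # ~0.0637 keV
# area = 1000.0 * sigma * np.sqrt(2 * np.pi)     # for a peak height of 1000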
def param_dict_cleaner(param, element_list):
"""
Make sure param only contains element from element_list.
Parameters
----------
param : dict
fitting parameters
element_list : list
list of elemental lines
Returns
-------
dict :
new param dict containing given elements
"""
param_new = {}
for k, v in six.iteritems(param):
if k == 'non_fitting_values' or k == k.lower():
param_new.update({k: v})
else:
if k[:2] in element_list:
param_new.update({k: v})
return param_new
|
bsd-3-clause
| -2,165,580,146,740,394,200
| 36.340541
| 118
| 0.551446
| false
| 3.962002
| false
| false
| false
|
aznashwan/heat2arm
|
heat2arm/translators/networking/secgroups/ec2_secgroup.py
|
1
|
3254
|
# Copyright 2015 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Contains the definition for the EC2 security group translator.
"""
from heat2arm.translators.networking.secgroups.base_secgroup import (
BaseSecurityGroupARMTranslator
)
class EC2SecurityGroupARMTranslator(BaseSecurityGroupARMTranslator):
""" EC2SecurityGroupARMTranslator is the translator
for EC2 security groups.
"""
heat_resource_type = "AWS::EC2::SecurityGroup"
def _get_rules(self):
""" _get_rules is a helper method which returns a list of all
the resulting ARM security group rules to be created.
"""
i = 0
rules = []
# traverse all ingress rules; if any:
if "SecurityGroupIngress" in self._heat_resource.properties.data:
for in_rule in self._heat_resource.properties.data[
"SecurityGroupIngress"]:
# build the rule:
rule = {
"name": "%s_rule_%d" % (self._name, i),
"properties": {
"protocol": in_rule["IpProtocol"],
"sourcePortRange": in_rule["FromPort"],
"destinationPortRange": in_rule["ToPort"],
"sourceAddressPrefix": in_rule["CidrIp"],
"destinationAddressPrefix": "*",
"direction": "Inbound",
"access": "Allow",
# NOTE: priority is always fixed.
"priority": 100 + i,
}
}
i = i + 1
rules.append(rule)
# traverse all egress rules; if any:
if "SecurityGroupEgress" in self._heat_resource.properties.data:
for out_rule in self._heat_resource.properties.data[
"SecurityGroupEgress"]:
# build the rule:
rule = {
"name": "%s_rule_%d" % (self._name, i),
"properties": {
"protocol": out_rule["IpProtocol"],
"sourcePortRange": out_rule["FromPort"],
"destinationPortRange": out_rule["ToPort"],
"sourceAddressPrefix": out_rule["CidrIp"],
"destinationAddressPrefix": "*",
"direction": "Outbound",
"access": "Allow",
# NOTE: priority is always fixed.
"priority": 100 + i,
}
}
i = i + 1
rules.append(rule)
return rules
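# Shape of one produced rule (illustrative; the property names mirror the ARM
# security-rule schema used above, the values are hypothetical):
#
# {
#     "name": "my_secgroup_rule_0",
#     "properties": {
#         "protocol": "tcp",
#         "sourcePortRange": "22",
#         "destinationPortRange": "22",
#         "sourceAddressPrefix": "0.0.0.0/0",
#         "destinationAddressPrefix": "*",
#         "direction": "Inbound",
#         "access": "Allow",
#         "priority": 100,
#     }
# }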
|
apache-2.0
| -445,866,815,154,035,600
| 37.738095
| 78
| 0.527966
| false
| 4.750365
| false
| false
| false
|
corakwue/ftrace
|
ftrace/parsers/sched_load_avg_cpu.py
|
1
|
2005
|
#!/usr/bin/python
# Copyright 2015 Huawei Devices USA Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# Authors:
# Chuk Orakwue <chuk.orakwue@huawei.com>
import re
from ftrace.common import ParserError
from .register import register_parser
try:
from ftrace.third_party.cnamedtuple import namedtuple
except ImportError:
from collections import namedtuple
TRACEPOINT = 'sched_load_avg_cpu'
__all__ = [TRACEPOINT]
SchedLoadAvgCpuBase = namedtuple(TRACEPOINT,
[
                                 'cpu',
'load_avg',
'util_avg'
]
)
class SchedLoadAvgCpu(SchedLoadAvgCpuBase):
__slots__ = ()
def __new__(cls, cpu, load_avg, util_avg):
cpu = int(cpu)
load_avg = int(load_avg)
util_avg = int(util_avg)
return super(cls, SchedLoadAvgCpu).__new__(
cls,
cpu=cpu,
load_avg=load_avg,
util_avg=util_avg,
)
sched_load_avg_cpu_pattern = re.compile(
r"""
cpu=(?P<cpu>\d+)\s+
load_avg=(?P<load_avg>\d+)\s+
util_avg=(?P<util_avg>\d+)
""",
re.X|re.M
)
@register_parser
def sched_load_avg_cpu(payload):
"""Parser for `sched_load_avg_cpu` tracepoint"""
try:
match = re.match(sched_load_avg_cpu_pattern, payload)
if match:
match_group_dict = match.groupdict()
return SchedLoadAvgCpu(**match_group_dict)
    except Exception as e:
raise ParserError(e.message)
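# Illustrative usage with a synthetic payload (the values are made up):
#
# sample = "cpu=2 load_avg=123 util_avg=45"
# result = sched_load_avg_cpu(sample)
# # returns a SchedLoadAvgCpu with cpu=2, load_avg=123, util_avg=45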
|
apache-2.0
| 1,968,760,944,318,342,700
| 26.094595
| 74
| 0.632918
| false
| 3.523726
| false
| false
| false
|
grepme/cmput410-project
|
api/urls.py
|
1
|
1779
|
from django.conf.urls import patterns, include, url
guid_regex = "[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}"
sha1_regex = "[a-zA-Z0-9]+"
id_regex = "(({guid})|({sha1}))".format(guid=guid_regex,sha1=sha1_regex)
urlpatterns = patterns('api.views',
# get posts by a specific author that the current authenticated user
# can view
# author/id1/posts
# /api/author/ef3e0e05-c5f8-11e4-a972-b8f6b116b2b7/posts/
(r'^author/(?:(?P<author_id>{})/?)?posts/?(?:/(?P<page>\d*)/?)?$'.format(id_regex), 'get_posts'),
# Get a specific post or all public posts
(r'^posts/?(?:(?P<post_id>{}))?/?$'.format(id_regex), 'get_post'),
    # See if author_id is friends with author_2_id
(r'^friends/(?P<author_id>{0})/(?P<author_2_id>{0})/?$'.format(id_regex), 'is_friend'),
# POST authors, returns list of friends in the list
(r'^friends/(?P<author_id>{})/?$'.format(id_regex), 'get_friends'),
# GET authors on our server
(r'^author$', 'get_authors'),
# GET author on our server
(r'^author/(?P<profile_id>{})/?$'.format(id_regex), 'get_author'),
# Make a friend request with another user
(r'^friendrequest$', 'friend_request'),
# Follow a specific user
(r'^follow$', 'follow_user'),
# search for a user
#(r'search/(?P<name>([a-zA-Z0-9 -._~:?#%]+))/?$', 'search_users'),
)
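# Example request paths matched by the patterns above, assuming this URLconf
# is mounted under /api/ as the comments suggest (the GUID is hypothetical):
#
# /api/author/ef3e0e05-c5f8-11e4-a972-b8f6b116b2b7/posts/  -> get_posts
# /api/posts/                                              -> get_post
# /api/friends/<guid1>/<guid2>/                            -> is_friend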
|
apache-2.0
| -7,852,624,737,404,225,000
| 44.615385
| 121
| 0.45756
| false
| 3.414587
| false
| false
| false
|
jspilker/visilens
|
examples/Ex1_get_data_continuum.py
|
1
|
4773
|
"""
Example 1, to be run within CASA. This script serves as a guideline
for how to get data out of a CASA ms and into a format which
visilens can use. We really don't need all that much information,
so we keep only the columns we need.
To keep the number of visibilities low, we first average the data
a bit. In this particular case, the on-source integration times were
only ~60s, so we won't average in time. We will average down each of
the four ALMA basebands (spectral windows), since this is continuum
data and the fractional bandwidth from the lowest to highest observed
frequency is small. We'll also average the two orthogonal polarizations,
since the source is unpolarized. Last, for fitting, we need an
accurate estimate of the uncertainty on each visibility. The *relative*
uncertainties in the data are okay, but they're not on any absolute scale,
so we need to calculate what the re-scaling factor should be. To do this,
we take the difference between successive visibilities on each baseline
(these are strong sources, so unfortunately we can't just use the rms)
and re-scale the noise to match. In principle CASA's statwt also does
this, but I found that it sometimes gave bizarre results (some baselines
weighted 100x more than others for no obvious reason, etc.). If you
have better luck with it, feel free to use that instead!
"""
import numpy as np
import os
c = 299792458.0 # in m/s
# Path to the calibrated ms file, and the source name we want.
inms = 'Compact_0202_to_0418.cal.ms'
field = 'SPT0202-61'
spw = '0,1,2,3'
# First we split out just the source we want from our ms file.
outms = field+'_'+inms[:3].lower()+'.ms'
os.system('rm -rf '+outms)
split(inms,outms,field=field,spw=spw,width=128,datacolumn='corrected',
keepflags=False)
# Now we'll get the visibility columns we need, before manipulating them.
# data_desc_id is a proxy for the spw number.
ms.open(outms,nomodify=True)
visdata = ms.getdata(['uvw','antenna1','antenna2','data','sigma','data_desc_id'])
visdata['data'] = np.squeeze(visdata['data']) # ditch unnecessary extra dimension
ms.close()
# Get the frequencies associated with each spw, because uvw coordinates are in m
tb.open(outms+'/SPECTRAL_WINDOW')
freqs = np.squeeze(tb.getcol('CHAN_FREQ')) # center freq of each spw
tb.close()
# Get the primary beam size from the antenna diameter. Assumes homogeneous array,
# sorry CARMA users.
tb.open(outms+'/ANTENNA')
diam = np.squeeze(tb.getcol('DISH_DIAMETER'))[0]
PBfwhm = 1.2*(c/np.mean(freqs))/diam * (3600*180/np.pi) # in arcsec
tb.close()
# Data and sigma have both polarizations; average them
visdata['data'] = np.average(visdata['data'],weights=(visdata['sigma']**-2.),axis=0)
visdata['sigma']= np.sum((visdata['sigma']**-2.),axis=0)**-0.5
# Convert uvw coords from m to lambda
for ispw in range(len(spw.split(','))):
visdata['uvw'][:,visdata['data_desc_id']==ispw] *= freqs[ispw]/c
# Calculate the noise re-scaling, by differencing consecutive visibilities on the
# same baseline. Have to do an ugly double-loop here; would work better if we knew
# in advance how the data were ordered (eg time-sorted). We assume that we can
# re-scale the noise using the mean of the re-scalings from each baseline.
facs = []
for ant1 in np.unique(visdata['antenna1']):
for ant2 in np.unique(visdata['antenna2']):
if ant1 < ant2:
thisbase = (visdata['antenna1']==ant1) & (visdata['antenna2']==ant2)
reals = visdata['data'].real[thisbase]
imags = visdata['data'].imag[thisbase]
sigs = visdata['sigma'][thisbase]
diffrs = reals - np.roll(reals,-1); diffis = imags - np.roll(imags,-1)
std = np.mean([diffrs.std(),diffis.std()])
facs.append(std/sigs.mean()/np.sqrt(2))
facs = np.asarray(facs); visdata['sigma'] *= facs.mean()
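# Why the sqrt(2) above: the difference of two consecutive (independent)
# visibilities has variance 2*sigma^2, so the std of the differences
# overestimates the per-visibility noise by sqrt(2); dividing corrects this.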
print outms, '| mean rescaling factor: ',facs.mean(), '| rms/beam (mJy): ',1000*((visdata['sigma']**-2).sum())**-0.5
# If we ever want to mess with the data after re-scaling the weights, we have to
# write them back to the ms file. But, CASA doesn't like that we've averaged
# the polarizations together, so we have to keep them separate for this purpose.
ms.open(outms,nomodify=False)
replace = ms.getdata(['sigma','weight'])
replace['sigma'] *= facs.mean()
replace['weight'] = replace['sigma']**-2.
ms.putdata(replace)
ms.close()
# Create one single array of all this data, then save everything.
allarr = np.vstack((visdata['uvw'][0,:],visdata['uvw'][1,:],visdata['data'].real,
visdata['data'].imag,visdata['sigma'],visdata['antenna1'],visdata['antenna2']))
outfname = field+'_'+inms[:3].lower()+'.bin'
with open(outfname, 'wb') as f:
allarr.tofile(f)
f.write(PBfwhm)
|
mit
| 5,476,157,098,352,860,000
| 44.457143
| 116
| 0.699979
| false
| 3.229364
| false
| false
| false
|
bbondy/brianbondy.gae
|
libs/sx/pisa3/pisa_tables.py
|
1
|
13877
|
# -*- coding: ISO-8859-1 -*-
# Copyright 2010 Dirk Holtwick, holtwick.it
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__reversion__ = "$Revision: 20 $"
__author__ = "$Author: holtwick $"
__date__ = "$Date: 2007-10-09 12:58:24 +0200 (Di, 09 Okt 2007) $"
from pisa_tags import pisaTag
from pisa_util import *
from pisa_reportlab import PmlTable, TableStyle, PmlKeepInFrame
import copy
import sys
import logging
log = logging.getLogger("ho.pisa")
def _width(value=None):
if value is None:
return None
value = str(value)
if value.endswith("%"):
return value
return getSize(value)
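# Illustrative behaviour (getSize comes from pisa_util): percentage strings
# pass through unchanged, absolute units are converted to points, and the
# default stays None.
#
# _width("50%")   # -> "50%"
# _width("12pt")  # -> 12.0 (via getSize)
# _width()        # -> None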
class TableData:
def __init__(self):
self.data = []
self.styles = []
self.span = []
self.mode = ""
self.padding = 0
self.col = 0
# self.c = None
def add_cell(self, data=None):
self.col += 1
self.data[len(self.data) - 1].append(data)
def add_style(self, data):
# print self.mode, data
# Do we have color and
# width = data[3]
#if data[0].startswith("LINE"):
# color = data[4]
# if color is None:
# return
self.styles.append(copy.copy(data))
def add_empty(self, x, y):
self.span.append((x, y))
def get_data(self):
data = self.data
for x, y in self.span:
try:
data[y].insert(x, '')
except:
pass
return data
def add_cell_styles(self, c, begin, end, mode="td"):
def getColor(a, b):
return a
self.mode = mode.upper()
        if c.frag.backColor and mode != "tr":  # XXX Is this correct?
self.add_style(('BACKGROUND', begin, end, c.frag.backColor))
# print 'BACKGROUND', begin, end, c.frag.backColor
if 0:
log.debug("%r", (
begin,
end,
c.frag.borderTopWidth,
c.frag.borderTopStyle,
c.frag.borderTopColor,
c.frag.borderBottomWidth,
c.frag.borderBottomStyle,
c.frag.borderBottomColor,
c.frag.borderLeftWidth,
c.frag.borderLeftStyle,
c.frag.borderLeftColor,
c.frag.borderRightWidth,
c.frag.borderRightStyle,
c.frag.borderRightColor,
))
if getBorderStyle(c.frag.borderTopStyle) and c.frag.borderTopWidth and c.frag.borderTopColor is not None:
self.add_style(('LINEABOVE', begin, (end[0], begin[1]),
c.frag.borderTopWidth,
c.frag.borderTopColor,
"squared"))
if getBorderStyle(c.frag.borderLeftStyle) and c.frag.borderLeftWidth and c.frag.borderLeftColor is not None:
self.add_style(('LINEBEFORE', begin, (begin[0], end[1]),
c.frag.borderLeftWidth,
c.frag.borderLeftColor,
"squared"))
if getBorderStyle(c.frag.borderRightStyle) and c.frag.borderRightWidth and c.frag.borderRightColor is not None:
self.add_style(('LINEAFTER', (end[0], begin[1]), end,
c.frag.borderRightWidth,
c.frag.borderRightColor,
"squared"))
if getBorderStyle(c.frag.borderBottomStyle) and c.frag.borderBottomWidth and c.frag.borderBottomColor is not None:
self.add_style(('LINEBELOW', (begin[0], end[1]), end,
c.frag.borderBottomWidth,
c.frag.borderBottomColor,
"squared"))
self.add_style(('LEFTPADDING', begin, end, c.frag.paddingLeft or self.padding))
self.add_style(('RIGHTPADDING', begin, end, c.frag.paddingRight or self.padding))
self.add_style(('TOPPADDING', begin, end, c.frag.paddingTop or self.padding))
self.add_style(('BOTTOMPADDING', begin, end, c.frag.paddingBottom or self.padding))
class pisaTagTABLE(pisaTag):
def start(self, c):
c.addPara()
attrs = self.attr
# Swap table data
c.tableData, self.tableData = TableData(), c.tableData
tdata = c.tableData
# border
#tdata.border = attrs.border
#tdata.bordercolor = attrs.bordercolor
begin = (0, 0)
end = (-1, - 1)
if attrs.border and attrs.bordercolor:
frag = c.frag
frag.borderLeftWidth = attrs.border
frag.borderLeftColor = attrs.bordercolor
frag.borderLeftStyle = "solid"
frag.borderRightWidth = attrs.border
frag.borderRightColor = attrs.bordercolor
frag.borderRightStyle = "solid"
frag.borderTopWidth = attrs.border
frag.borderTopColor = attrs.bordercolor
frag.borderTopStyle = "solid"
frag.borderBottomWidth = attrs.border
frag.borderBottomColor = attrs.bordercolor
frag.borderBottomStyle = "solid"
# tdata.add_style(("GRID", begin, end, attrs.border, attrs.bordercolor))
tdata.padding = attrs.cellpadding
#if 0: #attrs.cellpadding:
# tdata.add_style(('LEFTPADDING', begin, end, attrs.cellpadding))
# tdata.add_style(('RIGHTPADDING', begin, end, attrs.cellpadding))
# tdata.add_style(('TOPPADDING', begin, end, attrs.cellpadding))
# tdata.add_style(('BOTTOMPADDING', begin, end, attrs.cellpadding))
# alignment
#~ tdata.add_style(('VALIGN', (0,0), (-1,-1), attrs.valign.upper()))
# Set Border and padding styles
tdata.add_cell_styles(c, (0, 0), (-1, - 1), "table")
# bgcolor
#if attrs.bgcolor is not None:
# tdata.add_style(('BACKGROUND', (0, 0), (-1, -1), attrs.bgcolor))
tdata.align = attrs.align.upper()
tdata.col = 0
tdata.row = 0
tdata.colw = []
tdata.rowh = []
tdata.repeat = attrs.repeat
tdata.width = _width(attrs.width)
# self.tabdata.append(tdata)
def end(self, c):
tdata = c.tableData
data = tdata.get_data()
# Add missing columns so that each row has the same count of columns
# This prevents errors in Reportlab table
try:
maxcols = max([len(row) for row in data] or [0])
except ValueError:
log.warn(c.warning("<table> rows seem to be inconsistent"))
            maxcols = 0
for i, row in enumerate(data):
data[i] += [''] * (maxcols - len(row))
try:
if tdata.data:
# log.debug("Table sryles %r", tdata.styles)
t = PmlTable(
data,
colWidths=tdata.colw,
rowHeights=tdata.rowh,
# totalWidth = tdata.width,
splitByRow=1,
# repeatCols = 1,
repeatRows=tdata.repeat,
hAlign=tdata.align,
vAlign='TOP',
style=TableStyle(tdata.styles))
t.totalWidth = _width(tdata.width)
t.spaceBefore = c.frag.spaceBefore
t.spaceAfter = c.frag.spaceAfter
# XXX Maybe we need to copy some more properties?
t.keepWithNext = c.frag.keepWithNext
# t.hAlign = tdata.align
c.addStory(t)
else:
log.warn(c.warning("<table> is empty"))
except:
log.warn(c.warning("<table>"), exc_info=1)
# Cleanup and re-swap table data
c.clearFrag()
c.tableData, self.tableData = self.tableData, None
class pisaTagTR(pisaTag):
def start(self, c):
tdata = c.tableData
row = tdata.row
begin = (0, row)
end = (-1, row)
tdata.add_cell_styles(c, begin, end, "tr")
c.frag.vAlign = self.attr.valign or c.frag.vAlign
tdata.col = 0
tdata.data.append([])
def end(self, c):
c.tableData.row += 1
class pisaTagTD(pisaTag):
def start(self, c):
if self.attr.align is not None:
#print self.attr.align, getAlign(self.attr.align)
c.frag.alignment = getAlign(self.attr.align)
c.clearFrag()
self.story = c.swapStory()
# print "#", len(c.story)
attrs = self.attr
tdata = c.tableData
cspan = attrs.colspan
rspan = attrs.rowspan
row = tdata.row
col = tdata.col
        # advance col past any cells already occupied by a row/column span
        while 1:
            for x, y in tdata.span:
                if x == col and y == row:
                    col += 1
                    tdata.col += 1
                    break
            else:
                break
#cs = 0
#rs = 0
begin = (col, row)
end = (col, row)
if cspan:
end = (end[0] + cspan - 1, end[1])
if rspan:
end = (end[0], end[1] + rspan - 1)
if begin != end:
#~ print begin, end
tdata.add_style(('SPAN', begin, end))
for x in range(begin[0], end[0] + 1):
for y in range(begin[1], end[1] + 1):
if x != begin[0] or y != begin[1]:
tdata.add_empty(x, y)
# Set Border and padding styles
tdata.add_cell_styles(c, begin, end, "td")
# Calculate widths
# Add empty placeholders for new columns
if (col + 1) > len(tdata.colw):
tdata.colw = tdata.colw + ((col + 1 - len(tdata.colw)) * [_width()])
        # Get value of width, if no spanning
if not cspan:
# print c.frag.width
width = c.frag.width or self.attr.width #self._getStyle(None, attrs, "width", "width", mode)
            # If there is a value, set it in the right place in the array
# print width, _width(width)
if width is not None:
tdata.colw[col] = _width(width)
# Calculate heights
if row + 1 > len(tdata.rowh):
tdata.rowh = tdata.rowh + ((row + 1 - len(tdata.rowh)) * [_width()])
if not rspan:
height = None #self._getStyle(None, attrs, "height", "height", mode)
if height is not None:
tdata.rowh[row] = _width(height)
tdata.add_style(('FONTSIZE', begin, end, 1.0))
tdata.add_style(('LEADING', begin, end, 1.0))
# Vertical align
valign = self.attr.valign or c.frag.vAlign
if valign is not None:
tdata.add_style(('VALIGN', begin, end, valign.upper()))
# Reset border, otherwise the paragraph block will have borders too
frag = c.frag
frag.borderLeftWidth = 0
frag.borderLeftColor = None
frag.borderLeftStyle = None
frag.borderRightWidth = 0
frag.borderRightColor = None
frag.borderRightStyle = None
frag.borderTopWidth = 0
frag.borderTopColor = None
frag.borderTopStyle = None
frag.borderBottomWidth = 0
frag.borderBottomColor = None
frag.borderBottomStyle = None
def end(self, c):
tdata = c.tableData
c.addPara()
cell = c.story
# Handle empty cells, they otherwise collapse
#if not cell:
# cell = ' '
# Keep in frame if needed since Reportlab does no split inside of cells
if (not c.frag.insideStaticFrame) and (c.frag.keepInFrameMode is not None):
# tdata.keepinframe["content"] = cell
cell = PmlKeepInFrame(
maxWidth=0,
maxHeight=0,
mode=c.frag.keepInFrameMode,
content=cell)
c.swapStory(self.story)
tdata.add_cell(cell)
class pisaTagTH(pisaTagTD):
pass
'''
end_th = end_td
def start_keeptogether(self, attrs):
self.story.append([])
self.next_para()
def end_keeptogether(self):
if not self.story[-1]:
self.add_noop()
self.next_para()
s = self.story.pop()
self.add_story(KeepTogether(s))
def start_keepinframe(self, attrs):
self.story.append([])
self.keepinframe = {
"maxWidth": attrs["maxwidth"],
"maxHeight": attrs["maxheight"],
"mode": attrs["mode"],
"name": attrs["name"],
"mergeSpace": attrs["mergespace"]
}
# print self.keepinframe
self.next_para()
def end_keepinframe(self):
if not self.story[-1]:
self.add_noop()
self.next_para()
self.keepinframe["content"] = self.story.pop()
self.add_story(KeepInFrame(**self.keepinframe))
'''
|
mit
| 3,694,893,880,446,124,500
| 32.439206
| 122
| 0.517475
| false
| 3.857937
| false
| false
| false
|
timevortexproject/timevortex
|
weather/utils/globals.py
|
1
|
1404
|
#!/usr/bin/python3
# -*- coding: utf8 -*-
# -*- Mode: Python; py-indent-offset: 4 -*-
"""Globals for weather app"""
from datetime import datetime, timedelta
from django.conf import settings
KEY_METEAR_NO_SITE_ID = "metear_no_site_id"
KEY_METEAR_BAD_URL = "metear_bad_url"
KEY_METEAR_PROBLEM_WS = "metear_problem_ws"
KEY_METEAR_BAD_CONTENT = "metear_bad_content"
KEY_METEAR_NO_START_DATE = "metear_no_start_date"
PROCESS_STOPPED = "Process stopped. Wait a minute before retrying."
ERROR_METEAR = {
KEY_METEAR_NO_SITE_ID: "No METEAR Site in database. %s" % PROCESS_STOPPED,
KEY_METEAR_BAD_URL: "Bad URL to target METEAR service. %s" % PROCESS_STOPPED,
KEY_METEAR_PROBLEM_WS: "METEAR Web service does not respond. %s" % PROCESS_STOPPED,
KEY_METEAR_BAD_CONTENT: "Bad content from METEAR Web service. %s" % PROCESS_STOPPED,
KEY_METEAR_NO_START_DATE: "No start date found in DB. %s" % PROCESS_STOPPED,
}
SETTINGS_METEAR_URL = "METEAR_URL"
SETTINGS_DEFAULT_METEAR_URL = "http://www.wunderground.com/history/airport/%s/%s/DailyHistory.html?format=1"
SETTINGS_STUBS_METEAR_URL = "%s%s" % (settings.SITE_URL, "/stubs/history/airport/%s/%s/DailyHistory.html?format=1")
SETTINGS_STUBS_NEW_METEAR_URL = "%s%s" % (
settings.SITE_URL, "/stubs/history/airport/%s/%s/NewDailyHistory.html?format=1")
SETTINGS_STUBS_METEAR_START_DATE = (datetime.today() - timedelta(days=3)).strftime("%Y/%m/%d")
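# Illustrative: the METEAR history URL template takes an airport code and a
# date string (the airport code below is hypothetical):
#
# url = SETTINGS_DEFAULT_METEAR_URL % ("LFPG", "2015/01/01")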
|
mit
| -9,200,364,049,418,442,000
| 49.142857
| 115
| 0.712251
| false
| 2.664137
| false
| false
| false
|
cryptapus/electrum-myr
|
lib/jsonrpc.py
|
1
|
3726
|
#!/usr/bin/env python3
#
# Electrum - lightweight Bitcoin client
# Copyright (C) 2018 Thomas Voegtlin
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from jsonrpclib.SimpleJSONRPCServer import SimpleJSONRPCServer, SimpleJSONRPCRequestHandler
from base64 import b64decode
import time
from . import util
class RPCAuthCredentialsInvalid(Exception):
def __str__(self):
return 'Authentication failed (bad credentials)'
class RPCAuthCredentialsMissing(Exception):
def __str__(self):
return 'Authentication failed (missing credentials)'
class RPCAuthUnsupportedType(Exception):
def __str__(self):
return 'Authentication failed (only basic auth is supported)'
# based on http://acooke.org/cute/BasicHTTPA0.html by andrew cooke
class VerifyingJSONRPCServer(SimpleJSONRPCServer):
def __init__(self, rpc_user, rpc_password, *args, **kargs):
self.rpc_user = rpc_user
self.rpc_password = rpc_password
class VerifyingRequestHandler(SimpleJSONRPCRequestHandler):
def parse_request(myself):
# first, call the original implementation which returns
# True if all OK so far
if SimpleJSONRPCRequestHandler.parse_request(myself):
try:
self.authenticate(myself.headers)
return True
except (RPCAuthCredentialsInvalid, RPCAuthCredentialsMissing,
RPCAuthUnsupportedType) as e:
myself.send_error(401, str(e))
except BaseException as e:
import traceback, sys
traceback.print_exc(file=sys.stderr)
myself.send_error(500, str(e))
return False
SimpleJSONRPCServer.__init__(
self, requestHandler=VerifyingRequestHandler, *args, **kargs)
def authenticate(self, headers):
if self.rpc_password == '':
# RPC authentication is disabled
return
auth_string = headers.get('Authorization', None)
if auth_string is None:
raise RPCAuthCredentialsMissing()
(basic, _, encoded) = auth_string.partition(' ')
if basic != 'Basic':
raise RPCAuthUnsupportedType()
encoded = util.to_bytes(encoded, 'utf8')
credentials = util.to_string(b64decode(encoded), 'utf8')
(username, _, password) = credentials.partition(':')
if not (util.constant_time_compare(username, self.rpc_user)
and util.constant_time_compare(password, self.rpc_password)):
time.sleep(0.050)
raise RPCAuthCredentialsInvalid()
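# Client-side sketch (not part of this module): building the header that
# authenticate() expects, with hypothetical credentials.
#
# from base64 import b64encode
# encoded = b64encode(b'rpcuser:rpcpassword').decode('utf8')
# headers = {'Authorization': 'Basic ' + encoded}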
|
mit
| 1,195,370,511,997,288,000
| 38.221053
| 91
| 0.665056
| false
| 4.555012
| false
| false
| false
|
google-research/google-research
|
ipagnn/adapters/gat_adapters.py
|
1
|
2892
|
# coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Adapters for GAT models."""
import jax
import jax.numpy as jnp
from ipagnn.adapters import common_adapters
class GATAdapter(common_adapters.SequenceAdapter):
"""Adapter for GAT model."""
def as_example(self, dataset_item):
inputs = jax.tree_map(lambda x: x.numpy(), dataset_item)
example = {
'start_index': inputs['cfg']['start_index'],
'exit_index': inputs['cfg']['exit_index'],
'data': inputs['cfg']['data'],
'edge_types': inputs['cfg']['edge_types'],
'source_indices': inputs['cfg']['adjacency_list/source_indices'],
'dest_indices': inputs['cfg']['adjacency_list/dest_indices'],
'steps': inputs['cfg']['steps'],
'target_output': inputs['target_output'],
'target_output_length': inputs['target_output_length'],
'human_readable_target_output': inputs['human_readable_target_output'],
'human_readable_code': inputs['human_readable_code'],
}
if 'error_type' in inputs:
example['error_type'] = inputs['error_type']
return example
def get_train_inputs(self, example):
return {key: value for key, value in example.items()
if value.dtype != jnp.dtype('O')}
class GGNNAdapter(common_adapters.SequenceAdapter):
"""Adapter for GGNN model."""
def as_example(self, dataset_item):
inputs = jax.tree_map(lambda x: x.numpy(), dataset_item)
example = {
'start_index': inputs['cfg']['start_index'],
'exit_index': inputs['cfg']['exit_index'],
'data': inputs['cfg']['data'],
'edge_types': inputs['cfg']['edge_types'],
'source_indices': inputs['cfg']['adjacency_list/source_indices'],
'dest_indices': inputs['cfg']['adjacency_list/dest_indices'],
'steps': inputs['cfg']['steps'],
'target_output': inputs['target_output'],
'target_output_length': inputs['target_output_length'],
'human_readable_target_output': inputs['human_readable_target_output'],
'human_readable_code': inputs['human_readable_code'],
}
if 'error_type' in inputs:
example['error_type'] = inputs['error_type']
return example
def get_train_inputs(self, example):
return {key: value for key, value in example.items()
if value.dtype != jnp.dtype('O')}
|
apache-2.0
| 6,610,926,958,701,834,000
| 37.56
| 79
| 0.651107
| false
| 3.770535
| false
| false
| false
|
googleapis/python-workflows
|
google/cloud/workflows_v1beta/services/workflows/pagers.py
|
1
|
5782
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from typing import (
Any,
AsyncIterable,
Awaitable,
Callable,
Iterable,
Sequence,
Tuple,
Optional,
)
from google.cloud.workflows_v1beta.types import workflows
class ListWorkflowsPager:
"""A pager for iterating through ``list_workflows`` requests.
This class thinly wraps an initial
:class:`google.cloud.workflows_v1beta.types.ListWorkflowsResponse` object, and
provides an ``__iter__`` method to iterate through its
``workflows`` field.
If there are more pages, the ``__iter__`` method will make additional
``ListWorkflows`` requests and continue to iterate
through the ``workflows`` field on the
corresponding responses.
All the usual :class:`google.cloud.workflows_v1beta.types.ListWorkflowsResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
def __init__(
self,
method: Callable[..., workflows.ListWorkflowsResponse],
request: workflows.ListWorkflowsRequest,
response: workflows.ListWorkflowsResponse,
*,
metadata: Sequence[Tuple[str, str]] = ()
):
"""Instantiate the pager.
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
request (google.cloud.workflows_v1beta.types.ListWorkflowsRequest):
The initial request object.
response (google.cloud.workflows_v1beta.types.ListWorkflowsResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
"""
self._method = method
self._request = workflows.ListWorkflowsRequest(request)
self._response = response
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@property
def pages(self) -> Iterable[workflows.ListWorkflowsResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
self._response = self._method(self._request, metadata=self._metadata)
yield self._response
def __iter__(self) -> Iterable[workflows.Workflow]:
for page in self.pages:
yield from page.workflows
def __repr__(self) -> str:
return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
class ListWorkflowsAsyncPager:
"""A pager for iterating through ``list_workflows`` requests.
This class thinly wraps an initial
:class:`google.cloud.workflows_v1beta.types.ListWorkflowsResponse` object, and
provides an ``__aiter__`` method to iterate through its
``workflows`` field.
If there are more pages, the ``__aiter__`` method will make additional
``ListWorkflows`` requests and continue to iterate
through the ``workflows`` field on the
corresponding responses.
All the usual :class:`google.cloud.workflows_v1beta.types.ListWorkflowsResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
def __init__(
self,
method: Callable[..., Awaitable[workflows.ListWorkflowsResponse]],
request: workflows.ListWorkflowsRequest,
response: workflows.ListWorkflowsResponse,
*,
metadata: Sequence[Tuple[str, str]] = ()
):
"""Instantiates the pager.
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
request (google.cloud.workflows_v1beta.types.ListWorkflowsRequest):
The initial request object.
response (google.cloud.workflows_v1beta.types.ListWorkflowsResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
"""
self._method = method
self._request = workflows.ListWorkflowsRequest(request)
self._response = response
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@property
async def pages(self) -> AsyncIterable[workflows.ListWorkflowsResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
self._response = await self._method(self._request, metadata=self._metadata)
yield self._response
def __aiter__(self) -> AsyncIterable[workflows.Workflow]:
async def async_generator():
async for page in self.pages:
for response in page.workflows:
yield response
return async_generator()
def __repr__(self) -> str:
return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
|
apache-2.0
| 7,345,909,831,732,091,000
| 36.303226
| 87
| 0.654445
| false
| 4.468315
| false
| false
| false
|
sakthivigneshr/homeauto
|
src/control/rpi/rpi_gpio_slave.py
|
1
|
1752
|
import pika
import RPi.GPIO as GPIO
import paho.mqtt.client as mqtt
from threading import Thread
USER = "test"
PASS = "test123"
VHOST = "/cloudlynk"
HOST = "mohawk.link"
KEY = "solfeta"
XCHANGE = "home"
OUTPUT_PIN = 7
def callback(ch, method, properties, body):
level = int(body)
print("received msg: " + repr(level))
GPIO.output(OUTPUT_PIN, level)
def on_message(mqttc, app_data, msg):
level = int(msg.payload)
print "Received message " + repr(level)
GPIO.output(OUTPUT_PIN, level)
def on_connect(mqttc, app_data, flags, rc):
print "Connect successful"
mqttc.subscribe("control/lights/00")
class rabbitConnect(Thread):
def __init__(self):
Thread.__init__(self)
def run(self):
print "Starting RabbitMQ"
cred = pika.PlainCredentials(USER, PASS)
conn = pika.BlockingConnection(pika.ConnectionParameters(
host=HOST, virtual_host=VHOST, credentials=cred))
chan = conn.channel()
chan.exchange_declare(exchange=XCHANGE, type='topic')
rslt = chan.queue_declare(exclusive=True)
q = rslt.method.queue
chan.queue_bind(exchange=XCHANGE, queue=q, routing_key=KEY)
chan.basic_consume(callback, queue=q, no_ack=True)
chan.start_consuming()
class mqttConnect(Thread):
def __init__(self):
Thread.__init__(self)
def run(self):
print "Starting MQTT"
mqttc = mqtt.Client()
mqttc.on_message = on_message
mqttc.on_connect = on_connect
mqttc.connect("mohawk.link", 1883, 60)
mqttc.loop_forever()
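# Publisher sketch (illustrative; not part of this script): sending "1" to
# the topic this slave subscribes to drives OUTPUT_PIN high.
#
# import paho.mqtt.publish as publish
# publish.single("control/lights/00", payload="1", hostname=HOST)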
def main():
# Setup the pins
GPIO.setmode(GPIO.BOARD)
GPIO.setup(OUTPUT_PIN, GPIO.OUT)
myThreadObj1 = rabbitConnect()
myThreadObj1.start()
myThreadObj2 = mqttConnect()
myThreadObj2.start()
myThreadObj1.join()
myThreadObj2.join()
if __name__ == "__main__":
main()
|
mit
| 8,074,197,039,635,007,000
| 22.052632
| 61
| 0.694635
| false
| 2.839546
| false
| false
| false
|
jakublipinski/i2Gmail-Backup-macOS-Messages-To-Gmail
|
contacts.py
|
1
|
2674
|
import gdata.data
import gdata.contacts.client
import gdata.contacts.data
import string
import config
class Contacts:
def __init__(self, credentials):
auth2token = gdata.gauth.OAuth2Token(client_id=credentials.client_id,
client_secret=credentials.client_secret,
scope='https://www.google.com/m8/feeds/contacts/default/full',
access_token=credentials.id_token,
refresh_token=credentials.refresh_token,
user_agent=config.APPLICATION_NAME)
self.client = gdata.contacts.client.ContactsClient()
auth2token.authorize(self.client)
self.email_to_name = {}
self.phone_to_name = {}
def load_contacts(self):
max_results = 99999
start_index = 1
query = gdata.contacts.client.ContactsQuery()
query.max_results = max_results
query.start_index = start_index
feed = self.client.GetContacts(q=query)
while feed:
for i, entry in enumerate(feed.entry):
if entry.name:
full_name = entry.name.full_name.text
primary_email = None
for email_entry in entry.email:
email = email_entry.address.lower()
if email_entry.primary and email_entry.primary=="true":
primary_email = email
if email in self.email_to_name:
print(u"Email address: '{}' is assigned to both '{}' and '{}'!".\
format(email, self.email_to_name[email], full_name))
else:
self.email_to_name[email] = (full_name, u'%s <%s>' % (full_name, email))
for phone_number_entry in entry.phone_number:
phone_number = Contacts.strip_and_reverse_phone_number(phone_number_entry.text)
if phone_number in self.phone_to_name:
print("Phone number: '%s' is assigned to both '%s' and '%s'!"%
(phone_number_entry.text, self.phone_to_name[phone_number], full_name))
else:
if primary_email:
self.phone_to_name[phone_number] = (
full_name, u'%s <%s>' % (full_name, primary_email))
else:
self.phone_to_name[phone_number] = (full_name, u'%s <%s>' % (full_name, phone_number_entry.text))
next_link = feed.GetNextLink()
if next_link:
feed = self.client.GetContacts(uri=next_link.href)
else:
feed = None
def get_by_phone_number(self, phone_number):
phone_number = Contacts.strip_and_reverse_phone_number(phone_number)
return self.phone_to_name.get(phone_number)
def get_by_email(self, email):
email = email.lower()
return self.email_to_name.get(email)
@staticmethod
def strip_and_reverse_phone_number(phone_number):
number = ''.join(ch for ch in phone_number if ch.isdigit())
if len(number)<3:
return phone_number
number = number[-9:]
number = number[::-1]
return number
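# Worked example (illustrative): '+1 (206) 555-0100' -> digits '12065550100'
# -> last nine digits '065550100' -> reversed '001055560'. Keeping only the
# last nine digits and reversing lets numbers with and without a country
# code map to the same lookup key.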
|
mit
| 2,101,036,142,555,594,800
| 32.012346
| 105
| 0.665669
| false
| 3.098494
| false
| false
| false
|
bmswgnp/sdk
|
python/test.py
|
1
|
5131
|
#
# Simple test program for the Python Motion SDK.
#
# @file tools/sdk/python/test.py
# @author Luke Tokheim, luke@motionnode.com
# @version 2.2
#
# Copyright (c) 2015, Motion Workshop
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
import sys
from MotionSDK import *
PortPreview = 32079
PortSensor = 32078
PortRaw = 32077
PortConfigurable = 32076
PortConsole = 32075
NSample = 10
def test_Client(host, port):
client = Client(host, port)
print "Connected to " + str(host) + ":" + str(port)
xml_string = "<?xml version=\"1.0\"?><configurable><preview><Gq/></preview><sensor><a/></sensor></configurable>"
if client.writeData(xml_string):
print "Sent active channel definition to Configurable service"
if client.waitForData():
sample_count = 0
while sample_count < NSample:
data = client.readData()
if None == data:
break
if PortPreview == port:
container = Format.Preview(data)
for key in container:
q = container[key].getQuaternion(False)
print "q(" + str(key) + ") = (" + str(q[0]) + ", " + str(q[1]) + "i, " + str(q[2]) + "j, " + str(q[3]) + "k)"
if PortSensor == port:
container = Format.Sensor(data)
for key in container:
a = container[key].getAccelerometer()
print "a(" + str(key) + ") = (" + str(a[0]) + ", " + str(a[1]) + ", " + str(a[2]) + ") g"
if PortRaw == port:
container = Format.Raw(data)
for key in container:
a = container[key].getAccelerometer()
print "a(" + str(key) + ") = (" + str(a[0]) + ", " + str(a[1]) + ", " + str(a[2]) + ")"
if PortConfigurable == port:
container = Format.Configurable(data)
for key in container:
line = "data(" + str(key) + ") = ("
for i in range(container[key].size()):
if i > 0:
line += ", "
line += str(container[key].value(i))
line += ")"
print line
sample_count += 1
def test_LuaConsole(host, port):
client = Client(host, port)
print("Connected to " + str(host) + ":" + str(port))
#
# General Lua scripting interface.
#
lua_chunk = \
"if not node.is_reading() then" \
" node.close()" \
" node.scan()" \
" node.start()" \
" end" \
" if node.is_reading() then" \
" print('Reading from ' .. node.num_reading() .. ' device(s)')" \
" else" \
" print('Failed to start reading')" \
" end"
print LuaConsole.SendChunk(client, lua_chunk, 5)
# Scripting language compatibility class. Translate
# Python calls into Lua calls and send them to the
# console service.
node = LuaConsole.Node(client)
print "node.is_reading() = " + str(node.is_reading())
def test_File():
    filename = "../../test_data/sensor.bin"
print "reading take data file: \"" + filename + "\""
take_file = File(filename)
while True:
data = take_file.readData(9, True)
if None == data:
break
print Format.SensorElement(data).getAccelerometer()
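# A minimal sketch (not part of the original test) of reading one frame from
# the Preview service; it mirrors test_Client above and assumes a Motion
# Service is reachable on the given host.
def example_read_one_preview(host=""):
    client = Client(host, PortPreview)
    if client.waitForData():
        data = client.readData()
        if data is not None:
            container = Format.Preview(data)
            for key in container:
                q = container[key].getQuaternion(False)
                print "q(" + str(key) + ") = " + str(q)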
def main(argv):
# Set the default host name parameter. The SDK is
# socket based so any networked Motion Service is
# available.
host = ""
if len(argv) > 1:
host = argv[1]
test_LuaConsole(host, PortConsole)
test_Client(host, PortPreview)
test_Client(host, PortSensor)
test_Client(host, PortRaw)
test_Client(host, PortConfigurable)
test_File()
if __name__ == "__main__":
sys.exit(main(sys.argv))
|
bsd-2-clause
| 1,064,305,266,750,681,900
| 31.474684
| 129
| 0.588384
| false
| 3.881241
| true
| false
| false
|
bop/foundation
|
lib/python2.7/site-packages/staticfiles/urls.py
|
1
|
1283
|
import re
from django.conf import settings
from django.conf.urls.defaults import patterns, url
from django.core.exceptions import ImproperlyConfigured
urlpatterns = []
def static(prefix, view='django.views.static.serve', **kwargs):
"""
Helper function to return a URL pattern for serving files in debug mode.
from django.conf import settings
from django.conf.urls.static import static
urlpatterns = patterns('',
# ... the rest of your URLconf goes here ...
) + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
"""
    # No-op if not in debug mode or a non-local prefix
if not settings.DEBUG or (prefix and '://' in prefix):
return []
elif not prefix:
raise ImproperlyConfigured("Empty static prefix not permitted")
return patterns('',
url(r'^%s(?P<path>.*)$' % re.escape(prefix.lstrip('/')), view, kwargs=kwargs),
)
def staticfiles_urlpatterns(prefix=None):
"""
Helper function to return a URL pattern for serving static files.
"""
if prefix is None:
prefix = settings.STATIC_URL
return static(prefix, view='staticfiles.views.serve')
# Only append if urlpatterns are empty
if settings.DEBUG and not urlpatterns:
urlpatterns += staticfiles_urlpatterns()
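# Example wiring (illustrative only; assumes a project URLconf that uses the
# same patterns-based style as this module):
#
#   from staticfiles.urls import staticfiles_urlpatterns
#
#   urlpatterns = patterns('',
#       # ... the rest of your URLconf goes here ...
#   )
#   urlpatterns += staticfiles_urlpatterns()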
|
gpl-2.0
| 5,661,768,768,317,236,000
| 31.075
| 86
| 0.686672
| false
| 4.29097
| false
| false
| false
|
SKIRT/PTS
|
core/tools/stringify.py
|
1
|
41185
|
#!/usr/bin/env python
# -*- coding: utf8 -*-
# *****************************************************************
# ** PTS -- Python Toolkit for working with SKIRT **
# ** © Astronomical Observatory, Ghent University **
# *****************************************************************
## \package pts.core.tools.stringify Provides useful functions for converting objects of various types to strings.
# -----------------------------------------------------------------
# Ensure Python 3 compatibility
from __future__ import absolute_import, division, print_function
# Import standard modules
import copy
import warnings
# Import the relevant PTS classes and modules
from . import types
from . import introspection
from . import sequences
from . import strings
from . import numbers
# -----------------------------------------------------------------
def tostr(value, **kwargs):
"""
    Convert a value of any supported type to its string representation, choosing formatting options (such as scientific notation and number of digits) automatically when they are not given.
:param value:
:param kwargs:
:return:
"""
# Get the 'scientific' flag
scientific = kwargs.get("scientific", None)
scientific_int = kwargs.pop("scientific_int", True) # also represent integers in scientific notation
# Set default number of decimal places
#decimal_places = kwargs.pop("decimal_places", None) # let it be done automatically in the str_from_... function
#print(str(value), "nd", decimal_places)
#ndigits = kwargs.pop("ndigits", None)
decimal_places = kwargs.get("decimal_places", None)
ndigits = kwargs.get("ndigits", None)
# Set scientific flag flexibly, if scientific flag was not passed explicitly
if scientific is None:
# Integer value
if (scientific_int and types.is_integer_type(value)) or (types.is_real_type(value) and numbers.is_integer(value)):
# Convert to be certain (if from float)
value = int(value)
#if -1e4 <= value <= 1e4: scientific = False
if -999 < value < 999:
scientific = False
if ndigits is None: decimal_places = 0
else: scientific = True
# No decimals for integers
#decimal_places = 0 YES: OF COURSE THERE MUST BE DECIMAL PLACES FOR SCIENTIFIC NOTATION
# Real value
elif types.is_real_type(value):
#if -1e4 <= value <= 1e4: scientific = False
if -999.99 < value < 999.99: scientific = False
else: scientific = True
# Quantity
elif introspection.lazy_isinstance(value, "Quantity", "astropy.units", return_false_if_fail=True):
if -999.99 < value.value < 999.99: scientific = False
else: scientific = True
elif introspection.lazy_isinstance(value, "QuantityRange", "pts.core.basics.range", return_false_if_fail=True):
if -999.99 < value.min.value and value.max.value < 999.99: scientific = False
else: scientific = True
elif introspection.lazy_isinstance(value, "RealRange", "pts.core.basics.range", return_false_if_fail=True):
if -999.99 < value.min and value.max < 999.99: scientific = False
else: scientific = True
elif introspection.lazy_isinstance(value, "IntegerRange", "pts.core.basics.range", return_false_if_fail=True):
if -999 < value.min and value.max < 999: scientific = False
else: scientific = True
# Other
else: scientific = False
#print("scien", scientific)
#print("dec", decimal_places)
#print("nd", ndigits)
# Set the options
kwargs["scientific"] = scientific
kwargs["decimal_places"] = decimal_places
kwargs["ndigits"] = ndigits
# Set scientific flag for integers
elif types.is_integer_type(value) or (types.is_real_type(value) and numbers.is_integer(value)):
if scientific:
# ONLY IF SCIENTIFIC_INT IS TRUE
if scientific_int:
# ONLY IF NECESSARY
if -999 < value < 999: scientific = False
else: scientific = True
# Don't apply 'scientific' to integers
else: scientific = False
# Set flag
kwargs["scientific"] = scientific
kwargs["ndigits"] = ndigits
# Stringify
return stringify(value, **kwargs)[1].strip()
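# Illustrative calls (not part of the module; the expected outputs follow from
# str_from_integer/str_from_real below and assume numbers.order_of_magnitude
# returns floor(log10) of its argument):
#   tostr(5)                                      # -> '5'
#   tostr(123456)                                 # -> '1.23e5' (auto scientific)
#   tostr(3.14159, round=True, decimal_places=2)  # -> '3.14'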
# -----------------------------------------------------------------
def stringify(value, **kwargs):
"""
    Stringify a value, returning a (parsing type name, string representation) tuple.
:param value:
:param kwargs:
:return:
"""
# List or derived from list
if isinstance(value, list): return stringify_list(value, **kwargs)
# Dictionary
if isinstance(value, dict): return stringify_dict(value, **kwargs)
# Array or derived from Array, but not quantity!
#elif isinstance(value, np.ndarray) and not isinstance(value, Quantity):
#elif introspection.try_importing_module("numpy", True) and (isinstance(value, np.ndarray) and not hasattr(value, "unit")):
# WE ALSO TEST IF THIS IS NOT A NUMPY INTEGER, FLOAT OR BOOLEAN (because they have a __array__ attribute)
elif types.is_array_like(value): return stringify_array(value, **kwargs)
# Column or masked masked column
elif types.is_astropy_column(value): return stringify_array(value, **kwargs)
# Tuple or derived from tuple
elif isinstance(value, tuple): return stringify_tuple(value, **kwargs)
# All other
#else: return stringify_not_list(value, scientific=scientific, decimal_places=decimal_places, fancy=fancy, ndigits=ndigits, unicode=unicode, **kwargs)
else: return stringify_not_list(value, **kwargs)
# -----------------------------------------------------------------
def get_parsing_type(value):
"""
    Return the parsing type name of a value (the first element of the stringify() output).
:param value:
:return:
"""
ptype, string = stringify(value)
return ptype
# -----------------------------------------------------------------
def can_get_item(value):
"""
    Check whether a value supports item access (indexing).
:param value:
:return:
"""
#print(value, type(value))
    try:
        length = len(value)
    except TypeError: return False
    if length == 0: return True
    else:
        try:
            value[0]
            return True
        except IndexError: return False
# -----------------------------------------------------------------
def get_strings(values, return_types=False, value_kwargs=None, add_quotes=False, quote_character="'"):
"""
    Stringify each value of a sequence, optionally also returning the encountered parsing types.
:param values:
:param return_types:
:param value_kwargs:
:param add_quotes:
:param quote_character:
:return:
"""
if value_kwargs is None: value_kwargs = {}
strings = []
ptype = None
ptypes = set()
# Loop over the values
for entry in values:
# parsetype, val = stringify_not_list(entry)
parsetype, val = stringify(entry, **value_kwargs)
# from ..basics.configuration import parent_type
# if add_quotes and parent_type(parsetype) == "string":
if add_quotes and types.is_string_type(entry): val = quote_character + val + quote_character
if ptype is None: ptype = parsetype
elif ptype != parsetype:
# raise ValueError("Nonuniform list")
ptype = "mixed"
# Add the parse type
ptypes.add(parsetype)
# Add the string
strings.append(val)
# Return the strings
if return_types: return strings, list(ptypes), ptype
else: return strings
# -----------------------------------------------------------------
def stringify_list(value, **kwargs):
"""
    Stringify a list, returning a '<type>_list' parsing type and a delimited string.
:param value:
:param kwargs:
:return:
"""
#print("kwargs", kwargs)
#if len(value) == 0: raise ValueError("Cannot stringify an empty list")
if len(value) == 0: return "list", ""
# If delimiter is passed for stringifying the values in the list
value_kwargs = copy.copy(kwargs)
if "value_delimiter" in value_kwargs: value_kwargs["delimiter"] = value_kwargs.pop("value_delimiter")
elif "delimiter" in value_kwargs: del value_kwargs["delimiter"]
# If delimiter is passed for stringifying the keys in the list
#key_kwargs = copy.copy(kwargs)
#if "key_delimiter" in key_kwargs: key_kwargs["delimiter"] = key_kwargs.pop("key_delimiter")
#elif "delimiter" in key_kwargs: del key_kwargs["delimiter"]
# If quotes have to be added
add_quotes = kwargs.pop("add_quotes", False)
quote_character = kwargs.pop("quote_character", "'")
# Get strings
strings, ptypes, ptype = get_strings(value, return_types=True, value_kwargs=value_kwargs, add_quotes=add_quotes, quote_character=quote_character)
from ..basics.configuration import parent_type
from ..basics.log import log
if len(ptypes) == 1: ptype = ptypes[0]
elif sequences.all_equal(ptypes): ptype = ptypes[0]
else:
# Investigate the different ptypes
parent_types = [parent_type(type_name) for type_name in ptypes]
# Check
for i in range(len(parent_types)):
if parent_types[i] is None: log.warning("Could not determine the parent type for '" + ptypes[i] + "'. All parent types: " + str(parent_types))
#print("Parent types:", parent_types)
if sequences.all_equal(parent_types) and parent_types[0] is not None: ptype = parent_types[0]
elif ptype == "mixed": log.warning("Could not determine a common type for '" + stringify(parent_types)[1] + "'")
# Get delimiter for list
delimiter = kwargs.pop("delimiter", ",")
# Return the type and the string
if ptype.endswith("list"):
top_delimiter = delimiter + " "
return ptype + "_list", top_delimiter.join(strings)
else: return ptype + "_list", delimiter.join(strings)
# -----------------------------------------------------------------
def represent_dict(value, **kwargs):
"""
    Build a delimited string representation of a dictionary (without determining a parsing type).
:param value:
:param kwargs:
:return:
"""
if len(value) == 0: return ""
# Only for stringifying the values
value_kwargs = copy.copy(kwargs)
# If delimiter is passed for stringifying the values in the list
if "value_delimiter" in value_kwargs: value_kwargs["delimiter"] = value_kwargs.pop("value_delimiter")
# Get identify symbol
identity_symbol = kwargs.pop("identity_symbol", ": ")
quote_key = kwargs.pop("quote_key", True)
quote_value = kwargs.pop("quote_value", True)
quote_character = kwargs.pop("quote_character", "'")
# Don't quote certain thingies
no_quote_keys = kwargs.pop("no_quote_keys", [])
no_quote_value_for_keys = kwargs.pop("no_quote_value_for_keys", [])
# Do quote certain thingies
quote_keys = kwargs.pop("quote_keys", [])
quote_value_for_keys = kwargs.pop("quote_value_for_keys", [])
replace_spaces_keys = kwargs.pop("replace_spaces_keys", None)
replace_spaces_values = kwargs.pop("replace_spaces_values", None)
replace_in_keys = kwargs.pop("replace_in_keys", None)
replace_in_values = kwargs.pop("replace_in_values", None)
parts = []
# Loop over the dictionary keys
for key in value:
# Stringify the key
ktype, kstring = stringify(key, **kwargs)
if replace_spaces_keys is not None: kstring = kstring.replace(" ", replace_spaces_keys)
if replace_in_keys is not None: kstring = strings.replace_from_dict(kstring, replace_in_keys)
v = value[key]
# Stringify the value
vtype, vstring = stringify(v, **value_kwargs)
if replace_spaces_values is not None: vstring = vstring.replace(" ", replace_spaces_values)
if replace_in_values is not None: vstring = strings.replace_from_dict(vstring, replace_in_values)
# Quote keys
if quote_key:
# Don't quote after all
if key in no_quote_keys: kstring_with_quotes = kstring
# Quote
else: kstring_with_quotes = quote_character + kstring + quote_character
# Don't quote keys
else:
# Quote after all
if key in quote_keys: kstring_with_quotes = quote_character + kstring + quote_character
# Don't quote
else: kstring_with_quotes = kstring
# DON't QUOTE THESE
if vtype == "integer" or vtype == "real" or vtype == "boolean": vstring_with_quotes = vstring
# Quote values
elif quote_value:
# Don't quote after all
if key in no_quote_value_for_keys: vstring_with_quotes = vstring
# Just quote
else: vstring_with_quotes = quote_character + vstring + quote_character
# Don't quote values
else:
# DO quote after all
if key in quote_value_for_keys: vstring_with_quotes = quote_character + vstring + quote_character
# Don't quote
else: vstring_with_quotes = vstring
# Determine line
string = kstring_with_quotes + identity_symbol + vstring_with_quotes
# Add line
parts.append(string)
# Get delimiter
delimiter = kwargs.pop("delimiter", ",")
# Return
return delimiter.join(parts)
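# Illustrative calls (not part of the module; default options as above):
#   represent_dict({'a': 1})    # -> "'a': 1"    (integer values are not quoted)
#   represent_dict({'a': 'x'})  # -> "'a': 'x'"  (string values are quoted)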
# -----------------------------------------------------------------
def stringify_dict(value, **kwargs):
"""
    Stringify a dictionary, returning a '<keytype>_<valuetype>_dictionary' parsing type and a delimited string.
:param value:
:param kwargs:
:return:
"""
#if len(value) == 0: raise ValueError("Cannot stringify an empty dictionary")
if len(value) == 0: return "dictionary", ""
keytype = None
ptype = None
parts = []
keytypes = set()
ptypes = set()
# Only for stringifying the values
value_kwargs = copy.copy(kwargs)
# If delimiter is passed for stringifying the values in the list
if "value_delimiter" in value_kwargs: value_kwargs["delimiter"] = value_kwargs.pop("value_delimiter")
# Get identify symbol
identity_symbol = kwargs.pop("identity_symbol", ": ")
quote_key = kwargs.pop("quote_key", True)
quote_value = kwargs.pop("quote_value", True)
quote_character = kwargs.pop("quote_character", "'")
# Don't quote certain thingies
no_quote_keys = kwargs.pop("no_quote_keys", [])
no_quote_value_for_keys = kwargs.pop("no_quote_value_for_keys", [])
# Do quote certain thingies
quote_keys = kwargs.pop("quote_keys", [])
quote_value_for_keys = kwargs.pop("quote_value_for_keys", [])
replace_spaces_keys = kwargs.pop("replace_spaces_keys", None)
replace_spaces_values = kwargs.pop("replace_spaces_values", None)
replace_in_keys = kwargs.pop("replace_in_keys", None)
replace_in_values = kwargs.pop("replace_in_values", None)
# Loop over the dictionary keys
for key in value:
# Stringify the key
ktype, kstring = stringify(key, **kwargs)
if replace_spaces_keys is not None: kstring = kstring.replace(" ", replace_spaces_keys)
if replace_in_keys is not None: kstring = strings.replace_from_dict(kstring, replace_in_keys)
# Add key type
keytypes.add(ktype)
# Check key type
if keytype is None: keytype = ktype
elif keytype != ktype: keytype = "mixed"
v = value[key]
# Stringify the value
vtype, vstring = stringify(v, **value_kwargs)
if replace_spaces_values is not None: vstring = vstring.replace(" ", replace_spaces_values)
if replace_in_values is not None: vstring = strings.replace_from_dict(vstring, replace_in_values)
# Add value type
ptypes.add(vtype)
# Check value type
if ptype is None: ptype = vtype
elif ptype != vtype: ptype = "mixed"
# if quote_key and key not in no_quote_keys: kstring_with_quotes = quote_character + kstring + quote_character
# else: kstring_with_quotes = kstring
# Quote keys
if quote_key:
# Don't quote after all
if key in no_quote_keys: kstring_with_quotes = kstring
# Quote
else: kstring_with_quotes = quote_character + kstring + quote_character
# Don't quote keys
else:
# Quote after all
if key in quote_keys: kstring_with_quotes = quote_character + kstring + quote_character
# Don't quote
else: kstring_with_quotes = kstring
#if ptype == "integer" or ptype == "real" or ptype == "boolean": vstring_with_quotes = vstring
#elif quote_value and key not in no_quote_value_for_keys: vstring_with_quotes = quote_character + vstring + quote_character
#else: vstring_with_quotes = vstring
# DON't QUOTE THESE
if ptype == "integer" or ptype == "real" or ptype == "boolean": vstring_with_quotes = vstring
# Quote values
elif quote_value:
# Don't quote after all
if key in no_quote_value_for_keys: vstring_with_quotes = vstring
# Just quote
else: vstring_with_quotes = quote_character + vstring + quote_character
# Don't quote values
else:
# DO quote after all
if key in quote_value_for_keys: vstring_with_quotes = quote_character + vstring + quote_character
# Don't quote
else: vstring_with_quotes = vstring
# Determine line
string = kstring_with_quotes + identity_symbol + vstring_with_quotes
# Add line
parts.append(string)
from ..basics.configuration import parent_type
from ..basics.log import log
keytypes = list(keytypes)
ptypes = list(ptypes)
# Investigate the different keytypes
parent_key_types = [parent_type(type_name) for type_name in keytypes]
#print("Parent key types:", parent_key_types)
# Check
for i in range(len(parent_key_types)):
if parent_key_types[i] is None: log.warning("Could not determine the parent type for '" + keytypes[i] + "'. All parent types: " + str(parent_key_types))
    if sequences.all_equal(parent_key_types) and parent_key_types[0] is not None: keytype = parent_key_types[0]
elif keytype == "mixed": log.warning("Could not determine a common type for '" + stringify(parent_key_types)[1] + "'")
# Investigate the different value types
parent_value_types = [parent_type(type_name) for type_name in ptypes]
# Check
for i in range(len(parent_value_types)):
if parent_value_types[i] is None: log.warning("Could not determine the parent type for '" + ptypes[i] + "'. All parent types: " + str(parent_value_types))
#print("Parent value types:", parent_value_types)
if sequences.all_equal(parent_value_types) and parent_value_types[0] is not None: ptype = parent_value_types[0]
elif ptype == "mixed": log.warning("Could not determine a common type for '" + stringify(parent_value_types)[1] + "'")
# Get delimiter
delimiter = kwargs.pop("delimiter", ",")
# Return
return keytype + "_" + ptype + "_dictionary", delimiter.join(parts)
# -----------------------------------------------------------------
def stringify_array(value, **kwargs):
"""
    Stringify an array-like value, returning a '<type>_array' parsing type and a delimited string.
:param value:
:param kwargs:
:return:
"""
# Get delimiter
delimiter = kwargs.pop("delimiter", ",")
ptype, val = stringify_not_list(value[0], **kwargs)
if ptype is None: return "array", delimiter.join([repr(el) for el in value])
else: return ptype + "_array", delimiter.join([repr(el) for el in value])
#ptype, val = stringify_not_list(value[0])
#return ptype + "_array", ",".join([repr(el) for el in value])
# -----------------------------------------------------------------
def stringify_tuple(value, **kwargs):
"""
    Stringify a tuple, returning a '<type>_tuple' parsing type and a delimited string.
:param value:
:param kwargs:
:return:
"""
value_kwargs = copy.copy(kwargs)
if "value_delimiter" in value_kwargs: value_kwargs["delimiter"] = value_kwargs.pop("value_delimiter")
#print("kwargs", kwargs)
strings = []
ptype = None
for entry in value:
#parsetype, val = stringify_not_list(entry, **kwargs)
parsetype, val = stringify(entry, **kwargs)
if ptype is None:
ptype = parsetype
elif ptype != parsetype:
#raise ValueError("Nonuniform tuple")
warnings.warn("Nonuniform tuple")
ptype = "mixed"
strings.append(val)
# Get delimiter
delimiter = kwargs.pop("delimiter", ",")
# Return
if ptype is not None: return ptype + "_tuple", delimiter.join(strings)
else: return "tuple", delimiter.join(strings)
# -----------------------------------------------------------------
def stringify_not_list(value, **kwargs):
"""
    Stringify a scalar (non-iterable) value.
:param value:
:param kwargs:
:return:
"""
# Standard
if types.is_boolean_type(value): return "boolean", str_from_bool(value, **kwargs)
elif types.is_integer_type(value): return "integer", str_from_integer(value, **kwargs)
elif types.is_real_type(value): return "real", str_from_real(value, **kwargs)
elif types.is_string_type(value): return "string", value
elif types.is_none(value): return "None", kwargs.pop("none_string", "None")
# Unit, quantity, angle
elif introspection.lazy_isinstance(value, "UnitBase", "astropy.units"): return introspection.lazy_call("stringify_unit", "pts.core.units.stringify", value, **kwargs)
elif introspection.lazy_isinstance(value, "Quantity", "astropy.units"): return introspection.lazy_call("stringify_quantity", "pts.core.units.stringify", value, **kwargs)
elif introspection.lazy_isinstance(value, "Angle", "astropy.coordinates"): return "angle", str_from_angle(value, **kwargs)
# Range
elif introspection.lazy_isinstance(value, "RealRange", "pts.core.basics.range"): return "real_range", str_from_real_range(value, **kwargs)
elif introspection.lazy_isinstance(value, "IntegerRange", "pts.core.basics.range"): return "integer_range", str_from_integer_range(value, **kwargs)
elif introspection.lazy_isinstance(value, "QuantityRange", "pts.core.basics.range"): return "quantity_range", introspection.lazy_call("str_from_quantity_range", "pts.core.units.stringify", value, **kwargs)
# Coordinates
elif introspection.lazy_isinstance(value, "SkyCoordinate", "pts.magic.basics.coordinate"): return "skycoordinate", str_from_coordinate(value, **kwargs)
elif introspection.lazy_isinstance(value, "PixelCoordinate", "pts.magic.basics.coordinate"): return "pixelcoordinate", str_from_pixelcoordinate(value, **kwargs)
elif introspection.lazy_isinstance(value, "PhysicalCoordinate", "pts.magic.basics.coordinate"): return "physicalcoordinate", str_from_physicalcoordinate(value, **kwargs)
# Stretch
#elif introspection.lazy_isinstance(value, "SkyStretch", "pts.magic.basics.stretch"): return "skystretch", str_from_stretch(value, **kwargs)
# Extents
elif introspection.lazy_isinstance(value, "SkyExtent", "pts.magic.basics.stretch"): return "sky_extent", str_from_angle_extent(value, **kwargs)
elif introspection.lazy_isinstance(value, "PhysicalExtent", "pts.magic.basics.stretch"): return "physical_extent", str_from_quantity_extent(value, **kwargs)
elif introspection.lazy_isinstance(value, "IntegerExtent", "pts.magic.basics.vector"): return "integer_extent", str_from_integer_extent(value, **kwargs)
elif introspection.lazy_isinstance(value, "RealExtent", "pts.magic.basics.vector"): return "real_extent", str_from_real_extent(value, **kwargs)
elif introspection.lazy_isinstance(value, "AngleExtent", "pts.magic.basics.vector"): return "angle_extent", str_from_angle_extent(value, **kwargs)
elif introspection.lazy_isinstance(value, "QuantityExtent", "pts.magic.basics.vector"): return "quantity_extent", str_from_quantity_extent(value, **kwargs)
# Filter
elif introspection.lazy_isinstance(value, "Filter", "pts.core.filter.filter"): return introspection.lazy_call("stringify_filter", "pts.core.filter.filter", value, **kwargs)
# Pixelscale
elif introspection.lazy_isinstance(value, "Pixelscale", "pts.magic.basics.pixelscale"): return "pixelscale", str(value)
# Parallelization
elif introspection.lazy_isinstance(value, "Parallelization", "pts.core.simulation.parallelization"): return "parallelization", introspection.lazy_call("represent_parallelization", "pts.core.simulation.parallelization", value)
# Host
elif introspection.lazy_isinstance(value, "Host", "pts.core.remote.host"): return "host", str_from_host(value)
# Unrecognized
else:
warnings.warn("Unrecognized type: " + str(type(value)))
return None, str(value)
# -----------------------------------------------------------------
def str_from_host(host):
"""
    Return the string representation of a host, including its cluster name if defined.
:param host:
:return:
"""
if host.cluster_name is not None: return host.id + ":" + host.cluster_name
else: return host.id
# -----------------------------------------------------------------
def str_from_dictionary(dictionary, **kwargs):
"""
    Build a simple comma-separated 'key: value' string from a dictionary.
:param dictionary:
:param kwargs:
:return:
"""
parts = []
for key in dictionary:
value = dictionary[key]
vtype, vstring = stringify(value, **kwargs)
string = key + ": " + vstring
parts.append(string)
return ",".join(parts)
# -----------------------------------------------------------------
def stringify_string_fancy(string, **kwargs):
"""
    Wrap a string to the given width, prefixing each line.
:param string:
:return:
"""
width = kwargs.pop("width", 100)
lines_prefix = kwargs.pop("lines_prefix", "")
from textwrap import wrap
return "string", lines_prefix + ("\n" + lines_prefix).join(wrap(string, width))
# -----------------------------------------------------------------
def stringify_list_fancy(lst, **kwargs):
"""
    Stringify a list with line wrapping and optional colouring.
:param lst:
:param kwargs:
:return:
"""
width = kwargs.pop("width", 100)
delimiter = kwargs.pop("delimiter", ", ")
lines_prefix = kwargs.pop("lines_prefix", "")
colour = kwargs.pop("colour", None)
colour_indices = kwargs.pop("colour_indices", None) # colour only certain indices
from textwrap import wrap
ptype, string = stringify(lst)
if colour is not None:
from .formatting import get_color_code, reset
code = get_color_code(colour)
if colour_indices is not None:
parts = string.split(",")
new_parts = []
for index, part in enumerate(parts):
if index in colour_indices: new_part = code + part + reset
else: new_part = part
new_parts.append(new_part)
string = ",".join(new_parts)
else: string = code + string + reset
return ptype, lines_prefix + ("\n" + lines_prefix).join(wrap(string.replace(",", delimiter), width))
# -----------------------------------------------------------------
def get_list_string_max_nvalues(lst, nvalues, **kwargs):
"""
    Build a delimited string for a list, showing at most 'nvalues' values with an ellipsis in the middle.
    :param lst:
    :param nvalues:
:param kwargs:
:return:
"""
# Define string
ellipsis = ", ... , "
# Get options
delimiter = kwargs.pop("delimiter", ", ")
# Get strings
strings = get_strings(lst)
# Return
if len(lst) <= nvalues: return delimiter.join(strings)
# Add ellipses
else:
if nvalues % 2 == 0: nbegin = nend = int(0.5 * nvalues)
else:
nbegin = int(0.5 * nvalues)
nend = nvalues - nbegin
# Create string, return
return delimiter.join(strings[:nbegin]) + ellipsis + delimiter.join(strings[-nend:])
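# Illustrative call (not part of the module):
#   get_list_string_max_nvalues([1, 2, 3, 4, 5, 6], 4)  # -> '1, 2, ... , 5, 6'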
# -----------------------------------------------------------------
def stringify_paths(paths, **kwargs):
"""
    Stringify a list of paths, optionally relative to a base path.
:param paths:
:param kwargs:
:return:
"""
# Get options
base = kwargs.pop("basse", None)
if base is None: return "path_list", stringify_list(paths)[1]
else:
from . import filesystem as fs
absolute_base = fs.absolute_path(base)
# Return the type and the relative paths as a string list
return "string_list", stringify_list([fs.absolute_path(path).split(absolute_base)[1] for path in paths])[1]
# -----------------------------------------------------------------
def str_from_integer(integer, **kwargs):
"""
    Build the string representation of an integer, optionally in (fancy) scientific notation.
:param integer:
:param kwargs:
:return:
"""
# Get settings
scientific = kwargs.pop("scientific", False)
decimal_places = kwargs.pop("decimal_places", None)
fancy = kwargs.pop("fancy", False)
ndigits = kwargs.pop("ndigits", None)
unicode = kwargs.pop("unicode", False)
html = kwargs.pop("html", False)
# Check input
if ndigits is not None and ndigits < 1: raise ValueError("Number of digits cannot be smaller than 1")
if ndigits is not None and decimal_places is not None: raise ValueError("Cannot specify both number of decimal places and number of digits")
# Set ndigits and number of decimal places
if ndigits is not None:
if scientific: decimal_places = ndigits - 1
else: pass
elif decimal_places is not None:
if scientific: ndigits = decimal_places + 1
else: pass
else: decimal_places = 2 # default value for when ndigits is not specified
#print(scientific, decimal_places, ndigits)
# Scientific notation
if scientific:
if fancy:
if ndigits is not None:
power = len(str(integer)) - 1
digits = []
str_rounded = str(integer)
for i in range(ndigits):
digit = str_rounded[i]
digits.append(digit)
if html: return digits[0] + "." + "".join(digits[1:]) + " × 10<sup>" + str(power) + "</sup>"
elif unicode: return digits[0].decode("utf8") + u"." + u"".join(digits[1:]) + u" " + strings.multiplication + u" 10" + strings.superscript(power) # DOESN'T WORK??
else: return digits[0] + "." + "".join(digits[1:]) + " x 10^" + str(power)
else:
result = "{:.0e}".format(integer).replace("+", "").replace("e0", "e")
power = int(result.split("e")[1])
if html: result = result.split("e")[0] + " × 10<sup>" + str(power) + "</sup>"
elif unicode: result = result.split("e")[0].decode("utf8") + u" " + strings.multiplication + u" 10" + strings.superscript(power) # DOESN'T WORK
else: result = result.split("e")[0] + " x 10^" + str(power)
return result
else:
if ndigits is not None: decimal_places = ndigits - 1
if html: return ("{:." + str(decimal_places) + "e}").format(float(integer)).replace("+", "").replace("e0", " × 10<sup>") + "</sup>"
else: return ("{:." + str(decimal_places) + "e}").format(float(integer)).replace("+", "").replace("e0", "e")
# Not scientific
else: return str(integer)
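# Illustrative calls (not part of the module):
#   str_from_integer(123456, scientific=True, ndigits=3)              # -> '1.23e5'
#   str_from_integer(123456, scientific=True, fancy=True, ndigits=3)  # -> '1.23 x 10^5'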
# -----------------------------------------------------------------
#def str_from_integer_range(the_range, scientific=False, decimal_places=2, fancy=False, ndigits=None, unicode=False, **kwargs):
def str_from_integer_range(the_range, **kwargs):
"""
    Build the string representation of an integer range.
:param the_range:
:param kwargs:
:return:
"""
min_str = str_from_integer(the_range.min, **kwargs)
max_str = str_from_integer(the_range.max, **kwargs)
return min_str + " > " + max_str
# -----------------------------------------------------------------
def str_from_real(real, **kwargs):
"""
    Build the string representation of a real number, optionally rounded or in (fancy) scientific notation.
:param real:
:param kwargs:
:return:
"""
# Get kwargs
scientific = kwargs.pop("scientific", False)
decimal_places = kwargs.pop("decimal_places", None)
fancy = kwargs.pop("fancy", False)
ndigits = kwargs.pop("ndigits", None)
unicode = kwargs.pop("unicode", False)
doround = kwargs.pop("round", False)
html = kwargs.pop("html", False)
#print(decimal_places, ndigits)
# Check input
if ndigits is not None and ndigits < 1: raise ValueError("Number of digits cannot be smaller than 1")
if ndigits is not None and decimal_places is not None: raise ValueError("Cannot specify both number of decimal places and number of digits")
# Set ndigits and number of decimal places
if ndigits is not None:
if scientific: decimal_places = ndigits - 1
else: pass
elif decimal_places is not None:
if scientific: ndigits = decimal_places + 1
else: pass
else: decimal_places = 2 # default value for when ndigits is not specified
#print(decimal_places, ndigits)
# Scientific notation
if scientific:
# Fancy
if fancy:
if ndigits is not None:
if "e" in str(real): power = int(str(real).split("e")[1])
else: power = len(str(real).split(".")[0]) - 1
digits = []
rounded = numbers.round_to_n_significant_digits(real, ndigits)
str_rounded = str(rounded)
#print(str_rounded)
#if "." in str_rounded: enditeration = ndigits + 1
#else: enditeration = ndigits
if "." in str_rounded: str_rounded = "".join(str_rounded.split("."))
for i in range(ndigits):
digit = str_rounded[i]
#if digit == ".": continue # happens if rounded does stil contain dot
digits.append(digit)
#print("digits", digits)
if html: return digits[0] + "." + "".join(digits[1:]) + " × 10<sup>" + str(power) + "</sup>"
elif unicode: return digits[0].decode("utf8") + u"." + u"".join(digits[1:]) + u" " + strings.multiplication + u" 10" + strings.superscript(power).decode("utf8") # DOESN'T WORK??
else: return digits[0] + "." + "".join(digits[1:]) + " x 10^" + str(power)
else:
result = ("{:." + str(decimal_places) + "e}").format(real).replace("+", "").replace("e0", "e")
power = int(result.split("e")[1])
#result = result.split("e")[0].decode("utf8") + u" " + strings.multiplication + u" 10" + strings.superscript(power).decode("utf8")
#result = result.split("e")[0].decode("utf8") + u" " + strings.multiplication + u" 10" + strings.superscript(power).decode("utf8")
if html: result = result.split("e")[0] + " × 10<sup>" + str(power) + "</sup>"
elif unicode: result = result.split("e")[0].decode("utf8") + u" " + u"x" + u" 10" + strings.superscript(power).decode("utf8") # SOMETHING LIKE THIS?? DOESN'T WORK??
else: result = result.split("e")[0] + " x 10^" + str(power)
return result
else:
if ndigits is not None: decimal_places = ndigits - 1
if html: return ("{:." + str(decimal_places) + "e}").format(real).replace("+", "").replace("e0", " × 10<sup>") + "</sup>"
else: return ("{:." + str(decimal_places) + "e}").format(real).replace("+", "").replace("e0", "e")
else:
if doround:
#numbers.order_of_magnitude()
if ndigits is not None: return repr(numbers.round_to_n_significant_digits(real, ndigits))
else:
primary_ndigits = numbers.order_of_magnitude(real) + 1
ndigits = decimal_places + primary_ndigits
if ndigits < 1:
warnings.warn("The small number '" + repr(real) + "' cannot be represented with only " + str(decimal_places) + " decimal places: using scientific notation")
return str_from_real(real, scientific=True, ndigits=decimal_places+1)
else:
#print(decimal_places, primary_ndigits, ndigits)
return ("{:." + str(ndigits) + "}").format(real)
else: return repr(real)
# -----------------------------------------------------------------
#def str_from_real_range(the_range, scientific=False, decimal_places=2, fancy=False, ndigits=None, unicode=False, **kwargs):
def str_from_real_range(the_range, **kwargs):
"""
    Build the string representation of a real range.
:param the_range:
:param kwargs:
:return:
"""
min_str = str_from_real(the_range.min, **kwargs)
max_str = str_from_real(the_range.max, **kwargs)
return min_str + " > " + max_str
# -----------------------------------------------------------------
def str_from_coordinate(coordinate, **kwargs):
"""
    Build the string representation of a sky coordinate (RA and DEC).
:param coordinate:
:param kwargs:
:return:
"""
delimiter = kwargs.pop("delimiter", ",")
# Return
return introspection.lazy_call("stringify_quantity", "pts.core.units.stringify", coordinate.ra, **kwargs)[1] + delimiter + introspection.lazy_call("stringify_quantity", "pts.core.units.stringify", coordinate.dec, **kwargs)[1]
# -----------------------------------------------------------------
def str_from_pixelcoordinate(coordinate, **kwargs):
"""
    Build the string representation of a pixel coordinate (x and y).
:param coordinate:
:param kwargs:
:return:
"""
delimiter = kwargs.pop("delimiter", ",")
# Return
return repr(coordinate.x) + delimiter + repr(coordinate.y)
# -----------------------------------------------------------------
def str_from_physicalcoordinate(coordinate, **kwargs):
"""
    Build the string representation of a physical coordinate (x and y).
:param coordinate:
:param kwargs:
:return:
"""
delimiter = kwargs.pop("delimiter", ",")
# Return
return introspection.lazy_call("stringify_quantity", "pts.core.units.stringify", coordinate.x, **kwargs)[1] + delimiter + introspection.lazy_call("stringify_quantity", "pts.core.units.stringify", coordinate.y, **kwargs)[1]
# -----------------------------------------------------------------
def str_from_stretch(stretch, **kwargs):
"""
    Build the string representation of a sky stretch (RA and DEC extent).
:param stretch:
:param kwargs:
:return:
"""
delimiter = kwargs.pop("delimiter", ",")
# Return
return introspection.lazy_call("stringify_quantity", "pts.core.units.stringify", stretch.ra,
**kwargs)[1] + delimiter + introspection.lazy_call("stringify_quantity",
"pts.core.units.stringify",
stretch.dec, **kwargs)[1]
# -----------------------------------------------------------------
def str_from_angle_extent(extent, **kwargs):
"""
    Build the string representation of an angle extent (x and y).
:param extent:
:param kwargs:
:return:
"""
delimiter = kwargs.pop("delimiter", ",")
# Return
return str_from_angle(extent.x, **kwargs) + delimiter + str_from_angle(extent.y, **kwargs)
# -----------------------------------------------------------------
def str_from_quantity_extent(extent, **kwargs):
"""
    Build the string representation of a quantity extent (x and y).
:param extent:
:param kwargs:
:return:
"""
delimiter = kwargs.pop("delimiter", ",")
# Return
return introspection.lazy_call("stringify_quantity", "pts.core.units.stringify", extent.x, **kwargs)[1] + delimiter + \
introspection.lazy_call("stringify_quantity", "pts.core.units.stringify", extent.y, **kwargs)[1]
# -----------------------------------------------------------------
def str_from_integer_extent(extent, **kwargs):
"""
    Build the string representation of an integer extent (x and y).
:param extent:
:param kwargs:
:return:
"""
delimiter = kwargs.pop("delimiter", ",")
# Return
return str_from_integer(extent.x, **kwargs) + delimiter + str_from_integer(extent.y, **kwargs)
# -----------------------------------------------------------------
def str_from_real_extent(extent, **kwargs):
"""
    Build the string representation of a real extent (x and y).
:param extent:
:param kwargs:
:return:
"""
delimiter = kwargs.pop("delimiter", ",")
# Return
return str_from_real(extent.x, **kwargs) + delimiter + str_from_real(extent.y, **kwargs)
# -----------------------------------------------------------------
def yes_or_no(boolean, **kwargs):
"""
    Convert a boolean to 'yes' or 'no' ('y'/'n' when short=True).
:param boolean:
:param kwargs:
:return:
"""
# Get options
short = kwargs.pop("short", False)
answer = "yes" if boolean else "no"
if short: return answer[0]
else: return answer
# -----------------------------------------------------------------
def str_from_bool(boolean, **kwargs):
"""
    Convert a boolean to 'True'/'False', lowercased when lower=True.
:param boolean:
:param kwargs:
:return:
"""
# Get options
lower = kwargs.pop("lower", False)
if lower: return str(boolean).lower()
else: return str(boolean)
# -----------------------------------------------------------------
def str_from_angle(angle, **kwargs):
"""
    Build the string representation of an angle (value and unit).
:param angle:
:param kwargs:
:return:
"""
return str_from_real(angle.value, **kwargs) + " " + str(angle.unit).replace(" ", "")
# -----------------------------------------------------------------
|
agpl-3.0
| -4,237,823,556,950,689,000
| 32.980198
| 229
| 0.577846
| false
| 3.923033
| false
| false
| false
|
ZuraK/aVarCode
|
py_prototyping/hex.py
|
1
|
7514
|
# File: hex.py
# Desc: Hexagonal-grid helper functions and reference notes (corners, coordinates, neighbours).
import math
# param: hType, hexType, 0 for Flat topped, 30 if Pointy topped
# param: center, Vector2Point, hex center
# param: radius, size of hex
# param: index, indexPoint corner of hex, 0-5
# returns: (x, y) tuple for the hex corner
def GeneratePointHEX(hType, center, radius, index):
    # The original sketch referenced an undefined 'vec[0]' and 'size'; this
    # version uses 'radius' and returns the corner as an (x, y) tuple.
    angle_deg = 60 * index + hType  # 0 if Flat, 30 if Pointy
    angle_rad = math.pi / 180 * angle_deg
    return (center.x + radius * math.cos(angle_rad),
            center.y + radius * math.sin(angle_rad))
# param: hType, hexType, 0 for Flat topped, 30 if Pointy topped
# param: center, Vector2Point, hex center
# param: radius, size of hex
# returns: list of six (x, y) corner tuples
def GenerateVectorsHEX(hType, center, radius):
    vec = []
    for val in range(6):
        angle_deg = 60 * val + hType  # 0 if Flat, 30 if Pointy
        angle_rad = math.pi / 180 * angle_deg
        vec.append((center.x + radius * math.cos(angle_rad),
                    center.y + radius * math.sin(angle_rad)))
    return vec
def PrintInfo():
    print "=====[[ Hexagons ]]====="
    print "(00) Definitions, Equations, "
    print "(01) Storage, Tables, "
    print "(02) Generation, "
    return
# HexEdges, Indices
# A 0,1;
# B 1,2;
# C 2,3;
# D 3,4;
# E 4,5;
# F 5,0;
# HexTriangles, Indices (Index 6 as Center)
# A 6,0,1;
# B 6,1,2;
# etc
# Triangle Fan -> Center(0),First(1),Second(2), ...
# Hexagon area:
# A = ((3 sqrt 3) / 2 ) size^2
# Perimeter: 6 * size
# Slices 60 deg, 60 deg, 60 deg
# Total internal angles: 720 deg
# Internal angle: 120 deg
#
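# Minimal helpers for the area/perimeter formulas above (a sketch; 'size' is
# taken to be the circumradius, i.e. the center-to-corner distance):
def HexArea(size):
    return (3 * math.sqrt(3) / 2) * size * size
def HexPerimeter(size):
    return 6 * size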
# dirs:
#   flat
#     Lines: East, SouthEast, SouthWest, West, NorthWest, NorthEast
#     Edges: SouthEast, South, SouthWest, NorthWest, North, NorthEast
#   pointy
#     Lines: SouthEast, South, SouthWest, NorthWest, North, NorthEast
#     Edges: East, SouthEast, SouthWest, West, NorthWest, NorthEast
# Unicode Character 'WHITE HEXAGON' (U+2B21)
# HTML Entity (decimal) ⬡
# HTML Entity (hex) ⬡
# How to type in Microsoft Windows Alt +2B21
# UTF-8 (hex) 0xE2 0xAC 0xA1 (e2aca1)
# UTF-8 (binary) 11100010:10101100:10100001
# UTF-16 (hex) 0x2B21 (2b21)
# UTF-16 (decimal) 11,041
# UTF-32 (hex) 0x00002B21 (2B21)
# UTF-32 (decimal) 11,041
# C/C++/Java source code "\u2B21"
# Python source code u"\u2B21"
# Unicode Character 'BLACK HEXAGON' (U+2B22)
# HTML Entity (decimal) ⬢
# HTML Entity (hex) ⬢
# How to type in Microsoft Windows Alt +2B22
# UTF-8 (hex) 0xE2 0xAC 0xA2 (e2aca2)
# UTF-8 (binary) 11100010:10101100:10100010
# UTF-16 (hex) 0x2B22 (2b22)
# UTF-16 (decimal) 11,042
# UTF-32 (hex) 0x00002B22 (2b22)
# UTF-32 (decimal) 11,042
# C/C++/Java source code "\u2B22"
# Python source code u"\u2B22"
# hex grid flat, vertical orientation
# Width = HexSize * 2
# horiz = width * 3/4
# height = sqrt(3)/2 * width.
# dist vertical = height.
# hex grid pointy, horizontal orientation
# height = hxsize * 2
# vert = height * 3/4
# width = sqrt(3)/2 * height.
# dist horiz = width.
# offset coords
# Pointy top Pointy top
# "odd-r" Horizontal layout "even-r" Horizontal layout
# (0,0) (1,0) (2,0) (3,0) (4,0) (0,0) (1,0) (2,0) (3,0) (4,0)
# (0,1) (1,1) (2,1) (3,1) (4,1) (0,1) (1,1) (2,1) (3,1) (4,1)
# (0,2) (1,2) (2,2) (3,2) (4,2) (0,2) (1,2) (2,2) (3,2) (4,2)
# (0,3) (1,3) (2,3) (3,3) (4,3) (0,3) (1,3) (2,3) (3,3) (4,3)
# (0,4) (1,4) (2,4) (3,4) (4,4) (0,4) (1,4) (2,4) (3,4) (4,4)
# Flat top Flat top
# "odd-q" Vertical layout "even-q" Vertical layout
# (0,0) (2,0) (4,0) (1,0) (3,0) (5,0)
# (1,0) (3,0) (5,0) (0,0) (2,0) (4,0)
# (0,1) (2,1) (4,1) (1,1) (3,1) (5,1)
# (1,1) (3,1) (5,1)              (0,1) (2,1) (4,1)
# (0,2) (2,2) (4,2) (1,2) (3,2) (5,2)
# (1,2) (3,2) (5,2) (0,2) (2,2) (4,2)
# cube coords
# axial coords
# interlaced/doubled coords
# Coord conversions::
# function cube_to_hex(h): # axial
#     var q = h.x
#     var r = h.z
#     return Hex(q, r)
# function hex_to_cube(h): # axial
#     var x = h.q
#     var z = h.r
#     var y = -x-z
#     return Cube(x, y, z)
# convert cube to even-q offset
#     col = x
#     row = z + (x + (x&1)) / 2
# convert even-q offset to cube
#     x = col
#     z = row - (col + (col&1)) / 2
#     y = -x-z
# convert cube to odd-q offset
#     col = x
#     row = z + (x - (x&1)) / 2
# convert odd-q offset to cube
#     x = col
#     z = row - (col - (col&1)) / 2
#     y = -x-z
# convert cube to even-r offset
#     col = x + (z + (z&1)) / 2
#     row = z
# convert even-r offset to cube
#     x = col - (row + (row&1)) / 2
#     z = row
#     y = -x-z
# convert cube to odd-r offset
#     col = x + (z - (z&1)) / 2
#     row = z
# convert odd-r offset to cube
#     x = col - (row - (row&1)) / 2
#     z = row
#     y = -x-z
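# Runnable Python version of the cube <-> odd-r offset conversions noted
# above (a sketch using plain tuples; the original notes assume Cube/Hex
# record types):
def cube_to_oddr(x, y, z):
    col = x + (z - (z & 1)) // 2
    row = z
    return (col, row)
def oddr_to_cube(col, row):
    x = col - (row - (row & 1)) // 2
    z = row
    y = -x - z
    return (x, y, z)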
# NEIGHBOURS::
# >>cube<<
# var directions = [
#     Cube(+1, -1, 0), Cube(+1, 0, -1), Cube( 0, +1, -1),
#     Cube(-1, +1, 0), Cube(-1, 0, +1), Cube( 0, -1, +1)
# ]
# function cube_direction(direction):
#     return directions[direction]
# function cube_neighbor(hex, direction):
#     return cube_add(hex, cube_direction(direction))
# >>axial<<
# var directions = [
#     Hex(+1, 0), Hex(+1, -1), Hex( 0, -1),
#     Hex(-1, 0), Hex(-1, +1), Hex( 0, +1)
# ]
# function hex_direction(direction):
#     return directions[direction]
# function hex_neighbor(hex, direction):
#     var dir = hex_direction(direction)
#     return Hex(hex.q + dir.q, hex.r + dir.r)
# >>offset<< (4 different implementations depending on grid type)
# >>odd-r<<
# var directions = [
#     [ Hex(+1, 0), Hex( 0, -1), Hex(-1, -1),
#       Hex(-1, 0), Hex(-1, +1), Hex( 0, +1) ],
#     [ Hex(+1, 0), Hex(+1, -1), Hex( 0, -1),
#       Hex(-1, 0), Hex( 0, +1), Hex(+1, +1) ]
# ]
# function offset_neighbor(hex, direction):
#     var parity = hex.row & 1
#     var dir = directions[parity][direction]
#     return Hex(hex.col + dir.col, hex.row + dir.row)
# >>even-r<<
# var directions = [
#     [ Hex(+1, 0), Hex(+1, -1), Hex( 0, -1),
#       Hex(-1, 0), Hex( 0, +1), Hex(+1, +1) ],
#     [ Hex(+1, 0), Hex( 0, -1), Hex(-1, -1),
#       Hex(-1, 0), Hex(-1, +1), Hex( 0, +1) ]
# ]
# function offset_neighbor(hex, direction):
#     var parity = hex.row & 1
#     var dir = directions[parity][direction]
#     return Hex(hex.col + dir.col, hex.row + dir.row)
# >>odd-q<<
# var directions = [
#     [ Hex(+1, 0), Hex(+1, -1), Hex( 0, -1),
#       Hex(-1, -1), Hex(-1, 0), Hex( 0, +1) ],
#     [ Hex(+1, +1), Hex(+1, 0), Hex( 0, -1),
#       Hex(-1, 0), Hex(-1, +1), Hex( 0, +1) ]
# ]
# function offset_neighbor(hex, direction):
#     var parity = hex.col & 1
#     var dir = directions[parity][direction]
#     return Hex(hex.col + dir.col, hex.row + dir.row)
# >>even-q<<
# var directions = [
#     [ Hex(+1, +1), Hex(+1, 0), Hex( 0, -1),
#       Hex(-1, 0), Hex(-1, +1), Hex( 0, +1) ],
#     [ Hex(+1, 0), Hex(+1, -1), Hex( 0, -1),
#       Hex(-1, -1), Hex(-1, 0), Hex( 0, +1) ]
# ]
# function offset_neighbor(hex, direction):
#     var parity = hex.col & 1
#     var dir = directions[parity][direction]
#     return Hex(hex.col + dir.col, hex.row + dir.row)
# >>Diagonals<<
# var diagonals = [
#     Cube(+2, -1, -1), Cube(+1, +1, -2), Cube(-1, +2, -1),
#     Cube(-2, +1, +1), Cube(-1, -1, +2), Cube(+1, -2, +1)
# ]
# function cube_diagonal_neighbor(hex, direction):
#     return cube_add(hex, diagonals[direction])
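# Runnable Python version of the cube-coordinate neighbour lookup above
# (a sketch with plain tuples instead of a Cube record type):
CUBE_DIRECTIONS = [
    (+1, -1, 0), (+1, 0, -1), (0, +1, -1),
    (-1, +1, 0), (-1, 0, +1), (0, -1, +1),
]
def cube_neighbor(cube, direction):
    dx, dy, dz = CUBE_DIRECTIONS[direction]
    return (cube[0] + dx, cube[1] + dy, cube[2] + dz)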
|
gpl-2.0
| -3,974,613,827,194,157,600
| 25.090278
| 69
| 0.534602
| false
| 2.438027
| false
| false
| false
|
danielfreeman11/convex-nets
|
LaunchScripts/CIFAR10.py
|
1
|
29360
|
#Imports and model parameters
from __future__ import absolute_import
from __future__ import division
import tensorflow as tf
import numpy as np
from tensorflow.examples.tutorials.mnist import input_data
#mnist = input_data.read_data_sets("/tmp/data/", one_hot=True)
#Simple network: Given three integers a,b,c, [-100,100] chooses three random x-values, and evaluates
#the quadratic function a*x^2 + b*x + c at those values.
import copy
from datetime import datetime
import os.path
import time
import math
import gzip
import os
import re
import sys
import tarfile
from six.moves import urllib
from tensorflow.models.image.cifar10 import cifar10_input
from tensorflow.models.image.cifar10 import cifar10
for num_run in xrange(1):
alpha,hidden_dim,hidden_dim2 = (.001,4,4)
thresh = .95
if num_run%4 == 0:
thresh = .8
if num_run%4 == 1:
thresh = .6
if num_run%4 == 2:
thresh = .4
if num_run%4 == 3:
thresh = .35
cost_thresh = 1.0
# Parameters
learning_rate = 0.001
training_epochs = 15
#batch_size = 100
display_step = 1
# Network Parameters
n_hidden_1 = 256 # 1st layer number of features
n_hidden_2 = 256 # 2nd layer number of features
n_input = 784 # Guess quadratic function
n_classes = 10 #
#synapses = []
#from __future__ import print_function
tf.logging.set_verbosity(tf.logging.FATAL)
FLAGS = tf.app.flags.FLAGS
# Basic model parameters.
batch_size = 128
data_dir = '/tmp/cifar10_data'
use_fp16 = False
train_dir= '/tmp/cifar10_train'
max_steps=1000000
num_examples=10000
log_device_placement=False
# Global constants describing the CIFAR-10 data set.
IMAGE_SIZE = cifar10_input.IMAGE_SIZE
NUM_CLASSES = cifar10_input.NUM_CLASSES
NUM_EXAMPLES_PER_EPOCH_FOR_TRAIN = cifar10_input.NUM_EXAMPLES_PER_EPOCH_FOR_TRAIN
NUM_EXAMPLES_PER_EPOCH_FOR_EVAL = cifar10_input.NUM_EXAMPLES_PER_EPOCH_FOR_EVAL
# Constants describing the training process.
MOVING_AVERAGE_DECAY = 0.9999 # The decay to use for the moving average.
NUM_EPOCHS_PER_DECAY = 350.0 # Epochs after which learning rate decays.
LEARNING_RATE_DECAY_FACTOR = 0.1 # Learning rate decay factor.
INITIAL_LEARNING_RATE = 0.1 # Initial learning rate.
# If a model is trained with multiple GPUs, prefix all Op names with tower_name
# to differentiate the operations. Note that this prefix is removed from the
# names of the summaries when visualizing a model.
TOWER_NAME = 'tower'
DATA_URL = 'http://www.cs.toronto.edu/~kriz/cifar-10-binary.tar.gz'
models = []
#Testing starting in the same place
#synapse0 = 2*np.random.random((1,hidden_dim)) - 1
#synapse1 = 2*np.random.random((hidden_dim,hidden_dim2)) - 1
#synapse2 = 2*np.random.random((hidden_dim2,1)) - 1
#Function definitions
def func(x,a,b,c):
return x*x*a + x*b + c
def flatten(x):
result = []
for el in x:
if hasattr(el, "__iter__") and not isinstance(el, basestring):
result.extend(flatten(el))
else:
result.append(el)
return result
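    # Illustrative call (not part of the original script):
    #   flatten([[1, 2], [3, [4]]])  # -> [1, 2, 3, 4]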
def generatecandidate4(a,b,c,tot):
candidate = [[np.random.random() for x in xrange(1)] for y in xrange(tot)]
candidatesolutions = [[func(x[0],a,b,c)] for x in candidate]
return (candidate, candidatesolutions)
def synapse_interpolate(synapse1, synapse2, t):
return (synapse2-synapse1)*t + synapse1
def model_interpolate(w1,b1,w2,b2,t):
m1w = w1
m1b = b1
m2w = w2
m2b = b2
mwi = [synapse_interpolate(m1we,m2we,t) for m1we, m2we in zip(m1w,m2w)]
mbi = [synapse_interpolate(m1be,m2be,t) for m1be, m2be in zip(m1b,m2b)]
return mwi, mbi
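    # Illustrative behaviour (not part of the original script):
    # synapse_interpolate is a straight-line blend, e.g.
    #   synapse_interpolate(0., 1., 0.25)  # -> 0.25
    # and model_interpolate applies the same blend elementwise to every
    # weight and bias array of the two models.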
def InterpBeadError(w1,b1, w2,b2, write = False, name = "00"):
errors = []
#xdat,ydat = generatecandidate4(.5, .25, .1, 1000)
#xdat,ydat = mnist.train.next_batch(1000)
#xdat = mnist.test.images
#ydat = mnist.test.labels
#xdat = np.array(xdat)
#ydat = np.array(ydat)
for tt in xrange(20):
#print tt
#accuracy = 0.
t = tt/20.
thiserror = 0
#x0 = tf.placeholder("float", [None, n_input])
#y0 = tf.placeholder("float", [None, n_classes])
weights, biases = model_interpolate(w1,b1,w2,b2, t)
#interp_model = multilayer_perceptron(w=weights, b=biases)
interp_model = convnet(w=weights, b=biases)
with interp_model.g.as_default():
xdat, ydat = cifar10.inputs(eval_data='test')
logit_test = interp_model.predict(xdat)
top_k_op = tf.nn.in_top_k(logit_test, ydat, 1)
pred = interp_model.predict(xdat)
init = tf.initialize_all_variables()
with tf.Session() as sess:
sess.run(init)
tf.train.start_queue_runners(sess=sess)
num_iter = 20
true_count = 0 # Counts the number of correct predictions.
total_sample_count = num_iter * batch_size
step = 0
while step < num_iter:
predictions = sess.run([top_k_op])
true_count += np.sum(predictions)
step += 1
precision = true_count / total_sample_count
print "Accuracy:", precision
#,"\t",tt,weights[0][1][0],weights[0][1][1]
thiserror = 1 - precision
errors.append(thiserror)
if write == True:
with open("f" + str(name) + ".out",'w+') as f:
for e in errors:
f.write(str(e) + "\n")
return max(errors), np.argmax(errors)
def _activation_summary(x):
"""Helper to create summaries for activations.
Creates a summary that provides a histogram of activations.
Creates a summary that measures the sparsity of activations.
Args:
x: Tensor
Returns:
nothing
"""
# Remove 'tower_[0-9]/' from the name in case this is a multi-GPU training
# session. This helps the clarity of presentation on tensorboard.
tensor_name = re.sub('%s_[0-9]*/' % TOWER_NAME, '', x.op.name)
tf.histogram_summary(tensor_name + '/activations', x)
tf.scalar_summary(tensor_name + '/sparsity', tf.nn.zero_fraction(x))
def _variable_on_cpu(name, shape, initializer):
"""Helper to create a Variable stored on CPU memory.
Args:
name: name of the variable
shape: list of ints
initializer: initializer for Variable
Returns:
Variable Tensor
"""
with tf.device('/cpu:0'):
dtype = tf.float16 if False else tf.float32
var = tf.get_variable(name, shape, initializer=initializer, dtype=dtype)
return var
def _variable_with_weight_decay(name, shape, stddev, wd):
"""Helper to create an initialized Variable with weight decay.
Note that the Variable is initialized with a truncated normal distribution.
A weight decay is added only if one is specified.
Args:
name: name of the variable
shape: list of ints
stddev: standard deviation of a truncated Gaussian
wd: add L2Loss weight decay multiplied by this float. If None, weight
decay is not added for this Variable.
Returns:
Variable Tensor
"""
dtype = tf.float16 if False else tf.float32
var = _variable_on_cpu(
name,
shape,
tf.truncated_normal_initializer(stddev=stddev, dtype=dtype))
if wd is not None:
weight_decay = tf.mul(tf.nn.l2_loss(var), wd, name='weight_loss')
tf.add_to_collection('losses', weight_decay)
return var
def distorted_inputs():
"""Construct distorted input for CIFAR training using the Reader ops.
Returns:
images: Images. 4D tensor of [batch_size, IMAGE_SIZE, IMAGE_SIZE, 3] size.
labels: Labels. 1D tensor of [batch_size] size.
Raises:
ValueError: If no data_dir
"""
if not FLAGS.data_dir:
raise ValueError('Please supply a data_dir')
data_dir = os.path.join(FLAGS.data_dir, 'cifar-10-batches-bin')
images, labels = cifar10_input.distorted_inputs(data_dir=data_dir,
batch_size=FLAGS.batch_size)
if False:
images = tf.cast(images, tf.float16)
labels = tf.cast(labels, tf.float16)
return images, labels
def inputs(eval_data):
"""Construct input for CIFAR evaluation using the Reader ops.
Args:
eval_data: bool, indicating if one should use the train or eval data set.
Returns:
images: Images. 4D tensor of [batch_size, IMAGE_SIZE, IMAGE_SIZE, 3] size.
labels: Labels. 1D tensor of [batch_size] size.
Raises:
ValueError: If no data_dir
"""
if not FLAGS.data_dir:
raise ValueError('Please supply a data_dir')
data_dir = os.path.join(FLAGS.data_dir, 'cifar-10-batches-bin')
images, labels = cifar10_input.inputs(eval_data=eval_data,
data_dir=data_dir,
batch_size=FLAGS.batch_size)
if False:
images = tf.cast(images, tf.float16)
labels = tf.cast(labels, tf.float16)
return images, labels
#Class definitions
class convnet():
def __init__(self, w=0, b=0, ind='00'):
self.index = ind
learning_rate = .001
training_epochs = 15
batch_size = 100
display_step = 1
# Network Parameters
n_hidden_1 = 256 # 1st layer number of features
n_hidden_2 = 256 # 2nd layer number of features
n_input = 784 # Guess quadratic function
n_classes = 10 #
self.g = tf.Graph()
self.params = []
with self.g.as_default():
#Note that by default, weights and biases will be initialized to random normal dists
if w==0:
self.weights = {
'c1': _variable_with_weight_decay('c1',shape=[5, 5, 3, 64],stddev=5e-2,wd=0.0),
'c2': _variable_with_weight_decay('c2',shape=[5, 5, 64, 64],stddev=5e-2,wd=0.0),
'fc1': _variable_with_weight_decay('fc1', shape=[2304, 384],stddev=0.04, wd=0.004),
'fc2': _variable_with_weight_decay('fc2', shape=[384, 192],stddev=0.04, wd=0.004),
'out': _variable_with_weight_decay('out', [192, NUM_CLASSES],stddev=1/192.0, wd=0.0)
}
self.weightslist = [self.weights['c1'],self.weights['c2'],self.weights['fc1'],self.weights['fc2'],self.weights['out']]
self.biases = {
'b1': _variable_on_cpu('b1', [64], tf.constant_initializer(0.0)),
'b2': _variable_on_cpu('b2', [64], tf.constant_initializer(0.1)),
'b3': _variable_on_cpu('b3', [384], tf.constant_initializer(0.1)),
'b4': _variable_on_cpu('b4', [192], tf.constant_initializer(0.1)),
'out': _variable_on_cpu('bo', [NUM_CLASSES],tf.constant_initializer(0.0))
}
self.biaseslist = [self.biases['b1'],self.biases['b2'],self.biases['b3'],self.biases['b4'],self.biases['out']]
else:
self.weights = {
'c1': tf.Variable(w[0]),
'c2': tf.Variable(w[1]),
'fc1': tf.Variable(w[2]),
'fc2': tf.Variable(w[3]),
'out': tf.Variable(w[4])
}
self.weightslist = [self.weights['c1'],self.weights['c2'],self.weights['fc1'],self.weights['fc2'],self.weights['out']]
self.biases = {
'b1': tf.Variable(b[0]),
'b2': tf.Variable(b[1]),
'b3': tf.Variable(b[2]),
'b4': tf.Variable(b[3]),
'out': tf.Variable(b[4])
}
self.biaseslist = [self.biases['b1'],self.biases['b2'],self.biases['b3'],self.biases['b4'],self.biases['out']]
self.saver = tf.train.Saver()
def predict(self, x):
with self.g.as_default():
layer_1 = tf.nn.conv2d(x, self.weights['c1'], [1, 1, 1, 1], padding='SAME')
layer_1 = tf.nn.bias_add(layer_1, self.biases['b1'])
layer_1 = tf.nn.relu(layer_1, name='layer_1')
#_activation_summary(layer_1)
pool_1 = tf.nn.max_pool(layer_1, ksize=[1, 3, 3, 1], strides=[1, 2, 2, 1],padding='SAME', name='pool1')
norm_1 = tf.nn.lrn(pool_1, 4, bias=1.0, alpha=0.001 / 9.0, beta=0.75,name='norm1')
layer_2 = tf.nn.conv2d(norm_1, self.weights['c2'], [1, 1, 1, 1], padding='SAME')
layer_2 = tf.nn.bias_add(layer_2, self.biases['b2'])
layer_2 = tf.nn.relu(layer_2, name='layer_2')
#_activation_summary(layer_2)
norm_2 = tf.nn.lrn(layer_2, 4, bias=1.0, alpha=0.001 / 9.0, beta=0.75,name='norm2')
pool_2 = tf.nn.max_pool(norm_2, ksize=[1, 3, 3, 1],strides=[1, 2, 2, 1], padding='SAME', name='pool2')
reshape = tf.reshape(pool_2, [FLAGS.batch_size, -1])
layer_3 = tf.nn.relu(tf.matmul(reshape, self.weights['fc1']) + self.biases['b3'], name='fc1')
#_activation_summary(layer_3)
layer_4 = tf.nn.relu(tf.matmul(layer_3, self.weights['fc2']) + self.biases['b4'], name='fc2')
#_activation_summary(layer_4)
out_layer = tf.add(tf.matmul(layer_4, self.weights['out']), self.biases['out'], name='out')
#_activation_summary(out)
return out_layer
def ReturnParamsAsList(self):
with self.g.as_default():
with tf.Session() as sess:
# Restore variables from disk
self.saver.restore(sess, "/home/dfreeman/PythonFun/tmp/model"+str(self.index)+".ckpt")
return sess.run(self.weightslist), sess.run(self.biaseslist)
class multilayer_perceptron():
#weights = {}
#biases = {}
def __init__(self, w=0, b=0, ind='00'):
self.index = ind #used for reading values from file
#See the filesystem convention below (is this really necessary?)
#I'm going to eschew writing to file for now because I'll be generating too many files
#Currently, the last value of the parameters is stored in self.params to be read
learning_rate = 0.001
training_epochs = 15
batch_size = 100
display_step = 1
# Network Parameters
n_hidden_1 = 256 # 1st layer number of features
n_hidden_2 = 256 # 2nd layer number of features
n_input = 784 # Guess quadratic function
n_classes = 10 #
self.g = tf.Graph()
self.params = []
with self.g.as_default():
#Note that by default, weights and biases will be initialized to random normal dists
if w==0:
self.weights = {
'h1': tf.Variable(tf.random_normal([n_input, n_hidden_1])),
'h2': tf.Variable(tf.random_normal([n_hidden_1, n_hidden_2])),
'out': tf.Variable(tf.random_normal([n_hidden_2, n_classes]))
}
self.weightslist = [self.weights['h1'],self.weights['h2'],self.weights['out']]
self.biases = {
'b1': tf.Variable(tf.random_normal([n_hidden_1])),
'b2': tf.Variable(tf.random_normal([n_hidden_2])),
'out': tf.Variable(tf.random_normal([n_classes]))
}
self.biaseslist = [self.biases['b1'],self.biases['b2'],self.biases['out']]
else:
self.weights = {
'h1': tf.Variable(w[0]),
'h2': tf.Variable(w[1]),
'out': tf.Variable(w[2])
}
self.weightslist = [self.weights['h1'],self.weights['h2'],self.weights['out']]
self.biases = {
'b1': tf.Variable(b[0]),
'b2': tf.Variable(b[1]),
'out': tf.Variable(b[2])
}
self.biaseslist = [self.biases['b1'],self.biases['b2'],self.biases['out']]
self.saver = tf.train.Saver()
def UpdateWeights(self, w, b):
with self.g.as_default():
self.weights = {
'h1': tf.Variable(w[0]),
'h2': tf.Variable(w[1]),
'out': tf.Variable(w[2])
}
self.weightslist = [self.weights['h1'],self.weights['h2'],self.weights['out']]
self.biases = {
'b1': tf.Variable(b[0]),
'b2': tf.Variable(b[1]),
'out': tf.Variable(b[2])
}
self.biaseslist = [self.biases['b1'],self.biases['b2'],self.biases['out']]
def predict(self, x):
with self.g.as_default():
layer_1 = tf.add(tf.matmul(x, self.weights['h1']), self.biases['b1'])
layer_1 = tf.nn.relu(layer_1)
# Hidden layer with RELU activation
layer_2 = tf.add(tf.matmul(layer_1, self.weights['h2']), self.biases['b2'])
layer_2 = tf.nn.relu(layer_2)
# Output layer with linear activation
out_layer = tf.matmul(layer_2, self.weights['out']) + self.biases['out']
return out_layer
def ReturnParamsAsList(self):
with self.g.as_default():
with tf.Session() as sess:
# Restore variables from disk
self.saver.restore(sess, "/home/dfreeman/PythonFun/tmp/model"+str(self.index)+".ckpt")
return sess.run(self.weightslist), sess.run(self.biaseslist)
class WeightString:
def __init__(self, w1, b1, w2, b2, numbeads, threshold):
self.w1 = w1
self.w2 = w2
self.b1 = b1
self.b2 = b2
#self.w2, self.b2 = m2.params
self.AllBeads = []
self.threshold = threshold
self.AllBeads.append([w1,b1])
for n in xrange(numbeads):
ws,bs = model_interpolate(w1,b1,w2,b2, (n + 1.)/(numbeads+1.))
self.AllBeads.append([ws,bs])
self.AllBeads.append([w2,b2])
self.ConvergedList = [False for f in xrange(len(self.AllBeads))]
self.ConvergedList[0] = True
self.ConvergedList[-1] = True
def SpringNorm(self, order):
totalweights = 0.
totalbiases = 0.
totaltotal = 0.
#Energy between mobile beads
for i,b in enumerate(self.AllBeads):
if i < len(self.AllBeads)-1:
#print "Tallying energy between bead " + str(i) + " and bead " + str(i+1)
subtotalw = 0.
subtotalb = 0.
#for j in xrange(len(b)):
subtotalw += np.linalg.norm(np.subtract(flatten(self.AllBeads[i][0]),flatten(self.AllBeads[i+1][0])),ord=order)#/len(self.beads[0][j])
#for j in xrange(len(b)):
subtotalb += np.linalg.norm(np.subtract(flatten(self.AllBeads[i][1]),flatten(self.AllBeads[i+1][1])),ord=order)#/len(self.beads[0][j])
totalweights+=subtotalw
totalbiases+=subtotalb
totaltotal+=subtotalw + subtotalb
weightdist = np.linalg.norm(np.subtract(flatten(self.AllBeads[0][0]),flatten(self.AllBeads[-1][0])),ord=order)
biasdist = np.linalg.norm(np.subtract(flatten(self.AllBeads[0][1]),flatten(self.AllBeads[-1][1])),ord=order)
totaldist = np.linalg.norm(np.subtract(flatten(self.AllBeads[0]),flatten(self.AllBeads[-1])),ord=order)
return [totalweights,totalbiases,totaltotal, weightdist, biasdist, totaldist]#/len(self.beads)
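    # SpringNorm sums, under the given vector norm order, the bead-to-bead
    # distances along the string (weights, biases, and their total) and also
    # reports the straight endpoint-to-endpoint distances. For example, three
    # beads at flattened positions 0, 1 and 3 give an order-2 path length of
    # |1-0| + |3-1| = 3 and an endpoint distance of |3-0| = 3.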
def SGDBead(self, bead, thresh, maxindex):
finalerror = 0.
#thresh = .05
# Parameters
learning_rate = 0.001
training_epochs = 15
batch_size = 100
display_step = 1
curWeights, curBiases = self.AllBeads[bead]
#test_model = multilayer_perceptron(w=curWeights, b=curBiases)
test_model = convnet(w=curWeights, b=curBiases)
with test_model.g.as_default():
global_step = tf.Variable(0, trainable=False)
# Get images and labels for CIFAR-10.
images, labels = cifar10.distorted_inputs()
test_images, test_labels = cifar10.inputs(eval_data='test')
# Build a Graph that computes the logits predictions from the
# inference model.
logits = test_model.predict(images)
logit_test = test_model.predict(test_images)
# Calculate loss.
loss = cifar10.loss(logits, labels)
# Build a Graph that trains the model with one batch of examples and
# updates the model parameters.
train_op = cifar10.train(loss, global_step)
top_k_op = tf.nn.in_top_k(logit_test, test_labels, 1)
# Build an initialization operation to run below.
init = tf.initialize_all_variables()
# Start running operations on the Graph.
#sess = tf.Session(config=tf.ConfigProto(
# log_device_placement=FLAGS.log_device_placement))
with tf.Session(config=tf.ConfigProto(
log_device_placement=False)) as sess:
sess.run(init)
tf.train.start_queue_runners(sess=sess)
step = 0
stopcond = True
while step < max_steps and stopcond:
start_time = time.time()
_, loss_value = sess.run([train_op, loss])
duration = time.time() - start_time
assert not np.isnan(loss_value), 'Model diverged with loss = NaN'
if step % 10 == 0:
num_examples_per_step = batch_size
examples_per_sec = num_examples_per_step / duration
sec_per_batch = float(duration)
format_str = ('%s: step %d, loss = %.2f (%.1f examples/sec; %.3f '
'sec/batch)')
print (format_str % (datetime.now(), step, loss_value,
examples_per_sec, sec_per_batch))
if step % 100 == 0:
num_iter = int(math.ceil(num_examples / batch_size))
true_count = 0 # Counts the number of correct predictions.
total_sample_count = num_iter * batch_size
stepp = 0
while stepp < num_iter:
predictions = sess.run([top_k_op])
true_count += np.sum(predictions)
stepp += 1
# Compute precision @ 1.
precision = true_count / total_sample_count
print('%s: precision @ 1 = %.3f' % (datetime.now(), precision))
if precision > 1 - thresh:
stopcond = False
test_model.params = sess.run(test_model.weightslist), sess.run(test_model.biaseslist)
self.AllBeads[bead]=test_model.params
finalerror = 1 - precision
print ("Final bead error: ",str(finalerror))
step += 1
return finalerror
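# A minimal sketch of what model_interpolate (defined earlier in this script)
# is assumed to do when seeding the beads above: an element-wise convex
# combination of the two parameter lists at fraction t. Illustrative only;
# the actual helper may differ.
def _interp_sketch(w1, b1, w2, b2, t):
    ws = [(1. - t) * np.array(a) + t * np.array(b) for a, b in zip(w1, w2)]
    bs = [(1. - t) * np.array(a) + t * np.array(b) for a, b in zip(b1, b2)]
    return ws, bs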
#Model generation
#copy_model = multilayer_perceptron(ind=0)
copy_model = convnet(ind=0)
for ii in xrange(2):
'''weights = {
'h1': tf.Variable(tf.random_normal([n_input, n_hidden_1])),
'h2': tf.Variable(tf.random_normal([n_hidden_1, n_hidden_2])),
'out': tf.Variable(tf.random_normal([n_hidden_2, n_classes]))
}
biases = {
'b1': tf.Variable(tf.random_normal([n_hidden_1])),
'b2': tf.Variable(tf.random_normal([n_hidden_2])),
'out': tf.Variable(tf.random_normal([n_classes]))
}'''
# Construct model with different initial weights
#test_model = multilayer_perceptron(ind=ii)
test_model = convnet(ind=ii)
#Construct model with same initial weights
#test_model = copy.copy(copy_model)
#test_model.index = ii
#print test_model.weights
models.append(test_model)
with test_model.g.as_default():
global_step = tf.Variable(0, trainable=False)
# Get images and labels for CIFAR-10.
images, labels = cifar10.distorted_inputs()
test_images, test_labels = cifar10.inputs(eval_data='test')
# Build a Graph that computes the logits predictions from the
# inference model.
logits = test_model.predict(images)
logit_test = test_model.predict(test_images)
# Calculate loss.
loss = cifar10.loss(logits, labels)
# Build a Graph that trains the model with one batch of examples and
# updates the model parameters.
train_op = cifar10.train(loss, global_step)
top_k_op = tf.nn.in_top_k(logit_test, test_labels, 1)
# Build an initialization operation to run below.
init = tf.initialize_all_variables()
# Start running operations on the Graph.
#sess = tf.Session(config=tf.ConfigProto(
# log_device_placement=FLAGS.log_device_placement))
with tf.Session(config=tf.ConfigProto(
log_device_placement=False)) as sess:
sess.run(init)
tf.train.start_queue_runners(sess=sess)
step = 0
stopcond = True
while step < max_steps and stopcond:
start_time = time.time()
_, loss_value = sess.run([train_op, loss])
duration = time.time() - start_time
assert not np.isnan(loss_value), 'Model diverged with loss = NaN'
if step % 10 == 0:
num_examples_per_step = batch_size
examples_per_sec = num_examples_per_step / duration
sec_per_batch = float(duration)
format_str = ('%s: step %d, loss = %.2f (%.1f examples/sec; %.3f '
'sec/batch)')
print (format_str % (datetime.now(), step, loss_value,
examples_per_sec, sec_per_batch))
if step % 100 == 0:
num_iter = int(math.ceil(num_examples / batch_size))
true_count = 0 # Counts the number of correct predictions.
total_sample_count = num_iter * batch_size
stepp = 0
while stepp < num_iter:
predictions = sess.run([top_k_op])
true_count += np.sum(predictions)
stepp += 1
# Compute precision @ 1.
precision = true_count / total_sample_count
print('%s: precision @ 1 = %.3f' % (datetime.now(), precision))
if precision > 1 - thresh:
stopcond = False
test_model.params = sess.run(test_model.weightslist), sess.run(test_model.biaseslist)
step += 1
#Connected components search
#Used for softening the training criteria. There's some fuzz required due to the difference in
#training error between test and training
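# The bookkeeping below is a simple label-propagation union: every model index
# starts out 'not connected'; when a pair (i1, i2) is found to be connected,
# both take a common component label, and a later merge of two labeled
# components relabels every member of the absorbed one (the 'hold' loop below).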
thresh_multiplier = 1.1
results = []
connecteddict = {}
for i1 in xrange(len(models)):
connecteddict[i1] = 'not connected'
test = WeightString(models[0].params[0],models[0].params[1],models[1].params[0],models[1].params[1],1,1)
for i1 in xrange(len(models)):
print i1
for i2 in xrange(len(models)):
if i2 > i1 and ((connecteddict[i1] != connecteddict[i2]) or (connecteddict[i1] == 'not connected' or connecteddict[i2] == 'not connected')) :
#print "slow1?"
#print i1,i2
#print models[0]
#print models[1]
#print models[0].params
#print models[1].params
#test = WeightString(models[i1].params[0],models[i1].params[1],models[i2].params[0],models[i2].params[1],1,1)
training_threshold = thresh
depth = 0
d_max = 10
#Check error between beads
#Alg: for each bead at depth i, SGD until converged.
#For beads with max error along path too large, add another bead between them, repeat
#Keeps track of which indices to check the interpbeaderror between
newindices = [0,1]
while (depth < d_max):
print newindices
#print "slow2?"
#X, y = GenTest(X,y)
counter = 0
for i,c in enumerate(test.ConvergedList):
if c == False:
#print "slow3?"
error = test.SGDBead(i, .98*training_threshold, 20)
#print "slow4?"
#if counter%5000==0:
# print counter
# print error
test.ConvergedList[i] = True
print test.ConvergedList
interperrors = []
interp_bead_indices = []
for b in xrange(len(test.AllBeads)-1):
if b in newindices:
e = InterpBeadError(test.AllBeads[b][0],test.AllBeads[b][1], test.AllBeads[b+1][0], test.AllBeads[b+1][1])
interperrors.append(e)
interp_bead_indices.append(b)
print interperrors
if max([ee[0] for ee in interperrors]) < thresh_multiplier*training_threshold:
depth = 2*d_max
#print test.ConvergedList
#print test.SpringNorm(2)
#print "Done!"
else:
del newindices[:]
#Interperrors stores the maximum error on the path between beads
#shift index to account for added beads
shift = 0
for i, ie in enumerate(interperrors):
if ie[0] > thresh_multiplier*training_threshold:
k = interp_bead_indices[i]
ws,bs = model_interpolate(test.AllBeads[k+shift][0],test.AllBeads[k+shift][1],\
test.AllBeads[k+shift+1][0],test.AllBeads[k+shift+1][1],\
ie[1]/20.)
test.AllBeads.insert(k+shift+1,[ws,bs])
test.ConvergedList.insert(k+shift+1, False)
newindices.append(k+shift+1)
newindices.append(k+shift)
shift+=1
#print test.ConvergedList
#print test.SpringNorm(2)
#print d_max
depth += 1
if depth == 2*d_max:
results.append([i1,i2,test.SpringNorm(2),"Connected"])
if connecteddict[i1] == 'not connected' and connecteddict[i2] == 'not connected':
connecteddict[i1] = i1
connecteddict[i2] = i1
if connecteddict[i1] == 'not connected':
connecteddict[i1] = connecteddict[i2]
else:
if connecteddict[i2] == 'not connected':
connecteddict[i2] = connecteddict[i1]
else:
if connecteddict[i1] != 'not connected' and connecteddict[i2] != 'not connected':
hold = connecteddict[i2]
connecteddict[i2] = connecteddict[i1]
for h in xrange(len(models)):
if connecteddict[h] == hold:
connecteddict[h] = connecteddict[i1]
else:
results.append([i1,i2,test.SpringNorm(2),"Disconnected"])
#print results[-1]
uniquecomps = []
totalcomps = 0
for i in xrange(len(models)):
if not (connecteddict[i] in uniquecomps):
uniquecomps.append(connecteddict[i])
if connecteddict[i] == 'not connected':
totalcomps += 1
#print i,connecteddict[i]
notconoffset = 0
if 'not connected' in uniquecomps:
notconoffset = -1
#with open('DSSCIFAR.' + str(thresh) + '.' + str(num_run) + '.out','w+') as f:
print "Thresh: " + str(thresh) + "\n"
print "Comps: " + str(len(uniquecomps) + notconoffset + totalcomps) + "\n"
connsum = []
for r in results:
if r[3] == "Connected":
connsum.append(r[2])
#print r[2]
print "***\n"
print str(len(test.AllBeads)) + "\n"
print "\t".join([str(s) for s in connsum[0]])
#print np.average(connsum)
#print np.std(connsum)
|
mit
| -1,522,237,430,314,512,100
| 29.982571
| 144
| 0.615123
| false
| 2.942473
| true
| false
| false
|
neqelr17/banknotes
|
banknotes/settings.py
|
1
|
3207
|
"""
Django settings for banknotes project.
Generated by 'django-admin startproject' using Django 1.9.7.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'm@hb1flp0z#d@+#(l=2^ox!(945_4o7(q5$3c2___h18$m=ad5'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'budget.apps.BudgetConfig',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'banknotes.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'banknotes.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'
|
gpl-3.0
| -6,370,282,057,669,393,000
| 25.286885
| 91
| 0.688806
| false
| 3.508753
| false
| false
| false
|
berserkerbernhard/Lidskjalv
|
code/networkmonitor/modules/groups/grouplistmenu.py
|
1
|
2968
|
import os
import time
import dialog
from modules.sitegrouphosttools import SiteGroupHostTools
from modules.groups.group import Group
from modules.groups.groupform import GroupForm
from modules.groups.groupmenu import GroupMenu
from modules.sitegrouphosttools import get_group_members
class GroupListMenu(SiteGroupHostTools):
def __init__(self):
self.d = dialog.Dialog(dialog="dialog")
self.storage_path = os.path.expanduser("~/LidskjalvData")
self.g = Group()
self.gf = GroupForm()
self.gm = GroupMenu()
self.sght = SiteGroupHostTools()
def show_menu(self, site):
while True:
menu = self.build_menu(site)
sz = os.get_terminal_size()
s = "Select a group or action in site '%s'" % site
code, tag = self.d.menu(s,
title="Site: '%s' - Groups menu" % site,
height=sz.lines - 5,
width=sz.columns - 8,
menu_height=sz.lines - 15,
backtitle="Lidskjalv",
choices=menu)
if code == self.d.OK:
r = self.process_menu(site, tag)
if r is None:
break
else:
break
def build_menu(self, site):
sp = self.storage_path
        for group_name in ('Cisco Switches', 'MAC exempt', 'Nagios'):
            if not self.g.group_exist(site, group_name):
                self.g.create_group(site,
                                    group_name,
                                    "",
                                    int(time.time()),
                                    [])
listofgroups = self.g.list_of_groups_by_name(site)
menu = []
menu.append(["AG", "Add group"])
menu.append(["Q", "Quit"])
menu.append(["", " "])
for group in listofgroups:
memberslist = get_group_members(sp, site, group)
ml = len(memberslist)
gd = self.g.get_group_description(site, group)
d = "%s Member(s) - %s" % (str(ml).rjust(3), gd)
menu.append([group, d])
return menu
def process_menu(self, site, tag):
if tag == "Q":
return None
if tag == "AG":
self.gf.group_form(site, None)
if tag in self.g.list_of_groups(site):
self.gm.show_menu(site, tag)
return True
|
gpl-3.0
| -3,861,360,141,004,099,600
| 36.56962
| 76
| 0.453504
| false
| 4.332847
| false
| false
| false
|
bielawb/PSConfAsia17-Linux
|
Scripts/httpsWinRM.py
|
1
|
2125
|
#!/usr/bin/env python
# coding: utf-8
import getpass
from re import search
from subprocess import Popen, PIPE
from winrm import Session
from sys import exit, argv
if len(argv) < 2 :
    exit('Usage: %s <command>' % argv[0])
polecenie = " ".join(argv[1:])
exitCode = 0
class PowerShellError(Exception):
pass
class bcolors:
HEADER = '\033[95m'
OKBLUE = '\033[94m'
OKGREEN = '\033[92m'
WARNING = '\033[93m'
FAIL = '\033[91m'
ENDC = '\033[0m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
def uruchom_ps(polecenie):
sesja = Session(
'https://jumpbox.monad.net:5986',
auth = (None, None),
transport = 'kerberos',
kerberos_delegation = True,
server_cert_validation = 'ignore'
)
try:
wynik = sesja.run_ps(polecenie)
print wynik.std_out
if wynik.status_code > 0:
raise PowerShellError(wynik.std_err)
else:
print "%sPolecenie zwróciło kod 0 %s" % (bcolors.OKGREEN, bcolors.ENDC)
except:
raise
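# Note: the 'kerberos' transport used above relies on a valid Kerberos TGT in
# the local ticket cache; zaloguj() below obtains one via kinit when the first
# attempt fails with 'No Kerberos credentials available'.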
def zaloguj():
login = "%s@MONAD.NET" % getpass.getuser()
kinit = Popen(['kinit', login, '-l', '1h', '-f'], stdin = PIPE, stdout = PIPE, stderr = PIPE)
    kinit.stdin.write('%s\n' % getpass.getpass('Enter password: '))
kinit.wait()
try:
uruchom_ps(polecenie)
except PowerShellError as pse:
print "PowerShell zwrócił błąd:\n%s%s%s" % (bcolors.FAIL, pse, bcolors.ENDC)
exitCode = 1
except Exception as e:
print "Wyjątek:\n%s%s%s" % (bcolors.FAIL, e, bcolors.ENDC)
if search('No Kerberos credentials available', e.message):
print "Błąd wskazuje na konieczność zalogowania..."
try:
zaloguj()
uruchom_ps(polecenie)
except Exception as e:
print "%sNie udało się uruchomić polecenia '%s'. Prawdopodobnie podano nieprawidłowe hasło, bądź użytkownik nie ma odpowiednich uprawnień." % (bcolors.FAIL, polecenie)
print "Błąd: %s %s" % (e, bcolors.ENDC)
exitCode = 2
else:
exitCode = 3
finally:
exit(exitCode)
|
mit
| 7,049,195,507,617,604,000
| 28.577465
| 179
| 0.603333
| false
| 2.720207
| false
| false
| false
|
adityagilra/2015_spiking_population_response
|
ExcInhNetflex.py
|
1
|
5096
|
# -*- coding: utf-8 -*-
"""
Spiking neural net of LIF/SRM neurons with AI firing
written by Aditya Gilra (c) July 2015.
"""
from brian2 import * # also does 'from pylab import *'
from embedded_consts import *
import random
## Cannot make this network a Class,
## since brian standalone mode wants all Brian objects to be in the same scope.
###### neuronal constants
#nrn_type = 'LIF' # Leaky Integrate-and-Fire
#nrn_type = 'SRM' # Spike Response Model
nrn_type = 'SRM0' # Spike Response Model exact renewal
R = 1.0e8*ohm
tausynE = 100.0*ms # synaptic tau exc->exc
tausyn = 10.0*ms # synaptic tau for all else
tau0 = 20.0*ms # membrane tau
tau0SI = tau0/second
noise = 20.0*mV
uth = 10.0*mV
uth_base = 0.0*mV
refrT = 0.5*ms
###### network constants
C = 100 # Number of incoming connections on each neuron (exc or inh)
fC = fexc # fraction fC incoming connections are exc, rest inhibitory
excC = int(fC*C) # number of exc incoming connections
if nrn_type == "LIF":
I0base = 10.5*mV/R # base current to all neurons at all times
J = 0.8*mV/R*(10*ms/tausynE)
else:
I0base = 0.0*mV/R # base current to all neurons at all times
J = 0.8*mV/R*(10*ms/tausynE)
# exc strength is J (/R as we multiply by R in eqn)
# Critical J (for LIF network with delta synapses) is
# ~ 0.45e-3 V in paper for N = 10000, C = 1000
# Note individual rate fluctuations
# for J = 0.2e-3 V vs J = 0.8e-3 V
# For SRM/SRM0, synaptic filtering but no u integration
# In Ostojic 2014 / Brunel 2000, u integration,
# but no synaptic filtering.
# Both are equivalent if tausyn and membrane tau are same.
# But LIF with synaptic filtering is different
g = 5.0*tausynE/tausyn # if all exc syns have tausynE
#g = 5.0*(tausynE/tausyn)**2 # if only exc->exc syns have tausynE, but exc->inh is tausyn
# -gJ is the inh strength. For exc-inh balance g >~ f/(1-f) = 4
# a tausynE/tausyn factor is also needed to compensate tau-s
# ###########################################
# Brian network creation
# ###########################################
# reset eta acts as a threshold increase
if nrn_type == "LIF": # LIF
model_eqns = """
du/dt = 1/tau0*(-u + (Ibase + KE + K) * R + deltaItimed( t, i )) : volt
Ibase : amp
dKE/dt = -KE/tausynE : amp
dK/dt = -K/tausyn : amp
"""
threshold_eqns = "u>=uth"
reset_eqns = "u=0*mV"
else: # SRM
model_eqns = """
u = (Ibase + KE + K) * R + deltaItimed( t, i ): volt
Ibase : amp
deta/dt = -eta/tau0 : volt
dKE/dt = -KE/tausynE : amp
dK/dt = -K/tausyn : amp
"""
threshold_eqns = "rand()<=1.0/tau0*exp((u-(eta+uth_base))/noise)*tstep"
if nrn_type == "SRM0": # SRM0 (exact renewal process)
reset_eqns = "eta=uth"
else: # usual SRM (approx as quasi-renewal process)
reset_eqns = "eta+=uth"
# the hazard function rho is the firing rate,
# in time dt the probability to fire is rho*dt.
# noise below is only the output noise,
# input spiking noise comes from spiking during the simulation
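# Concretely, the SRM threshold expression above implements the exponential
# escape rate rho(u) = (1/tau0) * exp((u - (eta + uth_base)) / noise), so a
# neuron fires within a timestep with probability ~ rho(u)*tstep.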
Nrns = NeuronGroup(Nbig, model_eqns, \
threshold=threshold_eqns,\
reset=reset_eqns,
refractory = refrT)
Nrns.Ibase = I0base # constant input to all inputs
# there is also transient input above
if nrn_type == 'LIF':
Nrns.u = uniform(0.0,uth/volt,size=Nbig)*volt
# for LIF, u is distibuted
else:
Nrns.eta = uth # initially, all SRM neurons are as if just reset
# brain2 code to make, connect and weight the background synapses
con = Synapses(Nrns,Nrns,'''w : amp
useSynE : 1''',\
pre='KE += useSynE*w; K += (1-useSynE)*w')
## Connections from some Exc/Inh neurons to each neuron
random.seed(100) # set seed for reproducibility of simulations
seed(100)
conn_i = []
conn_j = []
for jidx in range(0,Nbig):
## draw excC number of neuron indices out of NmaxExc neurons
preIdxsE = random.sample(range(NEbig),excC)
## draw inhC=C-excC number of neuron indices out of inhibitory neurons
preIdxsI = random.sample(range(NEbig,Nbig),C-excC)
## connect these presynaptically to i-th post-synaptic neuron
## choose the synapses object based on whether post-syn nrn is exc or inh
conn_i += preIdxsE
conn_j += [jidx]*excC
conn_i += preIdxsI
conn_j += [jidx]*(C-excC)
con.connect(conn_i,conn_j)
con.delay = syndelay
con.useSynE['i<NEbig'] = 1.0
con.w['i<NEbig'] = J
con.w['i>=NEbig'] = -g*J
#con.w = -g*J # kind of winner take all, gives switching
|
gpl-3.0
| 5,565,042,228,597,522,000
| 39.768
| 89
| 0.562991
| false
| 3.132145
| false
| false
| false
|
HazenBabcock/brigl
|
test/driver.py
|
1
|
2249
|
#!/usr/bin/env python
"""
This returns an automated web browser to use for automated testing. It also
includes some utility functions.
https://www.seleniumhq.org/
http://selenium-python.readthedocs.io/
"""
import time
from selenium import webdriver
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
# Web browser interface.
def getDriver():
"""
This is configured to use Chrome, change as desired.
"""
desired = DesiredCapabilities.CHROME
desired['loggingPrefs'] = {'browser' : 'ALL'}
options = webdriver.ChromeOptions()
options.add_argument("--window-size=1000,1000")
driver = webdriver.Chrome(executable_path="./chromedriver",
desired_capabilities = desired,
chrome_options = options)
return driver
# Utility functions.
class BRIGLTestException(Exception):
pass
def noSevereErrors(driver, ignore_404 = None):
    """
    ignore_404 - A list of files for which it is okay if they are missing.
    """
    # Avoid a mutable default argument: with "ignore_404 = []" the same list
    # would accumulate "favicon.ico" entries across repeated calls.
    if ignore_404 is None:
        ignore_404 = []
    ignore_404 = ignore_404 + ["favicon.ico"]
log_data = driver.get_log('browser')
severe_errors = parseLog(log_data)
if (len(severe_errors) > 0):
is_severe = False
for s_err in severe_errors:
is_ignored = False
for i_404 in ignore_404:
if (i_404 in s_err['message']):
is_ignored = True
break
if not is_ignored:
is_severe = True
break
if is_severe:
print("Severe error(s) detected:")
for elt in severe_errors:
print(elt)
raise BRIGLTestException("Severe error(s) detected.")
def parseLog(log_data, level = 'SEVERE'):
"""
Return only those messages with the specified level.
"""
temp = []
for elt in log_data:
if (elt['level'] == level):
temp.append(elt)
return temp
def pprintLog(log_data):
"""
Pretty print log messages.
"""
for elt in log_data:
print(elt)
if (__name__ == "__main__"):
driver = getDriver()
print("version is", driver.capabilities['version'])
driver.close()
|
gpl-3.0
| -7,560,753,491,243,775,000
| 24.850575
| 80
| 0.582926
| false
| 4.044964
| false
| false
| false
|
ctogle/dilapidator
|
test/geometry/quat_tests.py
|
1
|
6809
|
from dilap.geometry.quat import quat
from dilap.geometry.vec3 import vec3
import dilap.geometry.tools as dpr
import matplotlib.pyplot as plt
import unittest,numpy,math
import pdb
#python3 -m unittest discover -v ./ "*tests.py"
class test_quat(unittest.TestCase):
def test_av(self):
a = 3*dpr.PI4
u1,u2,u3 = vec3(1,0,0),vec3(0,-1,0),vec3(0,0,1)
q1,q2 = quat(0,0,0,0).av(a,u1),quat(0,0,0,0).av(a,u2)
q3,q4 = quat(0,0,0,0).av(-a,u3),quat(0,0,0,0).av(-a,u2)
self.assertTrue(q1.w > 0.1)
self.assertTrue(q1.x > 0.1)
self.assertTrue(dpr.isnear(q1.y,0))
self.assertTrue(dpr.isnear(q1.z,0))
self.assertTrue(q2.w > 0.1)
self.assertTrue(dpr.isnear(q2.x,0))
self.assertTrue(q2.y < -0.1)
self.assertTrue(dpr.isnear(q2.z,0))
self.assertTrue(q3.w > 0.1)
self.assertTrue(dpr.isnear(q3.x,0))
self.assertTrue(dpr.isnear(q3.y,0))
self.assertTrue(q3.z < -0.1)
self.assertFalse(q2 == q4.cp().flp())
self.assertTrue(q2 == q4.cnj())
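        # The sign checks above are consistent with the usual axis-angle
        # construction q = (cos(a/2), sin(a/2)*u) -- assuming that is what
        # quat.av implements -- since cos(3*pi/8) and sin(3*pi/8) are both
        # positive for a = 3*pi/4.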
def test_uu(self):
u1,u2,u3 = vec3(1,0,0),vec3(0,-1,0),vec3(0,0,1)
q1,q2 = quat(0,0,0,0).uu(u1,u2),quat(0,0,0,0).uu(u1,u3)
q3,q4 = quat(0,0,0,0).uu(u2,u3),quat(0,0,0,0).uu(u3,u2)
self.assertTrue(q1.w > 0.1)
self.assertTrue(dpr.isnear(q1.x,0))
self.assertTrue(dpr.isnear(q1.y,0))
self.assertTrue(q1.z < -0.1)
self.assertTrue(q2.w > 0.1)
self.assertTrue(dpr.isnear(q2.x,0))
self.assertTrue(q2.y < -0.1)
self.assertTrue(dpr.isnear(q2.z,0))
self.assertTrue(q3 == q4.cnj())
def test_toxy(self):
q1 = quat(0,0,0,0).toxy(vec3(0,0,-1))
#print('toxy\v\t',q1)
self.assertEqual(q1.w,0)
self.assertEqual(q1.x,1)
def test_cp(self):
q1 = quat(1,2,3,4)
self.assertTrue(q1 is q1)
self.assertFalse(q1 is q1.cp())
self.assertTrue(q1 == q1.cp())
#def test_cpf(self):
def test_isnear(self):
q1,q2 = quat(1,1,1,0),quat(1,1,1,0.1)
q3,q4 = quat(1,1,1,1),quat(1,1.000001,1,1)
self.assertEqual(q1.isnear(q1),1)
self.assertEqual(q3.isnear(q3),1)
self.assertEqual(q1.isnear(q2),0)
self.assertEqual(q2.isnear(q1),0)
self.assertEqual(q1.isnear(q3),0)
self.assertEqual(q2.isnear(q3),0)
self.assertEqual(q2.isnear(q4),0)
self.assertEqual(q3.isnear(q4),1)
def test_mag2(self):
q1,q2,q3 = quat(1,0,0,0),quat(1,1,1,0),quat(0,2,5,11)
self.assertEqual(dpr.isnear(q1.mag2(),1),1)
self.assertEqual(dpr.isnear(q2.mag2(),3),1)
self.assertEqual(dpr.isnear(q3.mag2(),150),1)
def test_mag(self):
q1,q2,q3 = quat(1,0,0,0),quat(1,1,1,0),quat(0,2,5,11)
self.assertEqual(dpr.isnear(q1.mag(),1),1)
self.assertEqual(dpr.isnear(q2.mag(),math.sqrt(3)),1)
self.assertEqual(dpr.isnear(q3.mag(),math.sqrt(150)),1)
def test_nrm(self):
q1,q2,q3 = quat(1,0,0,0),quat(1,1,1,0),quat(0,2,5,11)
self.assertEqual(dpr.isnear(q1.cp().nrm().mag(),1),1)
self.assertEqual(dpr.isnear(q2.cp().nrm().mag(),1),1)
self.assertEqual(dpr.isnear(q3.cp().nrm().mag(),1),1)
self.assertTrue(q1.cp().nrm().mag() == q1.mag())
self.assertTrue(q1.nrm() is q1)
self.assertFalse(q2.cp().nrm().mag() == q2.mag())
self.assertTrue(q2.nrm() is q2)
self.assertFalse(q3.cp().nrm().mag() == q3.mag())
self.assertTrue(q3.nrm() is q3)
def test_flp(self):
q1,q2 = quat(1,0,0,0),quat(1,1,1,0)
q3,q4 = quat(0,2,5,11),quat(-1,1,1,0)
self.assertFalse(q1.cp().flp() == q1)
self.assertFalse(q2.cp().flp() == q2)
self.assertTrue(q3.cp().flp() == q3)
self.assertFalse(q4.cp().flp() == q4)
self.assertTrue(q2.cp().flp() == q4)
self.assertTrue(q1.flp() is q1)
self.assertTrue(q2.flp() is q2)
self.assertTrue(q3.flp() is q3)
self.assertTrue(q4.flp() is q4)
def test_uscl(self):
q1,q2 = quat(1,0,0,0),quat(1,1,1,0)
q3,q4 = quat(0,2,5,11),quat(0,1,2.5,5.5)
self.assertTrue(q1.cp().uscl(1) == q1)
self.assertFalse(q1.cp().uscl(3) == q1)
self.assertTrue(q2.cp().uscl(1) == q2)
self.assertFalse(q2.cp().uscl(3) == q2)
self.assertTrue(q3.cp().uscl(0.5) == q4)
self.assertTrue(q1.uscl(1) is q1)
def test_cnj(self):
q1,q2 = quat(1,0,0,0),quat(1,1,1,0)
q3,q4 = quat(-1,2,5,11),quat(1,-2,-5,-11)
self.assertTrue(q1.cp().cnj() == q1)
self.assertTrue(q1.cnj() is q1)
self.assertFalse(q2.cp().cnj() == q2)
self.assertFalse(q3.cnj() == q4)
def test_inv(self):
a1,v1 = dpr.PI4,vec3(0,0,1)
a2,v2 = dpr.threePI4,vec3(0,0,1)
q1,q2 = quat(1,0,0,0).av(a1,v1),quat(1,1,1,0).av(a2,v2)
self.assertEqual(q1.cp().cnj(),q1.inv())
self.assertEqual(q2.cp().cnj(),q2.inv())
self.assertFalse(q1.inv() is q1)
def test_add(self):
q1,q2 = quat(0.5,0.3,-2.2,3),quat(1,1.1,2,-0.5)
q3 = quat(1.5,1.4,-0.2,2.5)
self.assertEqual(q1.add(q2),q3)
self.assertFalse(q1.add(q2) is q1)
def test_sub(self):
q1,q2 = quat(0.5,0.3,-2.2,3),quat(1,1.1,2,-0.5)
q3 = quat(-0.5,-0.8,-4.2,3.5)
self.assertEqual(q1.sub(q2),q3)
self.assertFalse(q1.sub(q2) is q1)
def test_mul(self):
a1,v1 = dpr.PI4,vec3(0,0,1)
a2,v2 = dpr.threePI4,vec3(0,0,1)
q1,q2 = quat(1,0,0,0).av(a1,v1),quat(1,1,1,0).av(a2,v1)
q3 = quat(0,1,0,0).av(a1+a2,v2)
self.assertTrue(q1.mul(q2) == q3)
self.assertFalse(q1.mul(q2) is q1)
def test_rot(self):
a1,v1 = dpr.PI4,vec3(0,0,1)
a2,v2 = dpr.PI2,vec3(0,0,1)
q1,q2 = quat(1,0,0,0).av(a1,v1),quat(1,1,1,0).av(a1,v1)
q3 = quat(0,1,0,0).av(a2,v2)
self.assertTrue(q1.rot(q2) == q3)
self.assertTrue(q1.rot(q2) is q1)
#def test_rotps(self):
def test_dot(self):
a1,v1 = dpr.PI4,vec3(0,0,1)
a2,v2 = dpr.PI2,vec3(0,1,0)
q1,q2 = quat(1,0,0,0).av(a1,v1),quat(1,1,1,0).av(a1,v1)
q3 = quat(0,1,0,0).av(a2,v2)
q4 = quat(0,1,0,0).av(0,v1)
self.assertTrue(dpr.isnear(q1.dot(q2),q1.mag2()))
self.assertFalse(dpr.isnear(q1.dot(q3),0))
self.assertTrue(dpr.isnear(q3.dot(q4),q3.w))
def test_slerp(self):
a1,v1 = dpr.PI4,vec3(0,0,1)
a2,v2 = dpr.PI,vec3(0,0,1)
q1,q2 = quat(1,0,0,0).av(0,v1),quat(1,1,1,0).av(a1,v1)
q3 = quat(0,1,0,0).av(a2,v2)
self.assertEqual(q1.slerp(q3,0.25),q2)
self.assertFalse(q1.slerp(q3,0.25) is q1)
if __name__ == '__main__':
unittest.main()
|
mit
| -8,675,543,976,464,371,000
| 33.21608
| 63
| 0.545455
| false
| 2.185875
| true
| false
| false
|
carlos-jenkins/plantweb
|
test/sphinxconf/conf.py
|
1
|
9536
|
# -*- coding: utf-8 -*-
#
# PlantwebTest documentation build configuration file, created by
# sphinx-quickstart on Fri Jun 24 03:02:39 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
from __future__ import unicode_literals, absolute_import
from __future__ import print_function, division
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'plantweb.directive'
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'PlantwebTest'
copyright = '2016-2017, Carlos Jenkins'
author = 'Carlos Jenkins'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.0'
# The full version, including alpha/beta/rc tags.
release = '1.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
# html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
# html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
# html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'PlantwebTestdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
# 'preamble': '',
# Latex figure (float) alignment
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(
master_doc,
'PlantwebTest.tex',
'PlantwebTest Documentation',
'Carlos Jenkins',
'manual'
),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(
master_doc,
'plantwebtest',
'PlantwebTest Documentation',
[author],
1
)
]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(
master_doc,
'PlantwebTest',
'PlantwebTest Documentation',
author,
'PlantwebTest',
'One line description of project.',
'Miscellaneous'
),
]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
# texinfo_no_detailmenu = False
|
apache-2.0
| -4,205,343,303,474,952,700
| 30.681063
| 79
| 0.692429
| false
| 3.760252
| true
| false
| false
|
MrSwiss/SpockBot
|
spock/plugins/core/auth.py
|
1
|
5050
|
"""
Provides authorization functions for Mojang's login and session servers
"""
import hashlib
import json
# This is for python2 compatibility
try:
import urllib.request as request
from urllib.error import URLError
except ImportError:
import urllib2 as request
from urllib2 import URLError
import logging
import os
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import padding
from spock.mcp import yggdrasil
from spock.plugins.base import PluginBase
from spock.utils import pl_announce
logger = logging.getLogger('spock')
backend = default_backend()
# This function courtesy of barneygale
def java_hex_digest(digest):
d = int(digest.hexdigest(), 16)
if d >> 39 * 4 & 0x8:
d = "-%x" % ((-d) & (2 ** (40 * 4) - 1))
else:
d = "%x" % d
return d
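# java_hex_digest reproduces Java's signed hex rendering of a SHA-1 digest:
# when the top bit of the 160-bit value is set, it is printed as a negative
# two's-complement number (the '-%x' branch). This is the serverId hash
# format expected by the Mojang session server (see handle_encryption_request
# below).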
class AuthCore(object):
def __init__(self, authenticated, event):
self.event = event
self.authenticated = authenticated
self.username = None
self.selected_profile = None
self.shared_secret = None
self.ygg = yggdrasil.YggAuth()
def start_session(self, username, password=''):
rep = {}
if self.authenticated:
logger.info("AUTHCORE: Attempting login with username: %s",
username)
rep = self.ygg.authenticate(username, password)
if rep is None or 'error' in rep:
logger.error('AUTHCORE: Login Unsuccessful, Response: %s', rep)
self.event.emit('AUTH_ERR')
return rep
if 'selectedProfile' in rep:
self.selected_profile = rep['selectedProfile']
self.username = rep['selectedProfile']['name']
logger.info("AUTHCORE: Logged in as: %s", self.username)
logger.info("AUTHCORE: Selected Profile: %s",
self.selected_profile)
else:
self.username = username
else:
self.username = username
return rep
def gen_shared_secret(self):
self.shared_secret = os.urandom(16)
return self.shared_secret
@pl_announce('Auth')
class AuthPlugin(PluginBase):
requires = ('Event', 'Net')
defaults = {
'authenticated': True,
'auth_quit': True,
'sess_quit': True,
}
events = {
'AUTH_ERR': 'handle_auth_error',
'SESS_ERR': 'handle_session_error',
'LOGIN<Encryption Request': 'handle_encryption_request',
}
def __init__(self, ploader, settings):
super(AuthPlugin, self).__init__(ploader, settings)
self.authenticated = self.settings['authenticated']
self.auth_quit = self.settings['auth_quit']
self.sess_quit = self.settings['sess_quit']
self.auth = AuthCore(self.authenticated, self.event)
self.auth.gen_shared_secret()
ploader.provides('Auth', self.auth)
def handle_auth_error(self, name, data):
if self.auth_quit:
self.event.kill()
def handle_session_error(self, name, data):
if self.sess_quit:
self.event.kill()
# Encryption Key Request - Request for client to start encryption
def handle_encryption_request(self, name, packet):
pubkey_raw = packet.data['public_key']
if self.authenticated:
serverid = java_hex_digest(hashlib.sha1(
packet.data['server_id'].encode('ascii') +
self.auth.shared_secret +
pubkey_raw
))
logger.info(
"AUTHPLUGIN: Attempting to authenticate session with "
"sessionserver.mojang.com")
url = "https://sessionserver.mojang.com/session/minecraft/join"
data = json.dumps({
'accessToken': self.auth.ygg.access_token,
'selectedProfile': self.auth.selected_profile,
'serverId': serverid,
}).encode('utf-8')
headers = {'Content-Type': 'application/json'}
req = request.Request(url, data, headers)
try:
rep = request.urlopen(req).read().decode('ascii')
except URLError:
rep = 'Couldn\'t connect to sessionserver.mojang.com'
if rep != "":
logger.warning("AUTHPLUGIN: %s", rep)
self.event.emit('SESS_ERR')
else:
logger.info("AUTHPLUGIN: Session authentication successful")
pubkey = serialization.load_der_public_key(pubkey_raw, backend)
def encrypt(data):
return pubkey.encrypt(data, padding.PKCS1v15())
self.net.push_packet(
'LOGIN>Encryption Response',
{
'shared_secret': encrypt(self.auth.shared_secret),
'verify_token': encrypt(packet.data['verify_token']),
}
)
self.net.enable_crypto(self.auth.shared_secret)
|
mit
| -2,816,174,595,662,378,500
| 33.121622
| 79
| 0.588515
| false
| 4.129191
| false
| false
| false
|
britcey/ansible
|
lib/ansible/modules/network/junos/junos_config.py
|
1
|
12228
|
#!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'core'}
DOCUMENTATION = """
---
module: junos_config
version_added: "2.1"
author: "Peter Sprygada (@privateip)"
short_description: Manage configuration on devices running Juniper JUNOS
description:
- This module provides an implementation for working with the active
configuration running on Juniper JUNOS devices. It provides a set
of arguments for loading configuration, performing rollback operations
and zeroing the active configuration on the device.
extends_documentation_fragment: junos
options:
lines:
description:
- This argument takes a list of C(set) or C(delete) configuration
lines to push into the remote device. Each line must start with
either C(set) or C(delete). This argument is mutually exclusive
with the I(src) argument.
required: false
default: null
src:
description:
- The I(src) argument provides a path to the configuration file
to load into the remote system. The path can either be a full
system path to the configuration file if the value starts with /
or relative to the root of the implemented role or playbook.
This argument is mutually exclusive with the I(lines) argument.
required: false
default: null
version_added: "2.2"
src_format:
description:
- The I(src_format) argument specifies the format of the configuration
        found in I(src). If the I(src_format) argument is not provided,
the module will attempt to determine the format of the configuration
file specified in I(src).
required: false
default: null
choices: ['xml', 'set', 'text', 'json']
version_added: "2.2"
rollback:
description:
- The C(rollback) argument instructs the module to rollback the
current configuration to the identifier specified in the
argument. If the specified rollback identifier does not
exist on the remote device, the module will fail. To rollback
to the most recent commit, set the C(rollback) argument to 0.
required: false
default: null
zeroize:
description:
- The C(zeroize) argument is used to completely sanitize the
remote device configuration back to initial defaults. This
argument will effectively remove all current configuration
statements on the remote device.
required: false
default: null
confirm:
description:
- The C(confirm) argument will configure a time out value for
the commit to be confirmed before it is automatically
rolled back. If the C(confirm) argument is set to False, this
argument is silently ignored. If the value for this argument
is set to 0, the commit is confirmed immediately.
required: false
default: 0
comment:
description:
- The C(comment) argument specifies a text string to be used
when committing the configuration. If the C(confirm) argument
is set to False, this argument is silently ignored.
required: false
default: configured by junos_config
replace:
description:
- The C(replace) argument will instruct the remote device to
replace the current configuration hierarchy with the one specified
in the corresponding hierarchy of the source configuration loaded
from this module.
- Note this argument should be considered deprecated. To achieve
the equivalent, set the I(update) argument to C(replace). This argument
will be removed in a future release. The C(replace) and C(update) argument
is mutually exclusive.
required: false
choices: ['yes', 'no']
default: false
backup:
description:
- This argument will cause the module to create a full backup of
the current C(running-config) from the remote device before any
changes are made. The backup file is written to the C(backup)
folder in the playbook root directory. If the directory does not
exist, it is created.
required: false
default: no
choices: ['yes', 'no']
version_added: "2.2"
update:
description:
- This argument will decide how to load the configuration
        data, particularly when the candidate configuration and loaded
configuration contain conflicting statements. Following are
accepted values.
C(merge) combines the data in the loaded configuration with the
candidate configuration. If statements in the loaded configuration
conflict with statements in the candidate configuration, the loaded
statements replace the candidate ones.
C(override) discards the entire candidate configuration and replaces
it with the loaded configuration.
C(replace) substitutes each hierarchy level in the loaded configuration
for the corresponding level.
required: false
default: merge
choices: ['merge', 'override', 'replace']
version_added: "2.3"
requirements:
- junos-eznc
notes:
- This module requires the netconf system service be enabled on
the remote device being managed.
- Loading JSON-formatted configuration I(json) is supported
starting in Junos OS Release 16.1 onwards.
"""
EXAMPLES = """
- name: load configure file into device
junos_config:
src: srx.cfg
comment: update config
provider: "{{ netconf }}"
- name: load configure lines into device
junos_config:
lines:
- set interfaces ge-0/0/1 unit 0 description "Test interface"
- set vlans vlan01 description "Test vlan"
comment: update config
provider: "{{ netconf }}"
- name: rollback the configuration to id 10
junos_config:
rollback: 10
provider: "{{ netconf }}"
- name: zero out the current configuration
junos_config:
zeroize: yes
provider: "{{ netconf }}"
- name: confirm a previous commit
junos_config:
provider: "{{ netconf }}"
"""
RETURN = """
backup_path:
description: The full path to the backup file
returned: when backup is yes
type: string
sample: /playbooks/ansible/backup/config.2016-07-16@22:28:34
"""
import re
import json
import sys
from xml.etree import ElementTree
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.junos import get_diff, load_config, get_configuration
from ansible.module_utils.junos import junos_argument_spec
from ansible.module_utils.junos import check_args as junos_check_args
from ansible.module_utils.netconf import send_request
from ansible.module_utils.six import string_types
from ansible.module_utils._text import to_text, to_native
if sys.version_info < (2, 7):
from xml.parsers.expat import ExpatError
ParseError = ExpatError
else:
ParseError = ElementTree.ParseError
USE_PERSISTENT_CONNECTION = True
DEFAULT_COMMENT = 'configured by junos_config'
def check_args(module, warnings):
junos_check_args(module, warnings)
if module.params['replace'] is not None:
module.fail_json(msg='argument replace is deprecated, use update')
zeroize = lambda x: send_request(x, ElementTree.Element('request-system-zeroize'))
rollback = lambda x: get_diff(x)
def guess_format(config):
try:
json.loads(config)
return 'json'
except ValueError:
pass
try:
ElementTree.fromstring(config)
return 'xml'
except ParseError:
pass
if config.startswith('set') or config.startswith('delete'):
return 'set'
return 'text'
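# For example: guess_format('{"system": {}}') returns 'json',
# guess_format('<configuration/>') returns 'xml',
# guess_format('set system host-name r1') returns 'set',
# and anything else falls through to 'text'.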
def filter_delete_statements(module, candidate):
reply = get_configuration(module, format='set')
match = reply.find('.//configuration-set')
if match is None:
# Could not find configuration-set in reply, perhaps device does not support it?
return candidate
config = to_native(match.text, encoding='latin1')
modified_candidate = candidate[:]
for index, line in reversed(list(enumerate(candidate))):
if line.startswith('delete'):
newline = re.sub('^delete', 'set', line)
if newline not in config:
del modified_candidate[index]
return modified_candidate
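# For example, a candidate line 'delete interfaces ge-0/0/1' is kept only if
# the device's current set-format configuration contains a corresponding
# 'set interfaces ge-0/0/1' statement; otherwise it is dropped so the load
# does not raise on a delete that maps to nothing.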
def configure_device(module, warnings):
candidate = module.params['lines'] or module.params['src']
kwargs = {
'comment': module.params['comment'],
'commit': not module.check_mode
}
if module.params['confirm'] > 0:
kwargs.update({
'confirm': True,
'confirm_timeout': module.params['confirm']
})
config_format = None
if module.params['src']:
config_format = module.params['src_format'] or guess_format(str(candidate))
if config_format == 'set':
kwargs.update({'format': 'text', 'action': 'set'})
else:
kwargs.update({'format': config_format, 'action': module.params['update']})
if isinstance(candidate, string_types):
candidate = candidate.split('\n')
# this is done to filter out `delete ...` statements which map to
# nothing in the config as that will cause an exception to be raised
if any((module.params['lines'], config_format == 'set')):
candidate = filter_delete_statements(module, candidate)
kwargs['format'] = 'text'
kwargs['action'] = 'set'
return load_config(module, candidate, warnings, **kwargs)
def main():
""" main entry point for module execution
"""
argument_spec = dict(
lines=dict(type='list'),
src=dict(type='path'),
src_format=dict(choices=['xml', 'text', 'set', 'json']),
# update operations
update=dict(default='merge', choices=['merge', 'override', 'replace', 'update']),
# deprecated replace in Ansible 2.3
replace=dict(type='bool'),
confirm=dict(default=0, type='int'),
comment=dict(default=DEFAULT_COMMENT),
# config operations
backup=dict(type='bool', default=False),
rollback=dict(type='int'),
zeroize=dict(default=False, type='bool'),
)
argument_spec.update(junos_argument_spec)
mutually_exclusive = [('lines', 'src', 'rollback', 'zeroize')]
module = AnsibleModule(argument_spec=argument_spec,
mutually_exclusive=mutually_exclusive,
supports_check_mode=True)
warnings = list()
check_args(module, warnings)
result = {'changed': False, 'warnings': warnings}
if module.params['backup']:
for conf_format in ['set', 'text']:
reply = get_configuration(module, format=conf_format)
match = reply.find('.//configuration-%s' % conf_format)
if match is not None:
break
else:
module.fail_json(msg='unable to retrieve device configuration')
result['__backup__'] = match.text.strip()
if module.params['rollback']:
if not module.check_mode:
diff = rollback(module)
if module._diff:
result['diff'] = {'prepared': diff}
result['changed'] = True
elif module.params['zeroize']:
if not module.check_mode:
zeroize(module)
result['changed'] = True
else:
diff = configure_device(module, warnings)
if diff:
if module._diff:
result['diff'] = {'prepared': diff}
result['changed'] = True
module.exit_json(**result)
if __name__ == '__main__':
main()
|
gpl-3.0
| -4,767,133,261,227,598,000
| 32.966667
| 89
| 0.66454
| false
| 4.343872
| true
| false
| false
|
Alwnikrotikz/paimei
|
console/modules/_PAIMEIdiff/DiffModules/crc.py
|
1
|
2243
|
#
# $Id$
#
from defines import *
class crc:
def __init__(self, parent=None):
self.attributes = {} # initialize attributes
self.attributes["Match"] = 1 # Match attribute set to 1 tells the main program we can be used to match
self.attributes["Diff"] = 1 # Diff attribute set to 1 tells the main program we can be used to diff
self.attributes["Level"] = FUNCTION_LEVEL | BASIC_BLOCK_LEVEL # these flags indicated we can diff/match both functions and basic blocks
self.parent = parent # set up the parent
self.module_name = "CRC" # give the module a name
self.author = "Peter Silberman" # author name
self.description = "CRC module uses the crc signature"
self.date = "09/22/06"
self.homepage = "http://www.openrce.org"
self.contact = "peter.silberman@gmail.com"
self.accuracy = ACCURACY_HIGH
self.parent.register_match_function( self.match_function_by_crc, self ) # register a function matching routine
self.parent.register_match_basic_block( self.match_basic_block_by_crc, self ) # register a basic block matching routine
self.parent.register_diff_function( self.diff_function_by_crc, self ) # register a function diffing routine
self.parent.register_module(self) # register our module in the module table
def match_function_by_crc(self, function_a, function_b):
if function_a.ext["PAIMEIDiffFunction"].crc == function_b.ext["PAIMEIDiffFunction"].crc:
return 1
else:
return 0
def match_basic_block_by_crc(self, bb_a, bb_b):
if bb_a.ext["PAIMEIDiffBasicBlock"].crc == bb_b.ext["PAIMEIDiffBasicBlock"].crc:
return 1
else:
return 0
def diff_function_by_crc(self, function_a, function_b):
if function_a.ext["PAIMEIDiffFunction"].crc != function_b.ext["PAIMEIDiffFunction"].crc:
return 1 # differing CRCs mean the function bodies differ
else:
return 0
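# Illustrative sketch (assumed host API): PAIMEIDiff consults the registered
# routines pairwise, roughly:
#
#     if crc_module.match_function_by_crc(func_a, func_b):
#         record_match(func_a, func_b)      # record_match is hypothetical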
|
gpl-2.0
| -5,157,040,256,780,206,000
| 46.76087
| 145
| 0.57111
| false
| 4.07078
| false
| false
| false
|
legoktm/legobot-old
|
toolserver/pywp/timedate.py
|
1
|
2209
|
#!/usr/bin/python
# (C) Legoktm 2008-2011, MIT License
import time, datetime
"""
Not to be run as a file
Contains lists and dictionaries to help with dates
Only for the English language; translations are welcome.
"""
MonthNames = ['January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December' ]
def monthname(number):
"""
Returns the month name
for the given integer.
"""
return MonthNames[int(number)-1]
days_in_month = {
1: 31,
2: 29,
3: 31,
4: 30,
5: 31,
6: 30,
7: 31,
8: 31,
9: 30,
10: 31,
11: 30,
12: 31
}
num_to_month = {
1:'January',
2:'February',
3:'March',
4:'April',
5:'May',
6:'June',
7:'July',
8:'August',
9:'September',
10:'October',
11:'November',
12:'December',
}
month_to_num = {
'January': 1,
'February': 2,
'March': 3,
'April': 4,
'May': 5,
'June': 6,
'July': 7,
'August': 8,
'September': 9,
'October': 10,
'November': 11,
'December': 12,
}
def daysinmonth(var):
"""
Returns the number of days in a month.
var = month name or number
"""
try:
int(var)
num = True
except ValueError:
num = False
if num:
return days_in_month[int(var)]
number = month_to_num[var]
return days_in_month[number]
def currtime():
"""
Returns the current time as a Unix timestamp (float), via time.time()
"""
return time.time()
def currentmonth():
"""
Returns the integer of the current month.
To get the current month name, use monthname(currentmonth())
"""
return time.gmtime(currtime()).tm_mon
def currentyear():
return time.gmtime(currtime()).tm_year
def numwithzero(num):
"""
Returns the number as a str,
zero-padded to two digits.
"""
num = int(num)
if num >= 10:
return str(num)
else:
return '0' + str(num)
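# e.g. numwithzero(5) -> '05', numwithzero(12) -> '12'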
def monthname(num):
"""
Returns the name of the month based on the integer.
"""
return num_to_month[int(num)]
def convertts(ts):
"""
Converts MediaWiki timestamps (ISO 8601)
to a human readable one.
"""
epochts = int(time.mktime(time.strptime(ts, '%Y-%m-%dT%H:%M:%SZ')))
st = time.gmtime(epochts)
year = str(st.tm_year)
hour = str(st.tm_hour)
min = str(st.tm_min)
monthname1 = monthname(st.tm_mon)
day = str(st.tm_mday)
return '%s:%s, %s %s %s' %(hour, min, day, monthname1, year)
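# Illustrative usage (doctest-style sketch):
#     >>> monthname(3)
#     'March'
#     >>> daysinmonth('February')
#     29
#     >>> convertts('2011-01-05T12:30:00Z')
#     '12:30, 5 January 2011'
# (the hour in convertts() output can shift with the local timezone, since
# time.mktime() interprets the parsed struct as local time)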
|
mit
| 8,878,393,515,859,586,000
| 17.720339
| 139
| 0.634224
| false
| 2.44629
| false
| false
| false
|
vmuriart/grako
|
grako/contexts.py
|
1
|
22161
|
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import sys
import functools
from collections import namedtuple
from contextlib import contextmanager
from grako.util import notnone, ustr, prune_dict, is_list, info, safe_name
from grako.ast import AST
from grako import buffering
from grako import color
from grako.exceptions import (
FailedCut,
FailedLeftRecursion,
FailedLookahead,
FailedParse,
FailedPattern,
FailedSemantics,
FailedKeywordSemantics,
FailedToken,
OptionSucceeded
)
__all__ = ['ParseInfo', 'ParseContext']
ParseInfo = namedtuple(
'ParseInfo',
[
'buffer',
'rule',
'pos',
'endpos'
]
)
# decorator for rule implementation methods
def graken(*params, **kwparams):
def decorator(rule):
@functools.wraps(rule)
def wrapper(self):
name = rule.__name__
# remove the single leading and trailing underscore
# that the parser generator added
name = name[1:-1]
return self._call(rule, name, params, kwparams)
return wrapper
return decorator
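# Illustrative sketch (assumed generated code): a grako-generated parser
# defines one method per grammar rule, wrapped by @graken, e.g.
#
#     @graken()
#     def _number_(self):
#         self._pattern(r'\d+')
#
# The wrapper strips the leading and trailing underscore, so the rule is
# dispatched as self._call(rule, 'number', params, kwparams).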
class Closure(list):
pass
class ParseContext(object):
def __init__(self,
semantics=None,
parseinfo=False,
trace=False,
encoding='utf-8',
comments_re=None,
eol_comments_re=None,
whitespace=None,
ignorecase=False,
nameguard=None,
memoize_lookaheads=True,
left_recursion=True,
trace_length=72,
trace_separator=':',
trace_filename=False,
colorize=False,
keywords=None,
namechars='',
**kwargs):
super(ParseContext, self).__init__()
self._buffer = None
self.semantics = semantics
self.encoding = encoding
self.parseinfo = parseinfo
self.trace = trace
self.trace_length = trace_length
self.trace_separator = trace_separator
self.trace_filename = trace_filename
self.comments_re = comments_re
self.eol_comments_re = eol_comments_re
self.whitespace = whitespace
self.ignorecase = ignorecase
self.nameguard = nameguard
self.memoize_lookaheads = memoize_lookaheads
self.left_recursion = left_recursion
self.namechars = namechars
self._ast_stack = [AST()]
self._concrete_stack = [None]
self._rule_stack = []
self._cut_stack = [False]
self._memoization_cache = dict()
self._last_node = None
self._state = None
self._lookahead = 0
self._recursive_results = dict()
self._recursive_eval = []
self._recursive_head = []
self.colorize = colorize
self.keywords = set(keywords or [])
def _reset(self,
text=None,
filename=None,
semantics=None,
trace=None,
comments_re=None,
eol_comments_re=None,
whitespace=None,
ignorecase=None,
nameguard=None,
memoize_lookaheads=None,
left_recursion=None,
colorize=False,
namechars='',
**kwargs):
if ignorecase is None:
ignorecase = self.ignorecase
if nameguard is None:
nameguard = self.nameguard
if memoize_lookaheads is not None:
self.memoize_lookaheads = memoize_lookaheads
if left_recursion is not None:
self.left_recursion = left_recursion
if trace is not None:
self.trace = trace
if semantics is not None:
self.semantics = semantics
if colorize is not None:
self.colorize = colorize
if self.colorize:
color.init()
if isinstance(text, buffering.Buffer):
buffer = text
else:
buffer = buffering.Buffer(
text,
filename=filename,
comments_re=comments_re or self.comments_re,
eol_comments_re=eol_comments_re or self.eol_comments_re,
whitespace=notnone(whitespace, default=self.whitespace),
ignorecase=ignorecase,
nameguard=nameguard,
namechars=namechars or self.namechars,
**kwargs)
self._buffer = buffer
self._ast_stack = [AST()]
self._concrete_stack = [None]
self._rule_stack = []
self._cut_stack = [False]
self._memoization_cache = dict()
self._last_node = None
self._state = None
self._lookahead = 0
self._recursive_results = dict()
self._recursive_eval = []
self._recursive_head = []
def parse(self,
text,
rule_name='start',
filename=None,
semantics=None,
trace=False,
whitespace=None,
**kwargs):
try:
self.parseinfo = kwargs.pop('parseinfo', self.parseinfo)
self._reset(
text=text,
filename=filename,
semantics=semantics,
trace=trace or self.trace,
whitespace=whitespace if whitespace is not None else self.whitespace,
**kwargs
)
rule = self._find_rule(rule_name)
result = rule()
self.ast[rule_name] = result
return result
except FailedCut as e:
raise e.nested
finally:
self._clear_cache()
def goto(self, pos):
self._buffer.goto(pos)
@property
def last_node(self):
return self._last_node
@last_node.setter
def last_node(self, value):
self._last_node = value
@property
def _pos(self):
return self._buffer.pos
def _clear_cache(self):
self._memoization_cache = dict()
self._recursive_results = dict()
def _goto(self, pos):
self._buffer.goto(pos)
def _next_token(self):
self._buffer.next_token()
@property
def ast(self):
return self._ast_stack[-1]
@ast.setter
def ast(self, value):
self._ast_stack[-1] = value
def name_last_node(self, name):
self.ast[name] = self.last_node
def add_last_node_to_name(self, name):
self.ast.setlist(name, self.last_node)
def _push_ast(self):
self._push_cst()
self._ast_stack.append(AST())
def _pop_ast(self):
self._pop_cst()
return self._ast_stack.pop()
@property
def cst(self):
return self._concrete_stack[-1]
@cst.setter
def cst(self, value):
self._concrete_stack[-1] = value
def _push_cst(self):
self._concrete_stack.append(None)
def _pop_cst(self):
return self._concrete_stack.pop()
def _add_cst_node(self, node):
if node is None:
return
previous = self.cst
if previous is None:
self.cst = self._copy_node(node)
elif is_list(previous):
previous.append(node)
else:
self.cst = [previous, node]
def _extend_cst(self, node):
if node is None:
return
previous = self.cst
if previous is None:
self.cst = self._copy_node(node)
elif is_list(node):
if is_list(previous):
previous.extend(node)
else:
self.cst = [previous] + node
elif is_list(previous):
previous.append(node)
else:
self.cst = [previous, node]
def _copy_node(self, node):
if node is None:
return None
elif is_list(node):
return node[:]
else:
return node
def _is_cut_set(self):
return self._cut_stack[-1]
def _cut(self):
self._cut_stack[-1] = True
# Kota Mizushima et al say that we can throw away
# memos for previous positions in the buffer under
# certain circumstances, without affecting the linearity
# of PEG parsing.
# http://goo.gl/VaGpj
#
# We adopt the heuristic of always dropping the cache for
# positions less than the current cut position. It remains to
# be proven if doing it this way affects linearity. Empirically,
# it hasn't.
cutpos = self._pos
def prune_cache(cache):
prune_dict(cache, lambda k, _: k[0] < cutpos)
prune_cache(self._memoization_cache)
prune_cache(self._recursive_results)
def _push_cut(self):
self._cut_stack.append(False)
def _pop_cut(self):
return self._cut_stack.pop()
def _enter_lookahead(self):
self._lookahead += 1
def _leave_lookahead(self):
self._lookahead -= 1
def _memoization(self):
return self.memoize_lookaheads or self._lookahead == 0
def _rulestack(self):
stack = self.trace_separator.join(self._rule_stack)
if len(stack) > self.trace_length:
stack = '...' + stack[-self.trace_length:].lstrip(self.trace_separator)
return stack
def _find_rule(self, name):
return None
def _find_semantic_rule(self, name):
if self.semantics is None:
return None, None
postproc = getattr(self.semantics, '_postproc', None)
if not callable(postproc):
postproc = None
rule = getattr(self.semantics, safe_name(name), None)
if callable(rule):
return rule, postproc
rule = getattr(self.semantics, '_default', None)
if callable(rule):
return rule, postproc
return None, postproc
def _trace(self, msg, *params):
if self.trace:
msg = msg % params
info(ustr(msg), file=sys.stderr)
def _trace_event(self, event):
if self.trace:
fname = ''
if self.trace_filename:
fname = self._buffer.line_info().filename + '\n'
self._trace('%s \n%s%s \n',
event + ' ' + self._rulestack(),
color.Style.DIM + fname,
color.Style.NORMAL + self._buffer.lookahead().rstrip('\r\n')
)
def _trace_match(self, token, name=None, failed=False):
if self.trace:
fname = ''
if self.trace_filename:
fname = self._buffer.line_info().filename + '\n'
name = '/%s/' % name if name else ''
fgcolor = color.Fore.GREEN + '< ' if not failed else color.Fore.RED + '! '
self._trace(
color.Style.BRIGHT + fgcolor + '"%s" %s\n%s%s\n',
token,
name,
color.Style.DIM + fname,
color.Style.NORMAL + self._buffer.lookahead().rstrip('\r\n')
)
def _error(self, item, etype=FailedParse):
raise etype(
self._buffer,
list(reversed(self._rule_stack[:])),
item
)
def _fail(self):
self._error('fail')
def _get_parseinfo(self, node, name, start):
return ParseInfo(
self._buffer,
name,
start,
self._pos
)
def _call(self, rule, name, params, kwparams):
self._rule_stack.append(name)
pos = self._pos
try:
self._trace_event(color.Fore.YELLOW + color.Style.BRIGHT + '>')
self._last_node = None
node, newpos, newstate = self._invoke_rule(rule, name, params, kwparams)
self._goto(newpos)
self._state = newstate
self._trace_event(color.Fore.GREEN + color.Style.BRIGHT + '<')
self._add_cst_node(node)
self._last_node = node
return node
except FailedPattern:
self._error('Expecting <%s>' % name)
except FailedParse:
self._trace_event(color.Fore.RED + color.Style.BRIGHT + '!')
self._goto(pos)
raise
finally:
self._rule_stack.pop()
def _invoke_rule(self, rule, name, params, kwparams):
cache = self._memoization_cache
pos = self._pos
key = (pos, rule, self._state)
if key in cache:
memo = cache[key]
memo = self._left_recursion_check(name, key, memo)
if isinstance(memo, Exception):
raise memo
return memo
self._set_left_recursion_guard(name, key)
self._push_ast()
try:
if name[0].islower():
self._next_token()
rule(self)
node = self.ast
if not node:
node = self.cst
elif '@' in node:
node = node['@'] # override the AST
elif self.parseinfo:
node._parseinfo = self._get_parseinfo(
node,
name,
pos
)
node = self._invoke_semantic_rule(name, node, params, kwparams)
result = (node, self._pos, self._state)
result = self._left_recurse(rule, name, pos, key, result, params, kwparams)
if self._memoization() and not self._in_recursive_loop():
cache[key] = result
return result
except FailedParse as e:
if self._memoization():
cache[key] = e
raise
finally:
self._pop_ast()
def _set_left_recursion_guard(self, name, key):
exception = FailedLeftRecursion(
self._buffer,
list(reversed(self._rule_stack[:])),
name
)
# Alessandro Warth et al say that we can deal with
# direct and indirect left-recursion by seeding the
# memoization cache with a parse failure.
#
# http://www.vpri.org/pdf/tr2007002_packrat.pdf
#
if self._memoization():
self._memoization_cache[key] = exception
def _left_recursion_check(self, name, key, memo):
if isinstance(memo, FailedLeftRecursion) and self.left_recursion:
# At this point we know we've already seen this rule
# at this position. Either we've got a potential
# result from a previous pass that we can return, or
# we make a note of the rule so that we can take
# action as we unwind the rule stack.
if key in self._recursive_results:
memo = self._recursive_results[key]
else:
self._recursive_head.append(name)
return memo
def _in_recursive_loop(self):
head = self._recursive_head
return head and head[-1] in self._rule_stack
def _left_recurse(self, rule, name, pos, key, result, params, kwparams):
if self._memoization():
self._recursive_results[key] = result
# If the current name is in the head, then we've just
# unwound to the highest rule in the recursion
cache = self._memoization_cache
last_pos = pos
if (
[name] == self._recursive_head[-1:] and
self._recursive_head[-1:] != self._recursive_eval[-1:]
):
# Repeatedly apply the rule until it can't consume any
# more. We store the last good result each time. Prior
# to doing so we reset the position and remove any
# failures from the cache.
last_result = result
self._recursive_eval.append(name)
while self._pos > last_pos:
last_result = result
last_pos = self._pos
self._goto(pos)
prune_dict(cache, lambda _, v: isinstance(v, FailedParse))
try:
result = self._invoke_rule(rule, name, params, kwparams)
except FailedParse:
pass
result = last_result
self._recursive_results = dict()
self._recursive_head.pop()
self._recursive_eval.pop()
return result
def _invoke_semantic_rule(self, name, node, params, kwparams):
semantic_rule, postproc = self._find_semantic_rule(name)
try:
if semantic_rule:
node = semantic_rule(node, *(params or ()), **(kwparams or {}))
if postproc is not None:
postproc(self, node)
return node
except FailedSemantics as e:
self._error(str(e), FailedParse)
def _token(self, token):
self._next_token()
if self._buffer.match(token) is None:
self._trace_match(token, failed=True)
self._error(token, etype=FailedToken)
self._trace_match(token)
self._add_cst_node(token)
self._last_node = token
return token
def _constant(self, literal):
self._next_token()
self._trace_match(literal)
self._add_cst_node(literal)
self._last_node = literal
return literal
def _pattern(self, pattern):
token = self._buffer.matchre(pattern)
if token is None:
self._trace_match('', pattern, failed=True)
self._error(pattern, etype=FailedPattern)
self._trace_match(token, pattern)
self._add_cst_node(token)
self._last_node = token
return token
def _eof(self):
return self._buffer.atend()
def _eol(self):
return self._buffer.ateol()
def _check_eof(self):
self._next_token()
if not self._buffer.atend():
self._error('Expecting end of text.')
@contextmanager
def _try(self):
p = self._pos
s = self._state
ast_copy = self.ast.copy()
self._push_ast()
self.last_node = None
try:
self.ast = ast_copy
yield
ast = self.ast
cst = self.cst
except:
self._goto(p)
self._state = s
raise
finally:
self._pop_ast()
self.ast = ast
self._extend_cst(cst)
self.last_node = cst
@contextmanager
def _option(self):
self.last_node = None
self._push_cut()
try:
with self._try():
yield
raise OptionSucceeded()
except FailedCut:
raise
except FailedParse as e:
if self._is_cut_set():
raise FailedCut(e)
finally:
self._pop_cut()
@contextmanager
def _choice(self):
self.last_node = None
with self._try():
try:
yield
except OptionSucceeded:
pass
@contextmanager
def _optional(self):
self.last_node = None
with self._choice():
with self._option():
yield
@contextmanager
def _group(self):
self._push_cst()
try:
yield
cst = self.cst
finally:
self._pop_cst()
self._extend_cst(cst)
self.last_node = cst
@contextmanager
def _if(self):
p = self._pos
s = self._state
self._push_ast()
self._enter_lookahead()
try:
yield
finally:
self._leave_lookahead()
self._goto(p)
self._state = s
self._pop_ast() # simply discard
@contextmanager
def _ifnot(self):
try:
with self._if():
yield
except FailedParse:
pass
else:
self._error('', etype=FailedLookahead)
@contextmanager
def _ignore(self):
self._push_cst()
try:
self.cst = None
yield
finally:
self._pop_cst()
def _repeater(self, block, prefix=None):
while True:
self._push_cut()
self._push_cst()
try:
p = self._pos
with self._try():
if prefix:
with self._ignore():
prefix()
self._cut()
block()
cst = self.cst
if self._pos == p:
self._error('empty closure')
except FailedCut:
raise
except FailedParse as e:
if self._is_cut_set():
raise FailedCut(e)
break
finally:
self._pop_cst()
self._pop_cut()
self._add_cst_node(cst)
def _closure(self, block):
self._push_cst()
try:
self.cst = []
self._repeater(block)
cst = Closure(self.cst)
finally:
self._pop_cst()
self._add_cst_node(cst)
self.last_node = cst
return cst
def _positive_closure(self, block, prefix=None):
self._push_cst()
try:
self.cst = None
with self._try():
block()
self.cst = [self.cst]
self._repeater(block, prefix=prefix)
cst = Closure(self.cst)
finally:
self._pop_cst()
self._add_cst_node(cst)
self.last_node = cst
return cst
def _empty_closure(self):
cst = Closure([])
self._add_cst_node(cst)
self.last_node = cst
return cst
def _check_name(self):
name = self.last_node
if (name.upper() if self.ignorecase else name) in self.keywords:
raise FailedKeywordSemantics('"%s" is a reserved word' % name)
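# Illustrative sketch (assumed generated subclass): grako-generated parsers
# extend this context and provide _find_rule(), after which parsing is driven
# as, e.g.:
#
#     parser = MyLanguageParser(semantics=MySemantics())  # hypothetical names
#     ast = parser.parse('1 + 2', rule_name='expression')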
|
bsd-2-clause
| 3,773,346,253,559,332,400
| 27.930809
| 87
| 0.515184
| false
| 4.216324
| false
| false
| false
|
meejah/txtorcon
|
txtorcon/socks.py
|
1
|
23368
|
# in-progress; implementing SOCKS5 client-side stuff as extended by
# tor because txsocksx will not be getting Python3 support any time
# soon, and its underlying dependency (Parsley) also doesn't support
# Python3. Also, Tor's SOCKS5 implementation is especially simple,
# since it doesn't do BIND or UDP ASSOCIATE.
from __future__ import print_function
import six
import struct
from socket import inet_pton, inet_ntoa, inet_aton, AF_INET6, AF_INET
from twisted.internet.defer import inlineCallbacks, returnValue, Deferred
from twisted.internet.protocol import Protocol, Factory
from twisted.internet.address import IPv4Address, IPv6Address, HostnameAddress
from twisted.python.failure import Failure
from twisted.protocols import portforward
from twisted.protocols import tls
from twisted.internet.interfaces import IStreamClientEndpoint
from zope.interface import implementer
import ipaddress
import automat
from txtorcon import util
__all__ = (
'resolve',
'resolve_ptr',
'SocksError',
'GeneralServerFailureError',
'ConnectionNotAllowedError',
'NetworkUnreachableError',
'HostUnreachableError',
'ConnectionRefusedError',
'TtlExpiredError',
'CommandNotSupportedError',
'AddressTypeNotSupportedError',
'TorSocksEndpoint',
)
def _create_ip_address(host, port):
if not isinstance(host, six.text_type):
raise ValueError(
"'host' must be {}, not {}".format(six.text_type, type(host))
)
try:
a = ipaddress.ip_address(host)
except ValueError:
a = None
if isinstance(a, ipaddress.IPv4Address):
return IPv4Address('TCP', host, port)
if isinstance(a, ipaddress.IPv6Address):
return IPv6Address('TCP', host, port)
addr = HostnameAddress(host, port)
addr.host = host
return addr
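# Illustrative results:
#     _create_ip_address(u'10.0.0.1', 80)    -> IPv4Address('TCP', u'10.0.0.1', 80)
#     _create_ip_address(u'::1', 80)         -> IPv6Address('TCP', u'::1', 80)
#     _create_ip_address(u'example.com', 80) -> HostnameAddress (host/port set)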
class _SocksMachine(object):
"""
trying to prototype the SOCKS state-machine in automat
This is a SOCKS state machine to make a single request.
"""
_machine = automat.MethodicalMachine()
SUCCEEDED = 0x00
REPLY_IPV4 = 0x01
REPLY_HOST = 0x03
REPLY_IPV6 = 0x04
# XXX address = (host, port) instead
def __init__(self, req_type, host,
port=0,
on_disconnect=None,
on_data=None,
create_connection=None):
if req_type not in self._dispatch:
raise ValueError(
"Unknown request type '{}'".format(req_type)
)
if req_type == 'CONNECT' and create_connection is None:
raise ValueError(
"create_connection function required for '{}'".format(
req_type
)
)
if not isinstance(host, (bytes, str, six.text_type)):
raise ValueError(
"'host' must be text (not {})".format(type(host))
)
# XXX what if addr is None?
self._req_type = req_type
self._addr = _create_ip_address(six.text_type(host), port)
self._data = b''
self._on_disconnect = on_disconnect
self._create_connection = create_connection
# XXX FIXME do *one* of these:
self._on_data = on_data
self._outgoing_data = []
# the other side of our proxy
self._sender = None
self._when_done = util.SingleObserver()
def when_done(self):
"""
Returns a Deferred that fires when we're done
"""
return self._when_done.when_fired()
def _data_to_send(self, data):
if self._on_data:
self._on_data(data)
else:
self._outgoing_data.append(data)
def send_data(self, callback):
"""
drain all pending data by calling `callback()` on it
"""
# a "for x in self._outgoing_data" would potentially be more
# efficient, but then there's no good way to bubble exceptions
# from callback() out without lying about how much data we
# processed .. or eat the exceptions in here.
while len(self._outgoing_data):
data = self._outgoing_data.pop(0)
callback(data)
def feed_data(self, data):
# I feel like maybe i'm doing all this buffering-stuff
# wrong. but I also don't want a bunch of "received 1 byte"
# etc states hanging off everything that can "get data"
self._data += data
self.got_data()
@_machine.output()
def _parse_version_reply(self):
"waiting for a version reply"
if len(self._data) >= 2:
reply = self._data[:2]
self._data = self._data[2:]
(version, method) = struct.unpack('BB', reply)
if version == 5 and method in [0x00, 0x02]:
self.version_reply(method)
else:
if version != 5:
self.version_error(SocksError(
"Expected version 5, got {}".format(version)))
else:
self.version_error(SocksError(
"Wanted method 0 or 2, got {}".format(method)))
def _parse_ipv4_reply(self):
if len(self._data) >= 10:
addr = inet_ntoa(self._data[4:8])
port = struct.unpack('H', self._data[8:10])[0]
self._data = self._data[10:]
if self._req_type == 'CONNECT':
self.reply_ipv4(addr, port)
else:
self.reply_domain_name(addr)
def _parse_ipv6_reply(self):
if len(self._data) >= 22:
addr = self._data[4:20]
port = struct.unpack('H', self._data[20:22])[0]
self._data = self._data[22:]
self.reply_ipv6(addr, port)
def _parse_domain_name_reply(self):
assert len(self._data) >= 8 # _parse_request_reply checks this
addrlen = struct.unpack('B', self._data[4:5])[0]
# may simply not have received enough data yet...
if len(self._data) < (5 + addrlen + 2):
return
addr = self._data[5:5 + addrlen]
# port = struct.unpack('H', self._data[5 + addrlen:5 + addrlen + 2])[0]
self._data = self._data[5 + addrlen + 2:]
self.reply_domain_name(addr)
@_machine.output()
def _parse_request_reply(self):
"waiting for a reply to our request"
# we need at least 6 bytes of data: 4 for the "header", such
# as it is, and 2 more if it's DOMAINNAME (for the size) or 4
# or 16 more if it's an IPv4/6 address reply. plus there's 2
# bytes on the end for the bound port.
if len(self._data) < 8:
return
msg = self._data[:4]
# not changing self._data yet, in case we've not got
# enough bytes so far.
(version, reply, _, typ) = struct.unpack('BBBB', msg)
if version != 5:
self.reply_error(SocksError(
"Expected version 5, got {}".format(version)))
return
if reply != self.SUCCEEDED:
self.reply_error(_create_socks_error(reply))
return
reply_dispatcher = {
self.REPLY_IPV4: self._parse_ipv4_reply,
self.REPLY_HOST: self._parse_domain_name_reply,
self.REPLY_IPV6: self._parse_ipv6_reply,
}
try:
method = reply_dispatcher[typ]
except KeyError:
self.reply_error(SocksError(
"Unexpected response type {}".format(typ)))
return
method()
@_machine.output()
def _make_connection(self, addr, port):
"make our proxy connection"
sender = self._create_connection(addr, port)
# XXX look out! we're depending on this "sender" implementing
# certain Twisted APIs, and the state-machine shouldn't depend
# on that.
# XXX also, if sender implements producer/consumer stuff, we
# should register ourselves (and implement it to) -- but this
# should really be taking place outside the state-machine in
# "the I/O-doing" stuff
self._sender = sender
self._when_done.fire(sender)
@_machine.output()
def _domain_name_resolved(self, domain):
self._when_done.fire(domain)
@_machine.input()
def connection(self):
"begin the protocol (i.e. connection made)"
@_machine.input()
def disconnected(self, error):
"the connection has gone away"
@_machine.input()
def got_data(self):
"we recevied some data and buffered it"
@_machine.input()
def version_reply(self, auth_method):
"the SOCKS server replied with a version"
@_machine.input()
def version_error(self, error):
"the SOCKS server replied, but we don't understand"
@_machine.input()
def reply_error(self, error):
"the SOCKS server replied with an error"
@_machine.input()
def reply_ipv4(self, addr, port):
"the SOCKS server told me an IPv4 addr, port"
@_machine.input()
def reply_ipv6(self, addr, port):
"the SOCKS server told me an IPv6 addr, port"
@_machine.input()
def reply_domain_name(self, domain):
"the SOCKS server told me a domain-name"
@_machine.input()
def answer(self):
"the SOCKS server replied with an answer"
@_machine.output()
def _send_version(self):
"sends a SOCKS version reply"
self._data_to_send(
# for anonymous(0) *and* authenticated (2): struct.pack('BBBB', 5, 2, 0, 2)
struct.pack('BBB', 5, 1, 0)
)
@_machine.output()
def _disconnect(self, error):
"done"
if self._on_disconnect:
self._on_disconnect(str(error))
if self._sender:
self._sender.connectionLost(Failure(error))
self._when_done.fire(Failure(error))
@_machine.output()
def _send_request(self, auth_method):
"send the request (connect, resolve or resolve_ptr)"
assert auth_method == 0x00 # "no authentication required"
return self._dispatch[self._req_type](self)
@_machine.output()
def _relay_data(self):
"relay any data we have"
if self._data:
d = self._data
self._data = b''
# XXX this is "doing I/O" in the state-machine and it
# really shouldn't be ... probably want a passed-in
# "relay_data" callback or similar?
self._sender.dataReceived(d)
def _send_connect_request(self):
"sends CONNECT request"
# XXX needs to support v6 ... or something else does
host = self._addr.host
port = self._addr.port
if isinstance(self._addr, (IPv4Address, IPv6Address)):
is_v6 = isinstance(self._addr, IPv6Address)
self._data_to_send(
struct.pack(
'!BBBB4sH',
5, # version
0x01, # command
0x00, # reserved
0x04 if is_v6 else 0x01,
inet_pton(AF_INET6 if is_v6 else AF_INET, host),
port,
)
)
else:
host = host.encode('ascii')
self._data_to_send(
struct.pack(
'!BBBBB{}sH'.format(len(host)),
5, # version
0x01, # command
0x00, # reserved
0x03,
len(host),
host,
port,
)
)
@_machine.output()
def _send_resolve_request(self):
"sends RESOLVE_PTR request (Tor custom)"
host = self._addr.host.encode()
self._data_to_send(
struct.pack(
'!BBBBB{}sH'.format(len(host)),
5, # version
0xF0, # command
0x00, # reserved
0x03, # DOMAINNAME
len(host),
host,
0, # self._addr.port?
)
)
@_machine.output()
def _send_resolve_ptr_request(self):
"sends RESOLVE_PTR request (Tor custom)"
addr_type = 0x04 if isinstance(self._addr, ipaddress.IPv4Address) else 0x01
encoded_host = inet_aton(self._addr.host)
self._data_to_send(
struct.pack(
'!BBBB4sH',
5, # version
0xF1, # command
0x00, # reserved
addr_type,
encoded_host,
0, # port; unused? SOCKS is fun
)
)
@_machine.state(initial=True)
def unconnected(self):
"not yet connected"
@_machine.state()
def sent_version(self):
"we've sent our version request"
@_machine.state()
def sent_request(self):
"we've sent our stream/etc request"
@_machine.state()
def relaying(self):
"received our response, now we can relay"
@_machine.state()
def abort(self, error_message):
"we've encountered an error"
@_machine.state()
def done(self):
"operations complete"
unconnected.upon(
connection,
enter=sent_version,
outputs=[_send_version],
)
sent_version.upon(
got_data,
enter=sent_version,
outputs=[_parse_version_reply],
)
sent_version.upon(
version_error,
enter=abort,
outputs=[_disconnect],
)
sent_version.upon(
version_reply,
enter=sent_request,
outputs=[_send_request],
)
sent_version.upon(
disconnected,
enter=unconnected,
outputs=[_disconnect]
)
sent_request.upon(
got_data,
enter=sent_request,
outputs=[_parse_request_reply],
)
sent_request.upon(
reply_ipv4,
enter=relaying,
outputs=[_make_connection],
)
sent_request.upon(
reply_ipv6,
enter=relaying,
outputs=[_make_connection],
)
# XXX this isn't always a _domain_name_resolved -- if we're a
# req_type CONNECT then it's _make_connection_domain ...
sent_request.upon(
reply_domain_name,
enter=done,
outputs=[_domain_name_resolved],
)
sent_request.upon(
reply_error,
enter=abort,
outputs=[_disconnect],
)
# XXX FIXME this needs a test
sent_request.upon(
disconnected,
enter=abort,
outputs=[_disconnect], # ... or is this redundant?
)
relaying.upon(
got_data,
enter=relaying,
outputs=[_relay_data],
)
relaying.upon(
disconnected,
enter=done,
outputs=[_disconnect],
)
abort.upon(
got_data,
enter=abort,
outputs=[],
)
abort.upon(
disconnected,
enter=abort,
outputs=[],
)
done.upon(
disconnected,
enter=done,
outputs=[],
)
_dispatch = {
'CONNECT': _send_connect_request,
'RESOLVE': _send_resolve_request,
'RESOLVE_PTR': _send_resolve_ptr_request,
}
class _TorSocksProtocol(Protocol):
def __init__(self, host, port, socks_method, factory):
self._machine = _SocksMachine(
req_type=socks_method,
host=host, # noqa unicode() on py3, py2? we want idna, actually?
port=port,
on_disconnect=self._on_disconnect,
on_data=self._on_data,
create_connection=self._create_connection,
)
self._factory = factory
def when_done(self):
return self._machine.when_done()
def connectionMade(self):
self._machine.connection()
# we notify via the factory that we have the
# locally-connecting host -- this is e.g. used by the "stream
# over one particular circuit" code to determine the local
# port that "our" SOCKS connection went to
self.factory._did_connect(self.transport.getHost())
def connectionLost(self, reason):
self._machine.disconnected(SocksError(reason))
def dataReceived(self, data):
self._machine.feed_data(data)
def _on_data(self, data):
self.transport.write(data)
def _create_connection(self, addr, port):
addr = IPv4Address('TCP', addr, port)
sender = self._factory.buildProtocol(addr)
client_proxy = portforward.ProxyClient()
sender.makeConnection(self.transport)
# portforward.ProxyClient is going to call setPeer but this
# probably doesn't have it...
setattr(sender, 'setPeer', lambda _: None)
client_proxy.setPeer(sender)
self._sender = sender
return sender
def _on_disconnect(self, error_message):
self.transport.loseConnection()
# self.transport.abortConnection()#SocksError(error_message)) ?
class _TorSocksFactory(Factory):
protocol = _TorSocksProtocol
# XXX should do validation on this stuff so we get errors before
# building the protocol
def __init__(self, *args, **kw):
self._args = args
self._kw = kw
self._host = None
self._when_connected = util.SingleObserver()
def _get_address(self):
"""
Returns a Deferred that fires with the transport's getHost()
when this SOCKS protocol becomes connected.
"""
return self._when_connected.when_fired()
def _did_connect(self, host):
self._host = host
self._when_connected.fire(host)
def buildProtocol(self, addr):
p = self.protocol(*self._args, **self._kw)
p.factory = self
return p
class SocksError(Exception):
code = None
message = ''
def __init__(self, message='', code=None):
super(SocksError, self).__init__(message or self.message)
self.message = message or self.message
self.code = code or self.code
class GeneralServerFailureError(SocksError):
code = 0x01
message = 'general SOCKS server failure'
class ConnectionNotAllowedError(SocksError):
code = 0x02
message = 'connection not allowed by ruleset'
class NetworkUnreachableError(SocksError):
code = 0x03
message = 'Network unreachable'
class HostUnreachableError(SocksError):
code = 0x04
message = 'Host unreachable'
class ConnectionRefusedError(SocksError):
code = 0x05
message = 'Connection refused'
class TtlExpiredError(SocksError):
code = 0x06
message = 'TTL expired'
class CommandNotSupportedError(SocksError):
code = 0x07
message = 'Command not supported'
class AddressTypeNotSupportedError(SocksError):
code = 0x08
message = 'Address type not supported'
_socks_errors = {cls.code: cls for cls in SocksError.__subclasses__()}
def _create_socks_error(code):
try:
return _socks_errors[code]()
except KeyError:
return SocksError("Unknown SOCKS error-code {}".format(code),
code=code)
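# Illustrative mapping: _create_socks_error(0x05) -> ConnectionRefusedError(),
# while an unknown code such as 0x42 falls back to
# SocksError("Unknown SOCKS error-code 66", code=0x42).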
@inlineCallbacks
def resolve(tor_endpoint, hostname):
"""
This is easier to use via :meth:`txtorcon.Tor.dns_resolve`
:param tor_endpoint: the Tor SOCKS endpoint to use.
:param hostname: the hostname to look up.
"""
if six.PY2 and isinstance(hostname, str):
hostname = unicode(hostname) # noqa
elif six.PY3 and isinstance(hostname, bytes):
hostname = hostname.decode('ascii')
factory = _TorSocksFactory(
hostname, 0, 'RESOLVE', None,
)
proto = yield tor_endpoint.connect(factory)
result = yield proto.when_done()
returnValue(result)
@inlineCallbacks
def resolve_ptr(tor_endpoint, ip):
"""
This is easier to use via :meth:`txtorcon.Tor.dns_resolve_ptr`
:param tor_endpoint: the Tor SOCKS endpoint to use.
:param ip: the IP address to look up.
"""
if six.PY2 and isinstance(ip, str):
ip = unicode(ip) # noqa
elif six.PY3 and isinstance(ip, bytes):
ip = ip.decode('ascii')
factory = _TorSocksFactory(
ip, 0, 'RESOLVE_PTR', None,
)
proto = yield tor_endpoint.connect(factory)
result = yield proto.when_done()
returnValue(result)
@implementer(IStreamClientEndpoint)
class TorSocksEndpoint(object):
"""
Represents an endpoint which will talk to a Tor SOCKS port.
These should usually not be instantiated directly, instead use
:meth:`txtorcon.TorConfig.socks_endpoint`.
"""
# XXX host, port args should be (host, port) tuple, or
# IAddress-implementer?
def __init__(self, socks_endpoint, host, port, tls=False):
self._proxy_ep = socks_endpoint # can be Deferred
assert self._proxy_ep is not None
if six.PY2 and isinstance(host, str):
host = unicode(host) # noqa
if six.PY3 and isinstance(host, bytes):
host = host.decode('ascii')
self._host = host
self._port = port
self._tls = tls
self._socks_factory = None
self._when_address = util.SingleObserver()
def _get_address(self):
"""
Returns a Deferred that fires with the source IAddress of the
underlying SOCKS connection (i.e. usually a
twisted.internet.address.IPv4Address)
circuit.py uses this; better suggestions welcome!
"""
return self._when_address.when_fired()
@inlineCallbacks
def connect(self, factory):
# further wrap the protocol if we're doing TLS.
# "pray i do not wrap the protocol further".
if self._tls:
# XXX requires Twisted 14+
from twisted.internet.ssl import optionsForClientTLS
if self._tls is True:
context = optionsForClientTLS(self._host)
else:
context = self._tls
tls_factory = tls.TLSMemoryBIOFactory(context, True, factory)
socks_factory = _TorSocksFactory(
self._host, self._port, 'CONNECT', tls_factory,
)
else:
socks_factory = _TorSocksFactory(
self._host, self._port, 'CONNECT', factory,
)
self._socks_factory = socks_factory
# forward our address (when we get it) to any listeners
self._socks_factory._get_address().addBoth(self._when_address.fire)
# XXX isn't this just maybeDeferred()
if isinstance(self._proxy_ep, Deferred):
proxy_ep = yield self._proxy_ep
if not IStreamClientEndpoint.providedBy(proxy_ep):
raise ValueError(
"The Deferred provided as 'socks_endpoint' must "
"resolve to an IStreamClientEndpoint provider (got "
"{})".format(type(proxy_ep).__name__)
)
else:
proxy_ep = self._proxy_ep
# socks_proto = yield proxy_ep.connect(socks_factory)
proto = yield proxy_ep.connect(socks_factory)
wrapped_proto = yield proto.when_done()
if self._tls:
returnValue(wrapped_proto.wrappedProtocol)
else:
returnValue(wrapped_proto)
|
mit
| 3,089,544,794,076,965,400
| 29.910053
| 87
| 0.572663
| false
| 4.009609
| false
| false
| false
|
appi147/Jarvis
|
jarviscli/plugin.py
|
1
|
6525
|
from inspect import cleandoc, isclass
import pluginmanager
from requests import ConnectionError
# Constants
# platform
MACOS = "MACOS"
LINUX = "LINUX"
WINDOWS = "WINDOWS"
# Shortcut for MACOS + LINUX
UNIX = "UNIX"
def plugin(name):
"""
Convert a function into a Plugin class, e.g.
@require(platform=LINUX, native="ap-hotspot")
@plugin("hotspot start")
def hotspot_start(jarvis, s):
system("sudo ap-hotspot start")
"""
def create_plugin(run):
plugin_class = type(
run.__name__, Plugin.__bases__, dict(
Plugin.__dict__))
plugin_class.__doc__ = run.__doc__
if isclass(run):
# class -> object
run = run()
# create class
plugin_class._require = []
plugin_class._complete = []
plugin_class._alias = []
plugin_class._name = name
plugin_class._backend = (run,)
plugin_class._backend_instance = run
return plugin_class
return create_plugin
def require(network=None, platform=None, native=None):
require = []
if network is not None:
require.append(('network', network))
if platform is not None:
require.append(('platform', platform))
if native is not None:
require.append(('native', native))
def __require(plugin):
plugin._require.extend(require)
return plugin
return __require
def complete(*complete):
def __complete(plugin):
plugin._complete.extend(complete)
return plugin
return __complete
def alias(*alias):
def __alias(plugin):
plugin._alias.extend(alias)
return plugin
return __alias
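# Illustrative sketch (hypothetical plugin): @alias registers alternative
# trigger words and @complete feeds tab completion; both stack above @plugin,
# which must sit closest to the function:
#
#     @alias("films")
#     @complete("search", "plot")
#     @plugin("movie")
#     def movie(jarvis, s):
#         jarvis.say(s)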
def _yield_something(values):
for value in values:
yield value
class PluginStorage(object):
def __init__(self):
self._sub_plugins = {}
def add_plugin(self, name, plugin_to_add):
self._sub_plugins[name] = plugin_to_add
def get_plugins(self, name=None):
if name is None:
return self._sub_plugins
if name in self._sub_plugins:
return self._sub_plugins[name]
return None
def change_with(self, plugin_new):
plugin_new._sub_plugins = self._sub_plugins
class Plugin(pluginmanager.IPlugin, PluginStorage):
"""
"""
_backend = None
def __init__(self):
super(pluginmanager.IPlugin, self).__init__()
self._sub_plugins = {}
def init(self, jarvis_api):
"""
Called before Jarvis starts;
Passes jarvis_api object for plugins to do initialization.
(would not be possible with __init__)
"""
if self.is_callable_plugin():
if hasattr(
self._backend[0].__class__,
"init") and callable(
getattr(
self._backend[0].__class__,
"init")):
self._backend[0].init(jarvis_api)
for plugin in self.get_plugins().values():
plugin.init(jarvis_api)
def is_callable_plugin(self):
"""
Return True if this plugin has an executable implementation (e.g. news).
Return False if this instance is only used for calling other plugins
(e.g. movie in 'movie search' and 'movie plot')
"""
return self._backend is not None
def get_name(self):
"""Set with @plugin(name)"""
return self._name
def require(self):
"""Set with @require"""
return self._require
def alias(self):
"""Set with @alias"""
return self._alias
def complete(self):
"""Set with @complete"""
# return default complete() if possible
if self.is_callable_plugin():
for complete in self._complete:
yield complete
# yield each sub command
for complete in self.get_plugins().keys():
yield complete
def get_doc(self):
"""Parses plugin doc string"""
doc = ""
examples = ""
extended_doc = ""
# default complete
if self.__doc__ is not None:
default_command_doc = cleandoc(self.__doc__)
default_command_doc = default_command_doc.split("-- Example:")
if len(default_command_doc) > 1:
examples += default_command_doc[1]
default_command_doc = default_command_doc[0]
doc += default_command_doc
if not doc.endswith("\n"):
doc += "\n"
doc += "\nSubcommands:"
# sub command complete
for name, sub_command in self.get_plugins().items():
doc += "\n-> {}: ".format(name)
sub_command_doc = sub_command.get_doc()
sub_command_doc = sub_command_doc.split("-- Example:")
if len(sub_command_doc) > 1:
examples += sub_command_doc[1]
sub_command_doc = sub_command_doc[0]
if '\n' not in sub_command_doc:
doc += sub_command_doc
else:
extended_doc += "\n {}:\n".format(name)
extended_doc += sub_command_doc
if not sub_command_doc.endswith("\n"):
extended_doc += "\n"
if extended_doc != "":
doc += "\n"
doc += extended_doc
if examples != "":
doc += "\n--Examples:"
doc += examples
return doc
def run(self, jarvis, s):
"""Entry point if this plugin is called"""
sub_command = jarvis.find_action(s, self.get_plugins().keys())
if sub_command == "None":
# run default
if self.is_callable_plugin():
self._backend[0](jarvis.get_api(), s)
else:
jarvis.get_api().say("Sorry, I could not recognise your command. Did you mean:")
for sub_command in self._sub_plugins.keys():
jarvis.get_api().say(" * {} {}".format(self.get_name(), sub_command))
else:
command = sub_command.split()[0]
new_s = " ".join(sub_command.split()[1:])
self.get_plugins(command).run(jarvis, new_s)
def _plugin_run_with_network_error(self, run_func, jarvis, s):
"""
Calls run_func(jarvis, s); try-catch ConnectionError
This method is used automatically if require() yields ("network", True).
Do not call it manually.
"""
try:
run_func(jarvis, s)
except ConnectionError:
jarvis.get_api().connection_error()
|
mit
| 4,146,784,341,586,826,000
| 27.49345
| 96
| 0.540383
| false
| 4.161352
| false
| false
| false
|
brigittebigi/proceed
|
proceed/scripts/import.py
|
1
|
9928
|
#!/usr/bin/python
# -*- coding: UTF-8 -*-
# ---------------------------------------------------------------------------
# ___ __ ___ ___ ____ ____ __
# | \ | \ | | / | | | \ Automatic
# |__/ |__/ | | | |__ |__ | | Conference
# | |\_ | | | | | | | Proceedings
# | | \ |___| \___ |___ |___ |__/ Generator
# ==========================================================
#
# http://www.lpl-aix.fr/~bigi/
#
# ---------------------------------------------------------------------------
# developed at:
#
# Laboratoire Parole et Langage
#
# Copyright (C) 2013-2014 Brigitte Bigi
#
# Use of this software is governed by the GPL, v3
# This banner notice must not be removed
# ---------------------------------------------------------------------------
#
# SPPAS is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SPPAS is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SPPAS. If not, see <http://www.gnu.org/licenses/>.
#
# ---------------------------------------------------------------------------
__docformat__ = "epytext"
"""
Import abstracts from a conference and save them in a directory,
in the form of one LaTeX file per abstract.
Input can be either a sciencesconf XML file or an easychair CSV file.
There are no options for the output style: the default is used.
"""
# ---------------------------------------------------------------------------
import sys
import os.path
import getopt
sys.path.append( os.path.join(os.path.dirname(os.path.dirname( os.path.abspath(__file__))), "src") )
from DataIO.Read.reader import Reader
from DataIO.Write.writer import Writer
from structs.prefs import Preferences
from structs.abstracts_themes import all_themes
from term.textprogress import TextProgress
from term.terminalcontroller import TerminalController
from sp_glob import program, author, version, copyright, url
wxop = True
try:
import wx
from wxgui.frames.import_wizard import ImportWizard
except Exception:
wxop = False
# ----------------------------------------------------------------------
# USEFUL FUNCTIONS
# ----------------------------------------------------------------------
def usage(output):
"""
Print the usage of this script on an output.
@param output is a string representing the output (for example: sys.stdout)
"""
output.write('import.py [options] where options are:\n')
output.write(' -i file Input file name [required] \n')
output.write(' -a file Authors Input file name [required if easychair] \n')
output.write(' -o output Output directory [required] \n')
output.write(' -s status Status number (0-4) [default=1=accepted]\n')
output.write(' -r reader name One of: sciencesconf or easychair [default=sciencesconf]\n')
output.write(' -S style name One of: basic, palme, nalte [default=basic]\n')
output.write(' -c compiler One of: pdflatex, xetex [default=pdflatex]\n')
output.write(' --nocsv Do not generate '+program+' CSV files\n')
output.write(' --notex Do not generate LaTeX files\n')
output.write(' --nohtml Do not generate HTML file\n')
output.write(' --help Print this help\n\n')
# End usage
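# Example invocation (illustrative file names):
#   python import.py -i abstracts.xml -o proceedings -r sciencesconf -S basic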
# ----------------------------------------------------------------------
def Quit(message=None, status=0, usageoutput=None):
"""
Quit the program with the appropriate exit status.
@param message is a text to communicate to the user on sys.stderr.
@param status is an integer of the status exit value.
@param usageoutput is a file descriptor.
"""
if message: sys.stderr.write('import.py '+message)
if usageoutput: usage(usageoutput)
sys.exit(status)
# End Quit
# ----------------------------------------------------------------------
# --------------------------------------------------------------------------
# MAIN PROGRAM
# --------------------------------------------------------------------------
if __name__=="__main__":
# ----------------------------------------------------------------------
# Get all arguments, verify inputs.
# ----------------------------------------------------------------------
# Verify the program name and possibly some arguments
if len(sys.argv) == 1:
if not wxop:
# stop the program and print an error message
Quit(status=1, usageoutput=sys.stderr)
else:
app = wx.App(False)
ImportWizard(None)
app.MainLoop()
sys.exit(0)
# Get options (if any...)
try:
opts, args = getopt.getopt(sys.argv[1:], "i:a:o:s:r:S:c:", ["help", "nocsv", "notex", "nohtml"])
except getopt.GetoptError, err:
# Print help information and exit:
Quit(message="Error: "+str(err)+".\nUse option --help for any help.\n", status=1)
fileinput = None
authorsinput = None
output = None
extension = "tex"
status = 1 # only accepted papers
readername = "sciencesconf"
themename = "basic"
compiler = "pdflatex"
exportcsv = True
exporttex = True
exporthtml = True
# Extract options
for o, a in opts:
if o == "-i":
fileinput = a
elif o == "-a":
authorsinput = a
elif o == "-o":
output = a
elif o == "-s":
status = int(a)
elif o == "-r":
readername = a
elif o == "-S":
themename = a
elif o == "-c":
compiler = a
elif o == "--help": # need help
Quit(message='Help', status=0, usageoutput=sys.stdout)
elif o == "--nocsv":
exportcsv = False
elif o == "--notex":
exporttex = False
elif o == "--nohtml":
exporthtml = False
# Verify args
if fileinput is not None:
if not os.path.exists(fileinput):
Quit(message="Error: BAD input file name: "+fileinput+"\n", status=1)
else:
Quit(message="Error: an input is required.\n.", status=1, usageoutput=sys.stderr)
if output is None:
Quit(message="Error: an output is required.\n.", status=1, usageoutput=sys.stderr)
if readername == "easychair" and not authorsinput:
Quit(message="With easychair, an input file with authors is required.", status=1, usageoutput=sys.stderr)
try:
term = TerminalController()
print term.render('${GREEN}-----------------------------------------------------------------------${NORMAL}')
print term.render('${RED}'+program+' - Version '+version+'${NORMAL}')
print term.render('${BLUE}'+copyright+'${NORMAL}')
print term.render('${BLUE}'+url+'${NORMAL}')
print term.render('${GREEN}-----------------------------------------------------------------------${NORMAL}\n')
except:
print '-----------------------------------------------------------------------\n'
print program+' - Version '+version
print copyright
print url+'\n'
print '-----------------------------------------------------------------------\n'
# ----------------------------------------------------------------------
p = TextProgress()
# ----------------------------------------------------------------------
# Load input data
# ----------------------------------------------------------------------
arguments = {}
arguments['readername'] = readername
arguments['filename'] = fileinput
arguments['authorsfilename'] = authorsinput
arguments['progress'] = p
reader = Reader( arguments )
# ----------------------------------------------------------------------
# Write output data (with default parameters)
# ----------------------------------------------------------------------
# Create preferences
prefs = Preferences()
theme = all_themes.get_theme(themename.lower())
prefs.SetTheme( theme )
prefs.SetValue('COMPILER', 'str', compiler.strip())
# Create the Writer
writer = Writer( reader.docs )
writer.set_status( status )
writer.set_progress( p )
# Write abstracts as LaTeX
if exporttex:
writer.writeLaTeX_as_Dir( output, prefs, tocompile=True )
# Write proceed native CSV files
if exportcsv:
writer.writeCSV( output )
# Write html file
if exporthtml:
writer.writeHTML( output+".html" )
# Done
try:
term = TerminalController()
print term.render('${GREEN}-----------------------------------------------------------------------${NORMAL}')
print term.render('${RED}Result is in '+output)
print term.render('${GREEN}Thank you for using '+program+".")
print term.render('${GREEN}-----------------------------------------------------------------------${NORMAL}\n')
except:
print ('-----------------------------------------------------------------------\n')
print "Result is in "+output+".\nThank you for using "+program+"."
print ('-----------------------------------------------------------------------\n')
# ----------------------------------------------------------------------
|
gpl-3.0
| -6,585,036,941,953,271,000
| 36.044776
| 119
| 0.463638
| false
| 4.512727
| false
| false
| false
|
zvolsky/muzika
|
models/menu.py
|
1
|
6740
|
# -*- coding: utf-8 -*-
# this file is released under public domain and you can use without limitations
#########################################################################
## Customize your APP title, subtitle and menus here
#########################################################################
response.logo = A(B('web',SPAN(2),'py'),XML('™ '),
_class="brand",_href="http://www.web2py.com/")
response.title = request.application.replace('_',' ').title()
response.subtitle = ''
## read more at http://dev.w3.org/html5/markup/meta.name.html
response.meta.author = 'Your Name <you@example.com>'
response.meta.keywords = 'web2py, python, framework'
response.meta.generator = 'Web2py Web Framework'
## your http://google.com/analytics id
response.google_analytics_id = None
#########################################################################
## this is the main application menu add/remove items as required
#########################################################################
response.menu = [
(T('Rozpis'), False, URL('default', 'index'), [
(T('Můj rozpis'), False, URL('default', 'index'), []),
]),
]
if auth.has_membership('rozpis'):
response.menu[0][3].append((T('Celkový rozpis'), False, URL('default', 'rozpis'), []))
if auth.has_membership('admin'):
response.menu.append((T('Číselníky'), False, None, [
(T('Práva uživatelů'), False, URL('plugin_manage_groups', 'index'), []),
(T('Muzikanti (uživatelé)'), False, URL('default', 'muzikanti'), []),
(T('Místa'), False, URL('default', 'mista'), []),
]))
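# Illustrative: further role-gated entries follow the same pattern, e.g.
# if auth.has_membership('editor'):
#     response.menu.append((T('Editace'), False, URL('default', 'editace'), []))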
DEVELOPMENT_MENU = True
#########################################################################
## provide shortcuts for development. remove in production
#########################################################################
def _():
# shortcuts
app = request.application
ctr = request.controller
# useful links to internal and external resources
response.menu += [
(SPAN('web2py', _class='highlighted'), False, 'http://web2py.com', [
(T('My Sites'), False, URL('admin', 'default', 'site')),
(T('This App'), False, URL('admin', 'default', 'design/%s' % app), [
(T('Controller'), False,
URL(
'admin', 'default', 'edit/%s/controllers/%s.py' % (app, ctr))),
(T('View'), False,
URL(
'admin', 'default', 'edit/%s/views/%s' % (app, response.view))),
(T('Layout'), False,
URL(
'admin', 'default', 'edit/%s/views/layout.html' % app)),
(T('Stylesheet'), False,
URL(
'admin', 'default', 'edit/%s/static/css/web2py.css' % app)),
(T('DB Model'), False,
URL(
'admin', 'default', 'edit/%s/models/db.py' % app)),
(T('Menu Model'), False,
URL(
'admin', 'default', 'edit/%s/models/menu.py' % app)),
(T('Database'), False, URL(app, 'appadmin', 'index')),
(T('Errors'), False, URL(
'admin', 'default', 'errors/' + app)),
(T('About'), False, URL(
'admin', 'default', 'about/' + app)),
]),
('web2py.com', False, 'http://www.web2py.com', [
(T('Download'), False,
'http://www.web2py.com/examples/default/download'),
(T('Support'), False,
'http://www.web2py.com/examples/default/support'),
(T('Demo'), False, 'http://web2py.com/demo_admin'),
(T('Quick Examples'), False,
'http://web2py.com/examples/default/examples'),
(T('FAQ'), False, 'http://web2py.com/AlterEgo'),
(T('Videos'), False,
'http://www.web2py.com/examples/default/videos/'),
(T('Free Applications'),
False, 'http://web2py.com/appliances'),
(T('Plugins'), False, 'http://web2py.com/plugins'),
(T('Layouts'), False, 'http://web2py.com/layouts'),
(T('Recipes'), False, 'http://web2pyslices.com/'),
(T('Semantic'), False, 'http://web2py.com/semantic'),
]),
(T('Documentation'), False, 'http://www.web2py.com/book', [
(T('Preface'), False,
'http://www.web2py.com/book/default/chapter/00'),
(T('Introduction'), False,
'http://www.web2py.com/book/default/chapter/01'),
(T('Python'), False,
'http://www.web2py.com/book/default/chapter/02'),
(T('Overview'), False,
'http://www.web2py.com/book/default/chapter/03'),
(T('The Core'), False,
'http://www.web2py.com/book/default/chapter/04'),
(T('The Views'), False,
'http://www.web2py.com/book/default/chapter/05'),
(T('Database'), False,
'http://www.web2py.com/book/default/chapter/06'),
(T('Forms and Validators'), False,
'http://www.web2py.com/book/default/chapter/07'),
(T('Email and SMS'), False,
'http://www.web2py.com/book/default/chapter/08'),
(T('Access Control'), False,
'http://www.web2py.com/book/default/chapter/09'),
(T('Services'), False,
'http://www.web2py.com/book/default/chapter/10'),
(T('Ajax Recipes'), False,
'http://www.web2py.com/book/default/chapter/11'),
(T('Components and Plugins'), False,
'http://www.web2py.com/book/default/chapter/12'),
(T('Deployment Recipes'), False,
'http://www.web2py.com/book/default/chapter/13'),
(T('Other Recipes'), False,
'http://www.web2py.com/book/default/chapter/14'),
(T('Buy this book'), False,
'http://stores.lulu.com/web2py'),
]),
(T('Community'), False, None, [
(T('Groups'), False,
'http://www.web2py.com/examples/default/usergroups'),
(T('Twitter'), False, 'http://twitter.com/web2py'),
(T('Live Chat'), False,
'http://webchat.freenode.net/?channels=web2py'),
]),
(T('Plugins'), False, None, [
('plugin_wiki', False,
'http://web2py.com/examples/default/download'),
(T('Other Plugins'), False,
'http://web2py.com/plugins'),
(T('Layout Plugins'),
False, 'http://web2py.com/layouts'),
])
]
)]
if DEVELOPMENT_MENU: _()
if "auth" in locals(): auth.wikimenu()
|
agpl-3.0
| -2,985,467,640,629,348,000
| 43.562914
| 90
| 0.480755
| false
| 3.705396
| false
| false
| false
|
dtbcoinlab/dtbcoin
|
share/qt/make_spinner.py
|
1
|
1035
|
#!/usr/bin/env python
# W.J. van der Laan, 2011
# Make spinning .mng animation from a .png
# Requires imagemagick 6.7+
from __future__ import division
from os import path
from PIL import Image
from subprocess import Popen
DTC='img/reload_scaled.png'
DST='../../src/qt/res/movies/update_spinner.mng'
TMPDIR='/tmp'
TMPNAME='tmp-%03i.png'
NUMFRAMES=35
FRAMERATE=10.0
CONVERT='convert'
CLOCKWISE=True
DSIZE=(16,16)
im_src = Image.open(DTC)
if CLOCKWISE:
im_src = im_src.transpose(Image.FLIP_LEFT_RIGHT)
def frame_to_filename(frame):
return path.join(TMPDIR, TMPNAME % frame)
frame_files = []
for frame in xrange(NUMFRAMES):
rotation = (frame + 0.5) / NUMFRAMES * 360.0
if CLOCKWISE:
rotation = -rotation
im_new = im_src.rotate(rotation, Image.BICUBIC)
im_new.thumbnail(DSIZE, Image.ANTIALIAS)
outfile = frame_to_filename(frame)
im_new.save(outfile, 'png')
frame_files.append(outfile)
p = Popen([CONVERT, "-delay", str(FRAMERATE), "-dispose", "2"] + frame_files + [DST])
p.communicate()
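# Optional safeguard (not in the original script): fail loudly if the
# ImageMagick invocation did not succeed, rather than trusting DST blindly.
if p.returncode != 0:
    raise RuntimeError('convert exited with status %d' % p.returncode)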
|
mit
| 131,144,194,758,330,020
| 23.069767
| 85
| 0.691787
| false
| 2.820163
| false
| false
| false
|
Kapiche/gcloud-datastore-oem
|
gcloudoem/exceptions.py
|
1
|
8700
|
# Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# CHANGED BY Kapiche Ltd.
# Copyright 2015 Kapiche Ltd. All rights reserved.
# Based on work by the good folk responsible for gcloud-python. Thanks folks!
#
"""
Custom exceptions.
"""
from collections import defaultdict
import json
import six
_HTTP_CODE_TO_EXCEPTION = {} # populated at end of module
class GCloudError(Exception):
"""Base error class for gcloud errors (abstract).
Each subclass represents a single type of HTTP error response.
"""
code = None
"""HTTP status code. Concrete subclasses *must* define.
See: http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html
"""
def __init__(self, message, errors=()):
super(GCloudError, self).__init__()
# suppress deprecation warning under 2.6.x
self.message = message
self._errors = [error.copy() for error in errors]
def __str__(self):
return '%d %s' % (self.code, self.message)
@property
def errors(self):
"""Detailed error information.
:rtype: list(dict)
:returns: a list of mappings describing each error.
"""
return [error.copy() for error in self._errors]
class Redirection(GCloudError):
"""Base for 3xx responses
This class is abstract.
"""
class MovedPermanently(Redirection):
"""Exception mapping a '301 Moved Permanently' response."""
code = 301
class NotModified(Redirection):
"""Exception mapping a '304 Not Modified' response."""
code = 304
class TemporaryRedirect(Redirection):
"""Exception mapping a '307 Temporary Redirect' response."""
code = 307
class ResumeIncomplete(Redirection):
"""Exception mapping a '308 Resume Incomplete' response."""
code = 308
class ClientError(GCloudError):
"""Base for 4xx responses
This class is abstract
"""
class BadRequest(ClientError):
"""Exception mapping a '400 Bad Request' response."""
code = 400
class Unauthorized(ClientError):
"""Exception mapping a '401 Unauthorized' response."""
code = 401
class Forbidden(ClientError):
"""Exception mapping a '403 Forbidden' response."""
code = 403
class NotFound(ClientError):
"""Exception mapping a '404 Not Found' response."""
code = 404
class MethodNotAllowed(ClientError):
"""Exception mapping a '405 Method Not Allowed' response."""
code = 405
class Conflict(ClientError):
"""Exception mapping a '409 Conflict' response."""
code = 409
class LengthRequired(ClientError):
"""Exception mapping a '411 Length Required' response."""
code = 411
class PreconditionFailed(ClientError):
"""Exception mapping a '412 Precondition Failed' response."""
code = 412
class RequestRangeNotSatisfiable(ClientError):
"""Exception mapping a '416 Request Range Not Satisfiable' response."""
code = 416
class TooManyRequests(ClientError):
"""Exception mapping a '429 Too Many Requests' response."""
code = 429
class ServerError(GCloudError):
"""Base for 5xx responses: (abstract)"""
class InternalServerError(ServerError):
"""Exception mapping a '500 Internal Server Error' response."""
code = 500
class NotImplemented(ServerError):
"""Exception mapping a '501 Not Implemented' response."""
code = 501
class ServiceUnavailable(ServerError):
"""Exception mapping a '503 Service Unavailable' response."""
code = 503
def make_exception(response, content, use_json=True):
"""
Factory: create exception based on HTTP response code.
:type response: :class:`httplib2.Response` or other HTTP response object
:param response: A response object that defines a status code as the status attribute.
:type content: string or dictionary
:param content: The body of the HTTP error response.
:type use_json: boolean
:param use_json: Flag indicating if ``content`` is expected to be JSON.
:rtype: instance of :class:`GCloudError`, or a concrete subclass.
:returns: Exception specific to the error response.
"""
message = content
errors = ()
if isinstance(content, str):
if use_json:
payload = json.loads(content)
else:
payload = {}
else:
payload = content
message = payload.get('message', message)
errors = payload.get('error', {}).get('errors', ())
try:
klass = _HTTP_CODE_TO_EXCEPTION[response.status]
except KeyError:
error = GCloudError(message, errors)
error.code = response.status
else:
error = klass(message, errors)
return error
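# Illustrative sketch only -- the response class and JSON body below are
# invented; any object exposing a ``status`` attribute works:
#
#     class _FakeResponse(object):
#         status = 404
#
#     exc = make_exception(_FakeResponse(), '{"message": "entity missing"}')
#     assert isinstance(exc, NotFound)
#     assert str(exc) == '404 entity missing'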
def _walk_subclasses(klass):
"""Recursively walk subclass tree."""
for sub in klass.__subclasses__():
yield sub
for subsub in _walk_subclasses(sub):
yield subsub
# Build the code->exception class mapping.
for eklass in _walk_subclasses(GCloudError):
code = getattr(eklass, 'code', None)
if code is not None:
_HTTP_CODE_TO_EXCEPTION[code] = eklass
class ValidationError(AssertionError):
"""
Validation exception.
May represent an error validating a field or a document containing fields with validation errors.
:ivar errors: A dictionary of errors for fields within this document or list, or None if the error is for an
individual field.
"""
errors = {}
field_name = None
_message = None
def __init__(self, message="", **kwargs):
self.errors = kwargs.get('errors', {})
self.field_name = kwargs.get('field_name', None)
self.message = message
def __str__(self):
return six.text_type(self.message)
def __repr__(self):
return '%s(%s,)' % (self.__class__.__name__, self.message)
def __getattribute__(self, name):
message = super(ValidationError, self).__getattribute__(name)
if name == 'message':
if self.field_name:
message = '%s' % message
if self.errors:
message = '%s(%s)' % (message, self._format_errors())
return message
def _get_message(self):
return self._message
def _set_message(self, message):
self._message = message
message = property(_get_message, _set_message)
def to_dict(self):
"""
Returns a dictionary of all errors within a entity.
Keys are field names or list indices and values are the validation error messages, or a nested dictionary of
errors for an embedded document or list.
"""
def build_dict(source):
errors_dict = {}
if not source:
return errors_dict
if isinstance(source, dict):
for field_name, error in source.items():
errors_dict[field_name] = build_dict(error)
elif isinstance(source, ValidationError) and source.errors:
return build_dict(source.errors)
else:
return six.text_type(source)
return errors_dict
if not self.errors:
return {}
return build_dict(self.errors)
def _format_errors(self):
"""Returns a string listing all errors within a document"""
def generate_key(value, prefix=''):
if isinstance(value, list):
value = ' '.join([generate_key(k) for k in value])
if isinstance(value, dict):
value = ' '.join(
[generate_key(v, k) for k, v in value.items()])
results = "%s.%s" % (prefix, value) if prefix else value
return results
error_dict = defaultdict(list)
for k, v in self.to_dict().items():
error_dict[generate_key(v)].append(k)
return ' '.join(["%s: %s" % (k, v) for k, v in error_dict.items()])
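# Illustrative sketch (field names invented): nested errors flatten via
# to_dict() into plain messages keyed by field name:
#
#     inner = ValidationError('too long', field_name='name')
#     outer = ValidationError('invalid entity', errors={'name': inner})
#     outer.to_dict()  # -> {'name': 'too long'}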
class InvalidQueryError(Exception):
"""Invalid Datastore query."""
pass
class EnvironmentError(Exception):
"""Generally means that connect() wasn't called."""
pass
class DoesNotExist(Exception):
pass
class MultipleObjectsReturned(Exception):
pass
class ConnectionError(Exception):
pass
|
apache-2.0
| -6,143,890,875,748,414,000
| 25.934985
| 116
| 0.63931
| false
| 4.270987
| false
| false
| false
|
llekn/ffado
|
admin/pyuic4.py
|
1
|
1532
|
#!/usr/bin/python
#
# Copyright (C) 2007-2008 Arnold Krille
#
# This file is part of FFADO
# FFADO = Free Firewire (pro-)audio drivers for linux
#
# FFADO is based upon FreeBoB.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import imp
def pyuic4_action( target, source, env ):
env.Execute( "pyuic4 " + str( source[0] ) + " > " + str( target[0] ) )
return 0
def pyuic4_string( target, source, env ):
return "building '%s' from '%s'" % ( str(target[0]), str( source[0] ) )
def PyQt4Check( context ):
context.Message( "Checking for pyuic4 (via the python module pyqtconfig) " )
ret = True
try:
imp.find_module( "pyqtconfig" )
except ImportError:
ret = False
context.Result( ret )
return ret
def generate( env, **kw ):
env['BUILDERS']['PyUIC4'] = env.Builder( action=pyuic4_action, src_suffix=".ui", single_source=True )
env['PYUIC4_TESTS'] = { "PyQt4Check" : PyQt4Check }
def exists( env ):
return 1
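# Usage sketch (paths are hypothetical; assumes this module sits on SCons'
# toolpath so generate() is picked up as a tool):
#
#     env = Environment(tools=['default', 'pyuic4'], toolpath=['admin'])
#     env.PyUIC4('gui/mixer.py', 'gui/mixer.ui')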
|
gpl-2.0
| -256,141,468,672,272,600
| 29.64
| 102
| 0.706266
| false
| 3.171843
| false
| false
| false
|
moto-timo/ironpython3
|
Src/Scripts/generate_calls.py
|
1
|
26135
|
#####################################################################################
#
# Copyright (c) Microsoft Corporation. All rights reserved.
#
# This source code is subject to terms and conditions of the Apache License, Version 2.0. A
# copy of the license can be found in the License.html file at the root of this distribution. If
# you cannot locate the Apache License, Version 2.0, please send an email to
# ironpy@microsoft.com. By using this source code in any fashion, you are agreeing to be bound
# by the terms of the Apache License, Version 2.0.
#
# You must not remove this notice, or any other, from this software.
#
#
#####################################################################################
import sys
from generate import generate
MAX_ARGS = 16
def make_params(nargs, *prefix):
params = ["object arg%d" % i for i in range(nargs)]
return ", ".join(list(prefix) + params)
def make_params1(nargs, prefix=("CodeContext context",)):
params = ["object arg%d" % i for i in range(nargs)]
return ", ".join(list(prefix) + params)
def make_args(nargs, *prefix):
params = ["arg%d" % i for i in range(nargs)]
return ", ".join(list(prefix) + params)
def make_args1(nargs, prefix, start=0):
args = ["arg%d" % i for i in range(start, nargs)]
return ", ".join(list(prefix) + args)
def make_calltarget_type_args(nargs):
return ', '.join(['PythonFunction'] + ['object'] * (nargs + 1))
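# For instance, make_calltarget_type_args(2) returns
# 'PythonFunction, object, object, object': the function instance, the two
# declared parameters, and a trailing object for the return type.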
def gen_args_comma(nparams, comma):
args = ""
for i in range(nparams):
args = args + comma + ("object arg%d" % i)
comma = ", "
return args
def gen_args(nparams):
return gen_args_comma(nparams, "")
def gen_args_call(nparams, *prefix):
args = ""
comma = ""
for i in range(nparams):
args = args + comma +("arg%d" % i)
comma = ", "
if prefix:
if args:
args = prefix[0] + ', ' + args
else:
args = prefix[0]
return args
def gen_args_array(nparams):
args = gen_args_call(nparams)
if args: return "{ " + args + " }"
else: return "{ }"
def gen_callargs(nparams):
args = ""
comma = ""
for i in range(nparams):
args = args + comma + ("callArgs[%d]" % i)
comma = ","
return args
def gen_args_paramscall(nparams):
args = ""
comma = ""
for i in range(nparams):
args = args + comma + ("args[%d]" % i)
comma = ","
return args
method_caller_template = """
class MethodBinding<%(typeParams)s> : BaseMethodBinding {
private CallSite<Func<CallSite, CodeContext, object, object, %(typeParams)s, object>> _site;
public MethodBinding(PythonInvokeBinder binder) {
_site = CallSite<Func<CallSite, CodeContext, object, object, %(typeParams)s, object>>.Create(binder);
}
public object SelfTarget(CallSite site, CodeContext context, object target, %(callParams)s) {
Method self = target as Method;
if (self != null && self._inst != null) {
return _site.Target(_site, context, self._func, self._inst, %(callArgs)s);
}
return ((CallSite<Func<CallSite, CodeContext, object, %(typeParams)s, object>>)site).Update(site, context, target, %(callArgs)s);
}
public object SelflessTarget(CallSite site, CodeContext context, object target, object arg0, %(callParamsSelfless)s) {
Method self = target as Method;
if (self != null && self._inst == null) {
return _site.Target(_site, context, self._func, PythonOps.MethodCheckSelf(context, self, arg0), %(callArgsSelfless)s);
}
return ((CallSite<Func<CallSite, CodeContext, object, object, %(typeParams)s, object>>)site).Update(site, context, target, arg0, %(callArgsSelfless)s);
}
public override Delegate GetSelfTarget() {
return new Func<CallSite, CodeContext, object, %(typeParams)s, object>(SelfTarget);
}
public override Delegate GetSelflessTarget() {
return new Func<CallSite, CodeContext, object, object, %(typeParams)s, object>(SelflessTarget);
}
}"""
def method_callers(cw):
for nparams in range(1, MAX_ARGS-3):
cw.write(method_caller_template % {
'typeParams' : ', '.join(('T%d' % d for d in range(nparams))),
'callParams': ', '.join(('T%d arg%d' % (d,d) for d in range(nparams))),
'callParamsSelfless': ', '.join(('T%d arg%d' % (d,d+1) for d in range(nparams))),
'callArgsSelfless' : ', '.join(('arg%d' % (d+1) for d in range(nparams))),
'argCount' : nparams,
'callArgs': ', '.join(('arg%d' % d for d in range(nparams))),
'genFuncArgs' : make_calltarget_type_args(nparams),
})
def selfless_method_caller_switch(cw):
cw.enter_block('switch (typeArgs.Length)')
for i in range(1, MAX_ARGS-3):
cw.write('case %d: binding = (BaseMethodBinding)Activator.CreateInstance(typeof(MethodBinding<%s>).MakeGenericType(typeArgs), binder); break;' % (i, ',' * (i-1)))
cw.exit_block()
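# For typeArgs.Length == 2 the emitted C# entry reads (sketch):
#   case 2: binding = (BaseMethodBinding)Activator.CreateInstance(
#       typeof(MethodBinding<,>).MakeGenericType(typeArgs), binder); break;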
function_caller_template = """
public sealed class FunctionCaller<%(typeParams)s> : FunctionCaller {
public FunctionCaller(int compat) : base(compat) { }
public object Call%(argCount)d(CallSite site, CodeContext context, object func, %(callParams)s) {
PythonFunction pyfunc = func as PythonFunction;
if (pyfunc != null && pyfunc._compat == _compat) {
return ((Func<%(genFuncArgs)s>)pyfunc.__code__.Target)(pyfunc, %(callArgs)s);
}
return ((CallSite<Func<CallSite, CodeContext, object, %(typeParams)s, object>>)site).Update(site, context, func, %(callArgs)s);
}"""
defaults_template = """
public object Default%(defaultCount)dCall%(argCount)d(CallSite site, CodeContext context, object func, %(callParams)s) {
PythonFunction pyfunc = func as PythonFunction;
if (pyfunc != null && pyfunc._compat == _compat) {
int defaultIndex = pyfunc.Defaults.Length - pyfunc.NormalArgumentCount + %(argCount)d;
return ((Func<%(genFuncArgs)s>)pyfunc.__code__.Target)(pyfunc, %(callArgs)s, %(defaultArgs)s);
}
return ((CallSite<Func<CallSite, CodeContext, object, %(typeParams)s, object>>)site).Update(site, context, func, %(callArgs)s);
}"""
defaults_template_0 = """
public object Default%(argCount)dCall0(CallSite site, CodeContext context, object func) {
PythonFunction pyfunc = func as PythonFunction;
if (pyfunc != null && pyfunc._compat == _compat) {
int defaultIndex = pyfunc.Defaults.Length - pyfunc.NormalArgumentCount;
return ((Func<%(genFuncArgs)s>)pyfunc.__code__.Target)(pyfunc, %(defaultArgs)s);
}
return ((CallSite<Func<CallSite, CodeContext, object, object>>)site).Update(site, context, func);
}"""
def function_callers(cw):
cw.write('''class FunctionCallerProperties {
internal const int MaxGeneratedFunctionArgs = %d;
}''' % (MAX_ARGS-2))
cw.write('')
for nparams in range(1, MAX_ARGS-2):
cw.write(function_caller_template % {
'typeParams' : ', '.join(('T%d' % d for d in range(nparams))),
'callParams': ', '.join(('T%d arg%d' % (d,d) for d in range(nparams))),
'argCount' : nparams,
'callArgs': ', '.join(('arg%d' % d for d in range(nparams))),
'genFuncArgs' : make_calltarget_type_args(nparams),
})
for i in range(nparams + 1, MAX_ARGS - 2):
cw.write(defaults_template % {
'typeParams' : ', '.join(('T%d' % d for d in range(nparams))),
'callParams': ', '.join(('T%d arg%d' % (d,d) for d in range(nparams))),
'argCount' : nparams,
'totalParamCount' : i,
'callArgs': ', '.join(('arg%d' % d for d in range(nparams))),
'defaultCount' : i - nparams,
'defaultArgs' : ', '.join(('pyfunc.Defaults[defaultIndex + %d]' % curDefault for curDefault in range(i - nparams))),
'genFuncArgs' : make_calltarget_type_args(i),
})
cw.write('}')
def function_callers_0(cw):
for i in range(1, MAX_ARGS - 2):
cw.write(defaults_template_0 % {
'argCount' : i,
'defaultArgs' : ', '.join(('pyfunc.Defaults[defaultIndex + %d]' % curDefault for curDefault in range(i))),
'genFuncArgs' : make_calltarget_type_args(i),
})
function_caller_switch_template = """case %(argCount)d:
callerType = typeof(FunctionCaller<%(arity)s>).MakeGenericType(typeParams);
mi = callerType.GetMethod(baseName + "Call%(argCount)d");
Debug.Assert(mi != null);
fc = GetFunctionCaller(callerType, funcCompat);
funcType = typeof(Func<,,,,%(arity)s>).MakeGenericType(allParams);
return new Binding.FastBindResult<T>((T)(object)mi.CreateDelegate(funcType, fc), true);"""
def function_caller_switch(cw):
for nparams in range(1, MAX_ARGS-2):
cw.write(function_caller_switch_template % {
'arity' : ',' * (nparams - 1),
'argCount' : nparams,
})
def gen_lazy_call_targets(cw):
for nparams in range(MAX_ARGS):
cw.enter_block("public static object OriginalCallTarget%d(%s)" % (nparams, make_params(nparams, "PythonFunction function")))
cw.write("function.__code__.LazyCompileFirstTarget(function);")
cw.write("return ((Func<%s>)function.__code__.Target)(%s);" % (make_calltarget_type_args(nparams), gen_args_call(nparams, 'function')))
cw.exit_block()
cw.write('')
def gen_recursion_checks(cw):
for nparams in range(MAX_ARGS):
cw.enter_block("internal class PythonFunctionRecursionCheck%d" % (nparams, ))
cw.write("private readonly Func<%s> _target;" % (make_calltarget_type_args(nparams), ))
cw.write('')
cw.enter_block('public PythonFunctionRecursionCheck%d(Func<%s> target)' % (nparams, make_calltarget_type_args(nparams)))
cw.write('_target = target;')
cw.exit_block()
cw.write('')
cw.enter_block('public object CallTarget(%s)' % (make_params(nparams, "PythonFunction/*!*/ function"), ))
cw.write('PythonOps.FunctionPushFrame((PythonContext)function.Context.LanguageContext);')
cw.enter_block('try')
cw.write('return _target(%s);' % (gen_args_call(nparams, 'function'), ))
cw.finally_block()
cw.write('PythonOps.FunctionPopFrame();')
cw.exit_block()
cw.exit_block()
cw.exit_block()
cw.write('')
def gen_recursion_delegate_switch(cw):
for nparams in range(MAX_ARGS):
cw.case_label('case %d:' % nparams)
cw.write('finalTarget = new Func<%s>(new PythonFunctionRecursionCheck%d((Func<%s>)finalTarget).CallTarget);' % (make_calltarget_type_args(nparams), nparams, make_calltarget_type_args(nparams)))
cw.write('break;')
cw.dedent()
def get_call_type(postfix):
if postfix == "": return "CallType.None"
else: return "CallType.ImplicitInstance"
def make_call_to_target(cw, index, postfix, extraArg):
cw.enter_block("public override object Call%(postfix)s(%(params)s)", postfix=postfix,
params=make_params1(index))
cw.write("if (target%(index)d != null) return target%(index)d(%(args)s);", index=index,
args = make_args1(index, extraArg))
cw.write("throw BadArgumentError(%(callType)s, %(nargs)d);", callType=get_call_type(postfix), nargs=index)
cw.exit_block()
def make_call_to_targetX(cw, index, postfix, extraArg):
cw.enter_block("public override object Call%(postfix)s(%(params)s)", postfix=postfix,
params=make_params1(index))
cw.write("return target%(index)d(%(args)s);", index=index, args = make_args1(index, extraArg))
cw.exit_block()
def make_error_calls(cw, index):
cw.enter_block("public override object Call(%(params)s)", params=make_params1(index))
cw.write("throw BadArgumentError(CallType.None, %(nargs)d);", nargs=index)
cw.exit_block()
if index > 0:
cw.enter_block("public override object CallInstance(%(params)s)", params=make_params1(index))
cw.write("throw BadArgumentError(CallType.ImplicitInstance, %(nargs)d);", nargs=index)
cw.exit_block()
def gen_call(nargs, nparams, cw, extra=[]):
args = extra + ["arg%d" % i for i in range(nargs)]
cw.enter_block("public override object Call(%s)" % make_params1(nargs))
# first emit error checking...
ndefaults = nparams-nargs
if nargs != nparams:
cw.write("if (Defaults.Length < %d) throw BadArgumentError(%d);" % (ndefaults,nargs))
# emit the common case of no recursion check
if (nargs == nparams):
cw.write("if (!EnforceRecursion) return target(%s);" % ", ".join(args))
else:
dargs = args + ["Defaults[Defaults.Length - %d]" % i for i in range(ndefaults, 0, -1)]
cw.write("if (!EnforceRecursion) return target(%s);" % ", ".join(dargs))
# emit non-common case of recursion check
cw.write("PushFrame();")
cw.enter_block("try")
# make function body
if (nargs == nparams):
cw.write("return target(%s);" % ", ".join(args))
else:
dargs = args + ["Defaults[Defaults.Length - %d]" % i for i in range(ndefaults, 0, -1)]
cw.write("return target(%s);" % ", ".join(dargs))
cw.finally_block()
cw.write("PopFrame();")
cw.exit_block()
cw.exit_block()
def gen_params_callN(cw, any):
cw.enter_block("public override object Call(CodeContext context, params object[] args)")
cw.write("if (!IsContextAware) return Call(args);")
cw.write("")
cw.enter_block("if (Instance == null)")
cw.write("object[] newArgs = new object[args.Length + 1];")
cw.write("newArgs[0] = context;")
cw.write("Array.Copy(args, 0, newArgs, 1, args.Length);")
cw.write("return Call(newArgs);")
cw.else_block()
# need to call w/ Context, Instance, *args
if any:
cw.enter_block("switch (args.Length)")
for i in range(MAX_ARGS-1):
if i == 0:
cw.write(("case %d: if(target2 != null) return target2(context, Instance); break;") % (i))
else:
cw.write(("case %d: if(target%d != null) return target%d(context, Instance, " + gen_args_paramscall(i) + "); break;") % (i, i+2, i+2))
cw.exit_block()
cw.enter_block("if (targetN != null)")
cw.write("object [] newArgs = new object[args.Length+2];")
cw.write("newArgs[0] = context;")
cw.write("newArgs[1] = Instance;")
cw.write("Array.Copy(args, 0, newArgs, 2, args.Length);")
cw.write("return targetN(newArgs);")
cw.exit_block()
cw.write("throw BadArgumentError(args.Length);")
cw.exit_block()
else:
cw.write("object [] newArgs = new object[args.Length+2];")
cw.write("newArgs[0] = context;")
cw.write("newArgs[1] = Instance;")
cw.write("Array.Copy(args, 0, newArgs, 2, args.Length);")
cw.write("return target(newArgs);")
cw.exit_block()
cw.exit_block()
cw.write("")
CODE = """
public static object Call(%(params)s) {
FastCallable fc = func as FastCallable;
if (fc != null) return fc.Call(%(args)s);
return PythonCalls.Call(func, %(argsArray)s);
}"""
def gen_python_switch(cw):
for nparams in range(MAX_ARGS):
genArgs = make_calltarget_type_args(nparams)
cw.write("""case %d:
originalTarget = (Func<%s>)OriginalCallTarget%d;
return typeof(Func<%s>);""" % (nparams, genArgs, nparams, genArgs))
fast_type_call_template = """
class FastBindingBuilder<%(typeParams)s> : FastBindingBuilderBase {
public FastBindingBuilder(CodeContext context, PythonType type, PythonInvokeBinder binder, Type siteType, Type[] genTypeArgs) :
base(context, type, binder, siteType, genTypeArgs) {
}
protected override Delegate GetNewSiteDelegate(PythonInvokeBinder binder, object func) {
return new Func<%(newInitDlgParams)s>(new NewSite<%(typeParams)s>(binder, func).Call);
}
protected override Delegate MakeDelegate(int version, Delegate newDlg, LateBoundInitBinder initBinder) {
return new Func<%(funcParams)s>(
new FastTypeSite<%(typeParams)s>(
version,
(Func<%(newInitDlgParams)s>)newDlg,
initBinder
).CallTarget
);
}
}
class FastTypeSite<%(typeParams)s> {
private readonly int _version;
private readonly Func<%(newInitDlgParams)s> _new;
private readonly CallSite<Func<%(nestedSlowSiteParams)s>> _initSite;
public FastTypeSite(int version, Func<%(newInitDlgParams)s> @new, LateBoundInitBinder initBinder) {
_version = version;
_new = @new;
_initSite = CallSite<Func<%(nestedSlowSiteParams)s>>.Create(initBinder);
}
public object CallTarget(CallSite site, CodeContext context, object type, %(callTargetArgs)s) {
PythonType pt = type as PythonType;
if (pt != null && pt.Version == _version) {
object res = _new(context, type, %(callTargetPassedArgs)s);
_initSite.Target(_initSite, context, res, %(callTargetPassedArgs)s);
return res;
}
return ((CallSite<Func<%(funcParams)s>>)site).Update(site, context, type, %(callTargetPassedArgs)s);
}
}
class NewSite<%(typeParams)s> {
private readonly CallSite<Func<%(nestedSiteParams)s>> _site;
private readonly object _target;
public NewSite(PythonInvokeBinder binder, object target) {
_site = CallSite<Func<%(nestedSiteParams)s>>.Create(binder);
_target = target;
}
public object Call(CodeContext context, object typeOrInstance, %(callTargetArgs)s) {
return _site.Target(_site, context, _target, typeOrInstance, %(callTargetPassedArgs)s);
}
}
"""
def gen_fast_type_callers(cw):
for nparams in range(1, 6):
funcParams = 'CallSite, CodeContext, object, ' + ', '.join(('T%d' % d for d in range(nparams))) + ', object'
newInitDlgParams = 'CodeContext, object, ' + ', '.join(('T%d' % d for d in range(nparams))) + ', object'
callTargetArgs = ', '.join(('T%d arg%d' % (d, d) for d in range(nparams)))
callTargetPassedArgs = ', '.join(('arg%d' % (d, ) for d in range(nparams)))
nestedSiteParams = 'CallSite, CodeContext, object, object, ' + ', '.join(('T%d' % d for d in range(nparams))) + ', object'
nestedSlowSiteParams = 'CallSite, CodeContext, object, ' + ', '.join(('T%d' % d for d in range(nparams))) + ', object'
cw.write(fast_type_call_template % {
'typeParams' : ', '.join(('T%d' % d for d in range(nparams))),
'funcParams' : funcParams,
'newInitDlgParams' : newInitDlgParams,
'callTargetArgs' : callTargetArgs,
'callTargetPassedArgs': callTargetPassedArgs,
'nestedSiteParams' : nestedSiteParams,
'nestedSlowSiteParams' : nestedSlowSiteParams,
})
def gen_fast_type_caller_switch(cw):
for nparams in range(1, 6):
cw.write('case %d: baseType = typeof(FastBindingBuilder<%s>); break;' % (nparams, (',' * (nparams - 1))))
fast_init_template = """
class FastInitSite<%(typeParams)s> {
private readonly int _version;
private readonly PythonFunction _slot;
private readonly CallSite<Func<CallSite, CodeContext, PythonFunction, object, %(typeParams)s, object>> _initSite;
public FastInitSite(int version, PythonInvokeBinder binder, PythonFunction target) {
_version = version;
_slot = target;
_initSite = CallSite<Func<CallSite, CodeContext, PythonFunction, object, %(typeParams)s, object>>.Create(binder);
}
public object CallTarget(CallSite site, CodeContext context, object inst, %(callParams)s) {
IPythonObject pyObj = inst as IPythonObject;
if (pyObj != null && pyObj.PythonType.Version == _version) {
_initSite.Target(_initSite, context, _slot, inst, %(callArgs)s);
return inst;
}
return ((CallSite<Func<CallSite, CodeContext, object, %(typeParams)s, object>>)site).Update(site, context, inst, %(callArgs)s);
}
public object EmptyCallTarget(CallSite site, CodeContext context, object inst, %(callParams)s) {
IPythonObject pyObj = inst as IPythonObject;
if ((pyObj != null && pyObj.PythonType.Version == _version) || DynamicHelpers.GetPythonType(inst).Version == _version) {
return inst;
}
return ((CallSite<Func<CallSite, CodeContext, object, %(typeParams)s, object>>)site).Update(site, context, inst, %(callArgs)s);
}
}
"""
MAX_FAST_INIT_ARGS = 6
def gen_fast_init_callers(cw):
for nparams in range(1, MAX_FAST_INIT_ARGS):
callParams = ', '.join(('T%d arg%d' % (d, d) for d in range(nparams)))
callArgs = ', '.join(('arg%d' % (d, ) for d in range(nparams)))
cw.write(fast_init_template % {
'typeParams' : ', '.join(('T%d' % d for d in range(nparams))),
'callParams' : callParams,
'callArgs': callArgs,
})
def gen_fast_init_switch(cw):
for nparams in range(1, MAX_FAST_INIT_ARGS):
cw.write("case %d: initSiteType = typeof(FastInitSite<%s>); break;" % (nparams, ',' * (nparams-1), ))
def gen_fast_init_max_args(cw):
cw.write("public const int MaxFastLateBoundInitArgs = %d;" % MAX_FAST_INIT_ARGS)
MAX_INSTRUCTION_PROVIDED_CALLS = 7
def gen_call_expression_instruction_switch(cw):
for i in range(MAX_INSTRUCTION_PROVIDED_CALLS):
cw.case_label('case %d:' % i)
cw.write('compiler.Compile(Parent.LocalContext);')
cw.write('compiler.Compile(_target);')
for j in range(i):
cw.write('compiler.Compile(_args[%d].Expression);' % j)
cw.write('compiler.Instructions.Emit(new Invoke%dInstruction(Parent.PyContext));' % i)
cw.write('return;')
cw.dedent()
def gen_call_expression_instructions(cw):
for i in range(MAX_INSTRUCTION_PROVIDED_CALLS):
siteargs = 'object, ' * (i + 1)
argfetch = '\n'.join([' var arg%d = frame.Pop();' % (j-1) for j in range(i, 0, -1)])
callargs = ', '.join(['target'] + ['arg%d' % j for j in range(i)])
cw.write("""
class Invoke%(argcount)dInstruction : InvokeInstruction {
private readonly CallSite<Func<CallSite, CodeContext, %(siteargs)sobject>> _site;
public Invoke%(argcount)dInstruction(PythonContext context) {
_site = context.CallSite%(argcount)d;
}
public override int ConsumedStack {
get {
return %(consumedCount)d;
}
}
public override int Run(InterpretedFrame frame) {
%(argfetch)s
var target = frame.Pop();
frame.Push(_site.Target(_site, (CodeContext)frame.Pop(), %(callargs)s));
return +1;
}
}""" % {'siteargs': siteargs, 'argfetch' : argfetch, 'callargs' : callargs, 'argcount' : i, 'consumedCount' : i + 2 })
def gen_shared_call_sites_storage(cw):
for i in range(MAX_INSTRUCTION_PROVIDED_CALLS):
siteargs = 'object, ' * (i + 1)
cw.writeline('private CallSite<Func<CallSite, CodeContext, %sobject>> _callSite%d;' % (siteargs, i))
def gen_shared_call_sites_properties(cw):
for i in range(MAX_INSTRUCTION_PROVIDED_CALLS):
siteargs = 'object, ' * (i + 1)
cw.enter_block('internal CallSite<Func<CallSite, CodeContext, %sobject>> CallSite%d' % (siteargs, i))
cw.enter_block('get')
cw.writeline('EnsureCall%dSite();' % i)
cw.writeline('return _callSite%d;' % i)
cw.exit_block()
cw.exit_block()
cw.writeline('')
cw.enter_block('private void EnsureCall%dSite()' % i)
cw.enter_block('if (_callSite%d == null)' % i)
cw.writeline('Interlocked.CompareExchange(')
cw.indent()
cw.writeline('ref _callSite%d,' % i)
cw.writeline('CallSite<Func<CallSite, CodeContext, %sobject>>.Create(Invoke(new CallSignature(%d))),' % (siteargs, i))
cw.writeline('null')
cw.dedent()
cw.writeline(');')
cw.exit_block()
cw.exit_block()
cw.writeline('')
def main():
return generate(
("Python Selfless Method Caller Switch", selfless_method_caller_switch),
("Python Method Callers", method_callers),
("Python Shared Call Sites Properties", gen_shared_call_sites_properties),
("Python Shared Call Sites Storage", gen_shared_call_sites_storage),
("Python Call Expression Instructions", gen_call_expression_instructions),
("Python Call Expression Instruction Switch", gen_call_expression_instruction_switch),
("Python Fast Init Max Args", gen_fast_init_max_args),
("Python Fast Init Switch", gen_fast_init_switch),
("Python Fast Init Callers", gen_fast_init_callers),
("Python Fast Type Caller Switch", gen_fast_type_caller_switch),
("Python Fast Type Callers", gen_fast_type_callers),
("Python Recursion Enforcement", gen_recursion_checks),
("Python Recursion Delegate Switch", gen_recursion_delegate_switch),
("Python Lazy Call Targets", gen_lazy_call_targets),
("Python Zero Arg Function Callers", function_callers_0),
("Python Function Callers", function_callers),
("Python Function Caller Switch", function_caller_switch),
("Python Call Target Switch", gen_python_switch),
)
if __name__ == "__main__":
main()
|
apache-2.0
| 6,270,476,123,638,950,000
| 42.056013
| 201
| 0.601033
| false
| 3.54854
| false
| false
| false
|
waveform80/dbsuite
|
dbsuite/plugins/db2/zos/parser.py
|
1
|
412770
|
# vim: set et sw=4 sts=4:
# Copyright 2012 Dave Hughes.
#
# This file is part of dbsuite.
#
# dbsuite is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# dbsuite is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# dbsuite. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (
unicode_literals,
print_function,
absolute_import,
division,
)
from collections import namedtuple
from dbsuite.plugins.db2.zos.tokenizer import db2zos_namechars, db2zos_identchars
from dbsuite.parser import BaseParser, ParseError, ParseBacktrack, quote_str
from dbsuite.tokenizer import TokenTypes as TT, Token
# Standard size suffixes and multipliers
SUFFIX_KMG = {
'K': 1024**1,
'M': 1024**2,
'G': 1024**3,
}
# Default sizes for certain datatypes
CHAR_DEFAULT_SIZE = 1
BLOB_DEFAULT_SIZE = 1024*1024
DECIMAL_DEFAULT_SIZE = 5
DECIMAL_DEFAULT_SCALE = 0
DECFLOAT_DEFAULT_SIZE = 34
TIMESTAMP_DEFAULT_SIZE = 6
class DB2ZOSParser(BaseParser):
"""Reformatter which breaks up and re-indents DB2 for LUW's SQL dialect.
This class is, at its core, a full blown SQL language parser that
understands many common SQL DML and DDL commands (from the basic ones like
INSERT, UPDATE, DELETE, SELECT, to the more DB2 specific ones such as
CREATE TABLESPACE, CREATE FUNCTION, and dynamic compound statements).
"""
def __init__(self):
super(DB2ZOSParser, self).__init__()
self.namechars = db2zos_namechars
self.identchars = db2zos_identchars
self.current_schema = None
def _parse_init(self, tokens):
super(DB2ZOSParser, self)._parse_init(tokens)
self.current_schema = None
def _save_state(self):
# Override _save_state to save the current schema
self._states.append((
self._index,
self._level,
len(self._output),
self.current_schema
))
def _restore_state(self):
# Override _restore_state to restore the current schema
(
self._index,
self._level,
output_len,
self.current_schema
) = self._states.pop()
del self._output[output_len:]
def _parse_top(self):
# Override _parse_top to make a 'statement' the top of the parse tree
self._parse_statement()
def _prespace_default(self, template):
# Overridden to include array and set operators, and the specific
# intra-statement terminator used by func/proc definitions
return super(DB2ZOSParser, self)._prespace_default(template) and template not in (
']', '}', ';',
(TT.OPERATOR, ']'),
(TT.OPERATOR, '}'),
(TT.TERMINATOR, ';'),
)
def _postspace_default(self, template):
# Overridden to include array and set operators
return super(DB2ZOSParser, self)._postspace_default(template) and template not in (
'[', '{',
(TT.OPERATOR, '['),
(TT.OPERATOR, '{'),
)
# PATTERNS ###############################################################
def _parse_subrelation_name(self):
"""Parses the (possibly qualified) name of a relation-owned object.
A relation-owned object is either a column or a constraint. This method
parses such a name with up to two optional qualifiers (e.g., it is
possible in a SELECT statement with no table correlation clauses to
specify SCHEMA.TABLE.COLUMN). The method returns the parsed name as a
tuple with 3 elements (None is used for qualifiers which are missing).
"""
token1 = self._expect(TT.IDENTIFIER)
result = (None, None, token1.value)
if self._match('.'):
self._update_output(Token(TT.RELATION, token1.value, token1.source, token1.line, token1.column), -2)
token2 = self._expect(TT.IDENTIFIER)
result = (None, result[2], token2.value)
if self._match('.'):
self._update_output(Token(TT.SCHEMA, token1.value, token1.source, token1.line, token1.column), -4)
self._update_output(Token(TT.RELATION, token2.value, token2.source, token2.line, token2.column), -2)
token3 = self._expect(TT.IDENTIFIER)
result = (result[1], result[2], token3.value)
return result
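# For example (sketch): the input SCHEMA.TABLE.COLUMN yields
# ('SCHEMA', 'TABLE', 'COLUMN'), while a bare COLUMN yields
# (None, None, 'COLUMN').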
_parse_column_name = _parse_subrelation_name
_parse_constraint_name = _parse_subrelation_name
# These are cheats; remote object names consist of server.schema.object
# instead of schema.relation.object, and source object names consist of
# schema.package.object, but they'll do
_parse_remote_object_name = _parse_subrelation_name
_parse_source_object_name = _parse_subrelation_name
# These are also cheats; routine, type and variables names as of 9.7 are
# either [schema.]routine (1 or 2-part) or schema.module.routine (3-part)
_parse_function_name = _parse_subrelation_name
_parse_procedure_name = _parse_subrelation_name
_parse_method_name = _parse_subrelation_name
_parse_type_name = _parse_subrelation_name
_parse_variable_name = _parse_subrelation_name
def _parse_subschema_name(self):
"""Parses the (possibly qualified) name of a schema-owned object.
A schema-owned object is a table, view, index, function, sequence, etc.
This method parses such a name with an optional qualifier (the schema
name). The method returns the parsed name as a tuple with 2 elements
(None is used for the schema qualifier if it is missing).
"""
token1 = self._expect(TT.RELATION)
result = (None, token1.value)
if self._match('.'):
self._update_output(Token(TT.SCHEMA, token1.value, token1.source, token1.line, token1.column), -2)
token2 = self._expect(TT.RELATION)
result = (result[1], token2.value)
return result
_parse_relation_name = _parse_subschema_name
_parse_table_name = _parse_subschema_name
_parse_view_name = _parse_subschema_name
_parse_alias_name = _parse_subschema_name
_parse_nickname_name = _parse_subschema_name
_parse_trigger_name = _parse_subschema_name
_parse_index_name = _parse_subschema_name
_parse_routine_name = _parse_subschema_name
_parse_module_name = _parse_subschema_name
_parse_sequence_name = _parse_subschema_name
# Another cheat; security labels exist within a security policy
_parse_security_label_name = _parse_subschema_name
def _parse_size(self, optional=False, suffix={}):
"""Parses a parenthesized size with an optional scale suffix.
This method parses a parenthesized integer number. The optional
parameter controls whether an exception is raised if an opening
parenthesis is not encountered at the current input position. The
suffix parameter is a dictionary mapping suffix->multiplier. The global
constant SUFFIX_KMG defines a commonly used suffix mapping (K->1024,
M->1024**2, etc.)
"""
if optional:
if not self._match('(', prespace=False):
return None
else:
self._expect('(', prespace=False)
size = self._expect(TT.NUMBER)[1]
if suffix:
suf = self._match_one_of(suffix.keys())
if suf:
size *= suffix[suf[1]]
self._expect(')')
return size
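# Example (sketch): with suffix=SUFFIX_KMG the input '(64 K)' yields
# 64 * 1024 = 65536, while a plain '(10)' yields 10 and, when optional=True,
# a missing opening parenthesis yields None.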
def _parse_special_register(self):
"""Parses a special register (e.g. CURRENT_DATE)"""
if self._match((TT.REGISTER, 'CURRENT')):
if self._match((TT.REGISTER, 'TIMESTAMP')):
if self._match('('):
self._expect_sequence([TT.INTEGER, ')'])
elif self._match_one_of([
(TT.REGISTER, 'CLIENT_ACCTNG'),
(TT.REGISTER, 'CLIENT_APPLNAME'),
(TT.REGISTER, 'CLIENT_USERID'),
(TT.REGISTER, 'CLIENT_WRKSTNNAME'),
(TT.REGISTER, 'DATE'),
(TT.REGISTER, 'DBPARTITIONNUM'),
(TT.REGISTER, 'DEGREE'),
(TT.REGISTER, 'ISOLATION'),
(TT.REGISTER, 'NODE'),
(TT.REGISTER, 'PATH'),
(TT.REGISTER, 'SCHEMA'),
(TT.REGISTER, 'SERVER'),
(TT.REGISTER, 'SQLID'),
(TT.REGISTER, 'TIME'),
(TT.REGISTER, 'TIMEZONE'),
(TT.REGISTER, 'USER'),
]):
pass
elif self._match_sequence([
(TT.REGISTER, 'DECFLOAT'),
(TT.REGISTER, 'ROUNDING'),
(TT.REGISTER, 'MODE')
]):
pass
elif self._match_sequence([
(TT.REGISTER, 'DEFAULT'),
(TT.REGISTER, 'TRANSFORM'),
(TT.REGISTER, 'GROUP')
]):
pass
elif self._match((TT.REGISTER, 'EXPLAIN')):
self._expect_one_of([
(TT.REGISTER, 'MODE'),
(TT.REGISTER, 'SNAPSHOT')
])
elif self._match_sequence([
(TT.REGISTER, 'FEDERATED'),
(TT.REGISTER, 'ASYNCHRONY')
]):
pass
elif self._match_sequence([
(TT.REGISTER, 'IMPLICIT'),
(TT.REGISTER, 'XMLPARSE'),
(TT.REGISTER, 'OPTION')]
):
pass
elif self._match_sequence([
(TT.REGISTER, 'LOCALE'),
(TT.REGISTER, 'LC_MESSAGES')
]):
pass
elif self._match_sequence([
(TT.REGISTER, 'LOCALE'),
(TT.REGISTER, 'LC_TIME')
]):
pass
elif self._match_sequence([
(TT.REGISTER, 'LOCK'),
(TT.REGISTER, 'TIMEOUT')
]):
pass
elif self._match_sequence([
(TT.REGISTER, 'MAINTAINED'),
(TT.REGISTER, 'TABLE'),
(TT.REGISTER, 'TYPES'),
(TT.REGISTER, 'FOR'),
(TT.REGISTER, 'OPTIMIZATION')
]):
pass
elif self._match_sequence([
(TT.REGISTER, 'MDC'),
(TT.REGISTER, 'ROLLOUT'),
(TT.REGISTER, 'MODE')
]):
pass
elif self._match_sequence([
(TT.REGISTER, 'OPTIMIZATION'),
(TT.REGISTER, 'PROFILE')
]):
pass
elif self._match_sequence([
(TT.REGISTER, 'PACKAGE'),
(TT.REGISTER, 'PATH')
]):
pass
elif self._match_sequence([
(TT.REGISTER, 'QUERY'),
(TT.REGISTER, 'OPTIMIZATION')
]):
pass
elif self._match_sequence([
(TT.REGISTER, 'REFRESH'),
(TT.REGISTER, 'AGE')
]):
pass
else:
self._expected((TT.REGISTER,))
elif self._match((TT.REGISTER, 'CLIENT')):
self._expect_one_of([
(TT.REGISTER, 'ACCTNG'),
(TT.REGISTER, 'APPLNAME'),
(TT.REGISTER, 'USERID'),
(TT.REGISTER, 'WRKSTNNAME'),
])
elif self._match((TT.REGISTER, 'CURRENT_TIMESTAMP')):
if self._match('('):
self._expect_sequence([TT.INTEGER, ')'])
else:
self._expect_one_of([
(TT.REGISTER, 'CURRENT_DATE'),
(TT.REGISTER, 'CURRENT_PATH'),
(TT.REGISTER, 'CURRENT_SCHEMA'),
(TT.REGISTER, 'CURRENT_SERVER'),
(TT.REGISTER, 'CURRENT_TIME'),
(TT.REGISTER, 'CURRENT_TIMEZONE'),
(TT.REGISTER, 'CURRENT_USER'),
(TT.REGISTER, 'SESSION_USER'),
(TT.REGISTER, 'SYSTEM_USER'),
(TT.REGISTER, 'USER'),
])
def _parse_datatype(self):
"""Parses a (possibly qualified) data type with optional arguments.
Parses a data type name with an optional qualifier (the schema name).
The method returns a tuple with the following structure:
(schema_name, type_name, size, scale)
If the type has no parameters size and/or scale may be None. If the
schema is not specified, schema_name is None, unless the type is a
builtin type in which case the schema_name will always be 'SYSIBM'
regardless of whether a schema was specified with the type in the
source.
"""
self._save_state()
try:
# Try and parse a built-in type
typeschema = 'SYSIBM'
size = None
scale = None
# Match the optional SYSIBM prefix
if self._match((TT.DATATYPE, 'SYSIBM')):
self._expect('.')
if self._match((TT.DATATYPE, 'SMALLINT')):
typename = 'SMALLINT'
elif self._match_one_of([(TT.DATATYPE, 'INT'), (TT.DATATYPE, 'INTEGER')]):
typename = 'INTEGER'
elif self._match((TT.DATATYPE, 'BIGINT')):
typename = 'BIGINT'
elif self._match((TT.DATATYPE, 'FLOAT')):
size = self._parse_size(optional=True)
if size is None or size > 24:
typename = 'DOUBLE'
else:
typename = 'REAL'
elif self._match((TT.DATATYPE, 'REAL')):
typename = 'REAL'
elif self._match((TT.DATATYPE, 'DOUBLE')):
self._match((TT.DATATYPE, 'PRECISION'))
typename = 'DOUBLE'
elif self._match((TT.DATATYPE, 'DECFLOAT')):
typename = 'DECFLOAT'
size = self._parse_size(optional=True) or DECFLOAT_DEFAULT_SIZE
elif self._match_one_of([(TT.DATATYPE, 'DEC'), (TT.DATATYPE, 'DECIMAL')]):
typename = 'DECIMAL'
size = DECIMAL_DEFAULT_SIZE
scale = DECIMAL_DEFAULT_SCALE
if self._match('(', prespace=False):
size = self._expect(TT.NUMBER).value
if self._match(','):
scale = self._expect(TT.NUMBER).value
self._expect(')')
elif self._match_one_of([(TT.DATATYPE, 'NUM'), (TT.DATATYPE, 'NUMERIC')]):
typename = 'NUMERIC'
size = DECIMAL_DEFAULT_SIZE
scale = DECIMAL_DEFAULT_SCALE
if self._match('(', prespace=False):
size = self._expect(TT.NUMBER).value
if self._match(','):
scale = self._expect(TT.NUMBER).value
self._expect(')')
elif self._match_one_of([(TT.DATATYPE, 'CHAR'), (TT.DATATYPE, 'CHARACTER')]):
if self._match((TT.DATATYPE, 'VARYING')):
typename = 'VARCHAR'
size = self._parse_size(optional=False, suffix=SUFFIX_KMG)
self._match_sequence(['FOR', 'BIT', 'DATA'])
elif self._match_sequence([(TT.DATATYPE, 'LARGE'), (TT.DATATYPE, 'OBJECT')]):
typename = 'CLOB'
size = self._parse_size(optional=True, suffix=SUFFIX_KMG) or BLOB_DEFAULT_SIZE
else:
typename = 'CHAR'
size = self._parse_size(optional=True, suffix=SUFFIX_KMG) or CHAR_DEFAULT_SIZE
self._match_sequence(['FOR', 'BIT', 'DATA'])
elif self._match((TT.DATATYPE, 'VARCHAR')):
typename = 'VARCHAR'
size = self._parse_size(optional=False, suffix=SUFFIX_KMG)
self._match_sequence(['FOR', 'BIT', 'DATA'])
elif self._match((TT.DATATYPE, 'VARGRAPHIC')):
typename = 'VARGRAPHIC'
size = self._parse_size(optional=False)
elif self._match_sequence([(TT.DATATYPE, 'LONG'), (TT.DATATYPE, 'VARCHAR')]):
typename = 'LONG VARCHAR'
elif self._match_sequence([(TT.DATATYPE, 'LONG'), (TT.DATATYPE, 'VARGRAPHIC')]):
typename = 'LONG VARGRAPHIC'
elif self._match((TT.DATATYPE, 'CLOB')):
typename = 'CLOB'
size = self._parse_size(optional=True, suffix=SUFFIX_KMG) or BLOB_DEFAULT_SIZE
elif self._match((TT.DATATYPE, 'BLOB')):
typename = 'BLOB'
size = self._parse_size(optional=True, suffix=SUFFIX_KMG) or BLOB_DEFAULT_SIZE
elif self._match_sequence([(TT.DATATYPE, 'BINARY'), (TT.DATATYPE, 'LARGE'), (TT.DATATYPE, 'OBJECT')]):
typename = 'BLOB'
size = self._parse_size(optional=True, suffix=SUFFIX_KMG) or BLOB_DEFAULT_SIZE
elif self._match((TT.DATATYPE, 'DBCLOB')):
typename = 'DBCLOB'
size = self._parse_size(optional=True, suffix=SUFFIX_KMG) or BLOB_DEFAULT_SIZE
elif self._match((TT.DATATYPE, 'GRAPHIC')):
typename = 'GRAPHIC'
size = self._parse_size(optional=True) or CHAR_DEFAULT_SIZE
elif self._match((TT.DATATYPE, 'NCHAR')):
typename = 'NCHAR'
if self._match((TT.DATATYPE, 'VARYING')):
typename = 'NVARCHAR'
size = self._parse_size(optional=False)
else:
typename = 'NCHAR'
size = self._parse_size(optional=True) or CHAR_DEFAULT_SIZE
elif self._match((TT.DATATYPE, 'NATIONAL')):
self._expect_one_of([(TT.DATATYPE, 'CHAR'), (TT.DATATYPE, 'CHARACTER')])
if self._match((TT.DATATYPE, 'VARYING')):
typename = 'NVARCHAR'
size = self._parse_size(optional=False)
else:
typename = 'NCHAR'
size = self._parse_size(optional=True) or CHAR_DEFAULT_SIZE
elif self._match((TT.DATATYPE, 'DATE')):
typename = 'DATE'
elif self._match((TT.DATATYPE, 'TIME')):
typename = 'TIME'
elif self._match((TT.DATATYPE, 'TIMESTAMP')):
typename = 'TIMESTAMP'
size = self._parse_size(optional=True) or TIMESTAMP_DEFAULT_SIZE
elif self._match((TT.DATATYPE, 'DATALINK')):
typename = 'DATALINK'
size = self._parse_size(optional=True)
elif self._match((TT.DATATYPE, 'XML')):
typename = 'XML'
elif self._match((TT.DATATYPE, 'DB2SECURITYLABEL')):
typeschema = 'SYSPROC'
typename = 'DB2SECURITYLABEL'
elif self._match((TT.DATATYPE, 'BOOLEAN')):
typename = 'BOOLEAN'
elif self._match((TT.DATATYPE, 'CURSOR')):
typename = 'CURSOR'
elif self._match((TT.DATATYPE, 'ARRAY')):
typename = 'ARRAY'
size = self._parse_size(optional=False, suffix=SUFFIX_KMG)
else:
raise ParseBacktrack()
except ParseError:
# If that fails, rewind and parse a user-defined type (user defined
# types do not have a size or scale)
self._restore_state()
typeschema = None
typename = self._expect(TT.DATATYPE).value
if self._match('.'):
typeschema = typename
typename = self._expect(TT.DATATYPE).value
size = None
scale = None
else:
self._forget_state()
return (typeschema, typename, size, scale)
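# Example results (sketch): 'VARCHAR(32 K)' parses to
# ('SYSIBM', 'VARCHAR', 32768, None), while a user-defined type such as
# MYSCHEMA.MONEY parses to ('MYSCHEMA', 'MONEY', None, None).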
def _parse_ident_list(self, newlines=False):
"""Parses a comma separated list of identifiers.
This is a common pattern in SQL, for example within parentheses on the
left hand side of an assignment in an UPDATE statement, or the INCLUDE
list of a CREATE UNIQUE INDEX statement.
The method returns a list of the identifiers seen (primarily useful for
counting the number of identifiers seen, but has other uses too).
"""
result = []
while True:
ident = self._expect(TT.IDENTIFIER).value
# Parse an optional array element suffix
if self._match('[', prespace=False):
self._parse_expression()
self._expect(']')
result.append(ident)
if not self._match(','):
break
elif newlines:
self._newline()
return result
def _parse_expression_list(self, allowdefault=False, newlines=False):
"""Parses a comma separated list of expressions.
This is a common pattern in SQL, for example the parameter list of
a function, the arguments of an ORDER BY clause, etc. The allowdefault
parameter indicates whether DEFAULT can appear in the list instead
of an expression (useful when parsing the VALUES clause of an INSERT
statement for example).
"""
while True:
if not (allowdefault and self._match('DEFAULT')):
self._parse_expression()
if not self._match(','):
break
elif newlines:
self._newline()
def _parse_datatype_list(self, newlines=False):
"""Parses a comma separated list of data-types.
This is another common pattern in SQL, found when trying to define
the prototype of a function or procedure without using the specific
name (and a few other places).
"""
while True:
self._parse_datatype()
if not self._match(','):
break
elif newlines:
self._newline()
def _parse_ident_type_list(self, newlines=False):
"""Parses a comma separated list of identifiers and data-types.
This is a common pattern in SQL, found in the prototype of SQL
functions, the INCLUDE portion of a SELECT-FROM-DML statement, etc.
"""
while True:
self._expect(TT.IDENTIFIER)
self._parse_datatype()
if not self._match(','):
break
elif newlines:
self._newline()
def _parse_tuple(self, allowdefault=False):
"""Parses a full-select or a tuple (list) of expressions.
This is a common pattern found in SQL, for example on the right hand
side of the IN operator, in an UPDATE statement on the right hand side
of a parenthesized column list, etc. The easiest way to implement
this is by saving the current parser state, attempting to parse a
full-select, rewinding the state if this fails and parsing a tuple
of expressions.
The allowdefault parameter is propagated to parse_expression_list. See
parse_expression_list for more detail.
"""
# Opening parenthesis already matched
if self._peek_one_of(['SELECT', 'VALUES']):
# Parse a full-select
self._indent()
self._parse_full_select()
self._outdent()
else:
# Everything else (including a redundantly parenthesized
# full-select) can be parsed as an expression list
self._parse_expression_list(allowdefault)
# EXPRESSIONS and PREDICATES #############################################
def _parse_search_condition(self, newlines=True):
"""Parse a search condition (as part of WHERE/HAVING/etc.)"""
while True:
self._match('NOT')
# Ambiguity: open parentheses could indicate a parenthesized search
# condition, or a parenthesized expression within a predicate
self._save_state()
try:
# Attempt to parse a parenthesized search condition
self._expect('(')
self._parse_search_condition(newlines)
self._expect(')')
except ParseError:
# If that fails, rewind and parse a predicate instead (which
# will parse a parenthesized expression)
self._restore_state()
self._parse_predicate()
if self._match('SELECTIVITY'):
self._expect(TT.NUMBER)
else:
self._forget_state()
if self._match_one_of(['AND', 'OR']):
if newlines:
self._newline(-1)
else:
break
def _parse_predicate(self):
"""Parse high precedence predicate operators (BETWEEN, IN, etc.)"""
if self._match('EXISTS'):
self._expect('(')
self._parse_full_select()
self._expect(')')
else:
self._parse_expression()
if self._match('NOT'):
if self._match('LIKE'):
self._parse_expression()
if self._match('ESCAPE'):
self._parse_expression()
elif self._match('BETWEEN'):
self._parse_expression()
self._expect('AND')
self._parse_expression()
elif self._match('IN'):
if self._match('('):
self._parse_tuple()
self._expect(')')
else:
self._parse_expression()
else:
self._expected_one_of(['LIKE', 'BETWEEN', 'IN'])
elif self._match('LIKE'):
self._parse_expression()
if self._match('ESCAPE'):
self._parse_expression()
elif self._match('BETWEEN'):
self._parse_expression()
self._expect('AND')
self._parse_expression()
elif self._match('IN'):
if self._match('('):
self._parse_tuple()
self._expect(')')
else:
self._parse_expression()
elif self._match('IS'):
self._match('NOT')
if self._match('VALIDATED'):
if self._match('ACCORDING'):
self._expect_sequence(['TO', 'XMLSCHEMA'])
if self._match('IN'):
self._expect('(')
while True:
self._parse_xml_schema_identification()
if not self._match(','):
break
self._expect(')')
else:
self._parse_xml_schema_identification()
else:
self._expect_one_of(['NULL', 'VALIDATED'])
elif self._match('XMLEXISTS'):
self._expect('(')
self._expect(TT.STRING)
if self._match('PASSING'):
self._match_sequence(['BY', 'REF'])
while True:
self._parse_expression()
self._expect_sequence(['AS', TT.IDENTIFIER])
self._match_sequence(['BY', 'REF'])
if not self._match(','):
break
self._expect(')')
elif self._match_one_of(['=', '<', '>', '<>', '<=', '>=']):
if self._match_one_of(['SOME', 'ANY', 'ALL']):
self._expect('(')
self._parse_full_select()
self._expect(')')
else:
self._parse_expression()
else:
self._expected_one_of([
'EXISTS',
'NOT',
'LIKE',
'BETWEEN',
'IS',
'IN',
'=',
'<',
'>',
'<>',
'<=',
'>='
])
def _parse_duration_label(self, optional=False):
labels = (
'YEARS',
'YEAR',
'DAYS',
'DAY',
'MONTHS',
'MONTH',
'HOURS',
'HOUR',
'MINUTES',
'MINUTE',
'SECONDS',
'SECOND',
'MICROSECONDS',
'MICROSECOND',
)
if optional:
self._match_one_of(labels)
else:
self._expect_one_of(labels)
def _parse_expression(self):
while True:
self._match_one_of(['+', '-'], postspace=False) # Unary +/-
if self._match('('):
self._parse_tuple()
self._expect(')')
elif self._match('CAST'):
self._parse_cast_expression()
elif self._match('XMLCAST'):
self._parse_cast_expression()
elif self._match('CASE'):
if self._match('WHEN'):
self._parse_searched_case()
else:
self._parse_simple_case()
elif self._match_sequence(['NEXT', 'VALUE', 'FOR']) or self._match_sequence(['NEXTVAL', 'FOR']):
self._parse_sequence_name()
elif self._match_sequence(['PREVIOUS', 'VALUE', 'FOR']) or self._match_sequence(['PREVVAL', 'FOR']):
self._parse_sequence_name()
elif self._match_sequence(['ROW', 'CHANGE']):
self._expect_one_of(['TOKEN', 'TIMESTAMP'])
self._expect('FOR')
self._parse_table_name()
elif self._match_one_of([TT.NUMBER, TT.STRING, TT.PARAMETER, 'NULL']): # Literals
pass
else:
# Ambiguity: an identifier could be a register, a function
# call, a column name, etc.
self._save_state()
try:
self._parse_function_call()
except ParseError:
self._restore_state()
self._save_state()
try:
self._parse_special_register()
except ParseError:
self._restore_state()
self._parse_column_name()
else:
self._forget_state()
else:
self._forget_state()
# Parse an optional array element suffix
if self._match('[', prespace=False):
self._parse_expression()
self._expect(']')
# Parse an optional interval suffix
self._parse_duration_label(optional=True)
if not self._match_one_of(['+', '-', '*', '/', '||', 'CONCAT']): # Binary operators
break
def _parse_function_call(self):
"""Parses a function call of various types"""
# Ambiguity: certain functions have "abnormal" internal syntaxes (extra
# keywords, etc). The _parse_scalar_function_call method is used to
# handle all "normal" syntaxes. Special methods are tried first for
# everything else
self._save_state()
try:
self._parse_aggregate_function_call()
except ParseError:
self._restore_state()
self._save_state()
try:
self._parse_olap_function_call()
except ParseError:
self._restore_state()
self._save_state()
try:
self._parse_xml_function_call()
except ParseError:
self._restore_state()
self._save_state()
try:
self._parse_sql_function_call()
except ParseError:
self._restore_state()
self._parse_scalar_function_call()
else:
self._forget_state()
else:
self._forget_state()
else:
self._forget_state()
else:
self._forget_state()
def _parse_aggregate_function_call(self):
"""Parses an aggregate function with it's optional arg-prefix"""
# Parse the optional SYSIBM schema prefix
if self._match('SYSIBM'):
self._expect('.')
# Although CORRELATION and GROUPING are aggregate functions, they're not
# included here as their syntax is entirely compatible with "ordinary"
# functions, so _parse_scalar_function_call will handle them
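# Illustrative examples of the syntax accepted here (all identifiers
# are hypothetical):
#   COUNT(*)
#   SUM(DISTINCT amount)
#   ARRAY_AGG(name ORDER BY hired DESC)
#   MAX(salary) OVER (PARTITION BY dept)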
aggfunc = self._expect_one_of([
'ARRAY_AGG',
'COUNT',
'COUNT_BIG',
'AVG',
'MAX',
'MIN',
'STDDEV',
'SUM',
'VARIANCE',
'VAR',
]).value
self._expect('(', prespace=False)
if aggfunc in ('COUNT', 'COUNT_BIG') and self._match('*'):
# COUNT and COUNT_BIG can take '*' as a sole parameter
pass
elif aggfunc == 'ARRAY_AGG':
self._parse_expression()
if self._match_sequence(['ORDER', 'BY']):
while True:
self._parse_expression()
self._match_one_of(['ASC', 'DESC'])
if not self._match(','):
break
else:
# The aggregation functions handled by this method have an optional
# ALL/DISTINCT argument prefix
self._match_one_of(['ALL', 'DISTINCT'])
# And only take a single expression as an argument
self._parse_expression()
self._expect(')')
# Parse an OLAP suffix if one exists
if self._match('OVER'):
self._parse_olap_window_clause()
def _parse_olap_function_call(self):
"""Parses an OLAP function call (some of which have non-standard internal syntax)"""
if self._match('SYSIBM'):
self._expect('.')
olapfunc = self._expect_one_of([
'ROW_NUMBER',
'RANK',
'DENSE_RANK',
'LAG',
'LEAD',
'FIRST_VALUE',
'LAST_VALUE',
]).value
self._expect('(', prespace=False)
if olapfunc in ('LAG', 'LEAD'):
self._parse_expression()
if self._match(','):
self._expect(TT.NUMBER)
if self._match(','):
self._parse_expression()
if self._match(','):
self._expect_one_of([(TT.STRING, 'RESPECT NULLS'), (TT.STRING, 'IGNORE NULLS')])
elif olapfunc in ('FIRST_VALUE', 'LAST_VALUE'):
self._parse_expression()
if self._match(','):
self._expect_one_of([(TT.STRING, 'RESPECT NULLS'), (TT.STRING, 'IGNORE NULLS')])
self._expect(')')
self._expect('OVER')
self._parse_olap_window_clause()
def _parse_xml_function_call(self):
"""Parses an XML function call (which has non-standard internal syntax)"""
# Parse the optional SYSIBM schema prefix
if self._match('SYSIBM'):
self._expect('.')
# Note that XML2CLOB (compatibility), XMLCOMMENT, XMLCONCAT,
# XMLDOCUMENT, XMLTEXT, and XMLXSROBJECTID aren't handled by this
# method as their syntax is "normal" so _parse_scalar_function_call
# will handle them
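# Illustrative examples (element and column names are hypothetical):
#   XMLELEMENT(NAME "employee", XMLATTRIBUTES(empno AS "id"), lastname)
#   XMLPARSE(DOCUMENT '<doc/>' PRESERVE WHITESPACE)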
xmlfunc = self._expect_one_of([
'XMLAGG',
'XMLATTRIBUTES',
'XMLELEMENT',
'XMLFOREST',
'XMLGROUP',
'XMLNAMESPACES',
'XMLPARSE',
'XMLPI',
'XMLQUERY',
'XMLROW',
'XMLSERIALIZE',
'XMLVALIDATE',
'XMLTABLE',
'XMLTRANSFORM',
]).value
self._expect('(', prespace=False)
if xmlfunc == 'XMLAGG':
self._parse_expression()
if self._match_sequence(['ORDER', 'BY']):
while True:
self._parse_expression()
self._match_one_of(['ASC', 'DESC'])
if not self._match(','):
break
elif xmlfunc == 'XMLATTRIBUTES':
while True:
self._parse_expression()
if self._match('AS'):
self._expect(TT.IDENTIFIER)
if not self._match(','):
break
elif xmlfunc == 'XMLELEMENT':
self._expect('NAME')
self._expect(TT.IDENTIFIER)
if self._match(','):
# XXX We're not specifically checking for namespaces and
# attributes calls as we should here (although expression_list
# will parse them just fine)
self._parse_expression_list()
if self._match('OPTION'):
self._parse_xml_value_option()
elif xmlfunc == 'XMLFOREST':
while True:
# XXX We're not specifically checking for a namespaces call as
# we should here (although expression will parse it just fine)
self._parse_expression()
self._match_sequence(['AS', TT.IDENTIFIER])
if not self._match(','):
break
if self._match('OPTION'):
self._parse_xml_value_option()
elif xmlfunc == 'XMLGROUP':
while True:
self._parse_expression()
if self._match('AS'):
self._expect(TT.IDENTIFIER)
if not self._match(','):
break
if self._match_sequence(['ORDER', 'BY']):
while True:
self._parse_expression()
self._match_one_of(['ASC', 'DESC'])
if not self._match(','):
break
if self._match('OPTION'):
self._parse_xml_row_option(allowroot=True)
elif xmlfunc == 'XMLNAMESPACES':
while True:
if self._match('DEFAULT'):
self._expect(TT.STRING)
elif self._match('NO'):
self._expect_sequence(['DEFAULT', TT.STRING])
else:
self._expect_sequence([TT.STRING, 'AS', TT.IDENTIFIER])
if not self._match(','):
break
elif xmlfunc == 'XMLPARSE':
self._expect_sequence(['DOCUMENT', TT.STRING])
if self._match_one_of(['STRIP', 'PRESERVE']):
self._expect('WHITESPACE')
elif xmlfunc == 'XMLPI':
self._expect_sequence(['NAME', TT.IDENTIFIER])
if self._match(','):
self._expect(TT.STRING)
elif xmlfunc == 'XMLQUERY':
self._expect(TT.STRING)
if self._match('PASSING'):
self._match_sequence(['BY', 'REF'])
while True:
self._parse_expression()
self._expect_sequence(['AS', TT.IDENTIFIER])
self._match_sequence(['BY', 'REF'])
if not self._match(','):
break
if self._match('RETURNING'):
self._expect('SEQUENCE')
self._match_sequence(['BY', 'REF'])
self._match_sequence(['EMPTY', 'ON', 'EMPTY'])
elif xmlfunc == 'XMLROW':
while True:
self._parse_expression()
self._match_sequence(['AS', TT.IDENTIFIER])
if not self._match(','):
break
if self._match('OPTION'):
self._parse_xml_row_option(allowroot=False)
elif xmlfunc == 'XMLSERIALIZE':
self._match('CONTENT')
self._parse_expression()
self._expect('AS')
# XXX Data type can only be CHAR/VARCHAR/CLOB
self._parse_datatype()
valid = set(['VERSION', 'INCLUDING', 'EXCLUDING'])
while valid:
t = self._match_one_of(valid)
if t:
t = t.value
valid.remove(t)
else:
break
if t == 'VERSION':
self._expect(TT.STRING)
elif t == 'INCLUDING':
valid.remove('EXCLUDING')
self._expect('XMLDECLARATION')
elif t == 'EXCLUDING':
valid.remove('INCLUDING')
self._expect('XMLDECLARATION')
elif xmlfunc == 'XMLVALIDATE':
self._match('DOCUMENT')
self._parse_expression()
if self._match('ACCORDING'):
self._expect_sequence(['TO', 'XMLSCHEMA'])
self._parse_xml_schema_identification()
if self._match('NAMESPACE'):
self._expect(TT.STRING)
elif self._match('NO'):
self._expect('NAMESPACE')
self._match_sequence(['ELEMENT', TT.IDENTIFIER])
elif xmlfunc == 'XMLTABLE':
self._parse_expression()
if self._match(','):
self._expect(TT.STRING)
if self._match('PASSING'):
self._match_sequence(['BY', 'REF'])
while True:
self._parse_expression()
self._expect_sequence(['AS', TT.IDENTIFIER])
self._match_sequence(['BY', 'REF'])
if not self._match(','):
break
if self._match('COLUMNS'):
while True:
self._expect(TT.IDENTIFIER)
if not self._match_sequence(['FOR', 'ORDINALITY']):
self._parse_datatype()
self._match_sequence(['BY', 'REF'])
if self._match('DEFAULT'):
self._parse_expression()
if self._match('PATH'):
self._expect(TT.STRING)
if not self._match(','):
break
elif xmlfunc == 'XMLTRANSFORM':
self._parse_expression()
self._expect('USING')
self._parse_expression()
if self._match('WITH'):
self._parse_expression()
if self._match('AS'):
self._parse_datatype()
self._expect(')')
def _parse_xml_schema_identification(self):
"""Parses an identifier for an XML schema"""
# ACCORDING TO XMLSCHEMA already matched
if self._match('ID'):
self._parse_subschema_name()
else:
if self._match('URI'):
self._expect(TT.STRING)
elif self._match('NO'):
self._expect('NAMESPACE')
else:
self._expected_one_of(['ID', 'URI', 'NO'])
self._match_sequence(['LOCATION', TT.STRING])
def _parse_xml_row_option(self, allowroot=False):
"""Parses an XML OPTION suffix for rows in certain XML function calls"""
# OPTION already matched
valid = set(['ROW', 'AS'])
if allowroot:
valid.add('ROOT')
while valid:
t = self._match_one_of(valid)
if t:
t = t.value
valid.remove(t)
else:
break
if t in ('ROW', 'ROOT'):
self._expect(TT.IDENTIFIER)
elif t == 'AS':
self._expect('ATTRIBUTES')
def _parse_xml_value_option(self):
"""Parses an XML OPTION suffix for scalar values in certain XML function calls"""
# OPTION already matched
valid = set(['EMPTY', 'NULL', 'XMLBINARY'])
while valid:
t = self._match_one_of(valid)
if t:
t = t.value
valid.remove(t)
else:
break
if t == 'EMPTY':
valid.remove('NULL')
self._expect_sequence(['ON', 'NULL'])
elif t == 'NULL':
valid.remove('EMPTY')
self._expect_sequence(['ON', 'NULL'])
elif t == 'XMLBINARY':
self._match('USING')
self._expect_one_of(['BASE64', 'HEX'])
def _parse_sql_function_call(self):
"""Parses scalar function calls with abnormal internal syntax (usually as dictated by the SQL standard)"""
# Parse the optional SYSIBM schema prefix
if self._match('SYSIBM'):
self._expect('.')
# Note that only the "special" syntax of functions is handled here.
# Most of these functions will also accept "normal" syntax. In that
# case, this method will raise a parse error and the caller will
# backtrack to handle the function as normal with
# _parse_scalar_function_call
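# Illustrative examples of the special syntaxes (column names are
# hypothetical):
#   SUBSTRING(name FROM 1 FOR 3 USING CODEUNITS16)
#   POSITION('x' IN name USING OCTETS)
#   TRIM(LEADING '0' FROM code)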
sqlfunc = self._expect_one_of([
'CHAR_LENGTH',
'CHARACTER_LENGTH',
'OVERLAY',
'POSITION',
'SUBSTRING',
'TRIM',
]).value
self._expect('(', prespace=False)
if sqlfunc in ('CHAR_LENGTH', 'CHARACTER_LENGTH'):
self._parse_expression()
if self._match('USING'):
self._expect_one_of(['CODEUNITS16', 'CODEUNITS32', 'OCTETS'])
elif sqlfunc == 'OVERLAY':
self._parse_expression()
self._expect('PLACING')
self._parse_expression()
self._expect('FROM')
self._parse_expression()
if self._match('FOR'):
self._parse_expression()
self._expect('USING')
self._expect_one_of(['CODEUNITS16', 'CODEUNITS32', 'OCTETS'])
elif sqlfunc == 'POSITION':
self._parse_expression()
self._expect('IN')
self._parse_expression()
self._expect('USING')
self._expect_one_of(['CODEUNITS16', 'CODEUNITS32', 'OCTETS'])
elif sqlfunc == 'SUBSTRING':
self._parse_expression()
self._expect('FROM')
self._parse_expression()
if self._match('FOR'):
self._parse_expression()
self._expect('USING')
self._expect_one_of(['CODEUNITS16', 'CODEUNITS32', 'OCTETS'])
elif sqlfunc == 'TRIM':
if self._match_one_of(['BOTH', 'B', 'LEADING', 'L', 'TRAILING', 'T']):
self._match(TT.STRING)
self._expect('FROM')
self._parse_expression()
self._expect(')')
def _parse_scalar_function_call(self):
"""Parses a scalar function call with all its arguments"""
self._parse_function_name()
self._expect('(', prespace=False)
if not self._match(')'):
self._parse_expression_list()
self._expect(')')
def _parse_olap_range(self, optional):
"""Parses a ROWS or RANGE specification in an OLAP-function call"""
# [ROWS|RANGE] already matched
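# e.g. "UNBOUNDED PRECEDING", "5 FOLLOWING", or "CURRENT ROW" (illustrative)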
if self._match('CURRENT'):
self._expect('ROW')
elif self._match_one_of(['UNBOUNDED', TT.NUMBER]):
self._expect_one_of(['PRECEDING', 'FOLLOWING'])
elif not optional:
self._expected_one_of(['CURRENT', 'UNBOUNDED', TT.NUMBER])
else:
return False
return True
def _parse_olap_window_clause(self):
"""Parses the aggregation suffix in an OLAP-function call"""
# OVER already matched
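# Illustrative example (identifiers are hypothetical):
#   (PARTITION BY dept ORDER BY salary DESC NULLS LAST
#    ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)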
self._expect('(')
if not self._match(')'):
self._indent()
if self._match('PARTITION'):
self._expect('BY')
self._parse_expression_list()
if self._match('ORDER'):
self._newline(-1)
self._expect('BY')
while True:
if self._match('ORDER'):
self._expect('OF')
self._parse_table_name()
else:
self._parse_expression()
self._match_one_of(['ASC', 'DESC'])
if self._match('NULLS'):
self._expect_one_of(['FIRST', 'LAST'])
if not self._match(','):
break
if self._match_one_of(['ROWS', 'RANGE']):
if not self._parse_olap_range(True):
self._expect('BETWEEN')
self._parse_olap_range(False)
self._expect('AND')
self._parse_olap_range(False)
self._outdent()
self._expect(')')
def _parse_cast_expression(self):
"""Parses a CAST() expression"""
# CAST already matched
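# e.g. CAST(price AS DECIMAL(9, 2)) -- "price" is a hypothetical column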
self._expect('(', prespace=False)
self._parse_expression()
self._expect('AS')
self._parse_datatype()
if self._match('SCOPE'):
self._parse_relation_name()
self._expect(')')
def _parse_searched_case(self):
"""Parses a searched CASE expression (CASE WHEN expression...)"""
# CASE WHEN already matched
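# Illustrative example of the full expression ("x" is hypothetical):
#   CASE WHEN x > 0 THEN 'pos' WHEN x < 0 THEN 'neg' ELSE 'zero' END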
# Parse all WHEN cases
self._indent(-1)
while True:
self._parse_search_condition(newlines=False) # WHEN Search condition
self._expect('THEN')
self._parse_expression() # THEN Expression
if self._match('WHEN'):
self._newline(-1)
elif self._match('ELSE'):
self._newline(-1)
break
elif self._match('END'):
self._outdent(-1)
return
else:
self._expected_one_of(['WHEN', 'ELSE', 'END'])
# Parse the optional ELSE case
self._parse_expression() # ELSE Expression
self._outdent()
self._expect('END')
def _parse_simple_case(self):
"""Parses a simple CASE expression (CASE expression WHEN value...)"""
# CASE already matched
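# Illustrative example ("status" is a hypothetical column):
#   CASE status WHEN 1 THEN 'new' WHEN 2 THEN 'open' ELSE 'closed' END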
# Parse the CASE Expression
self._parse_expression() # CASE Expression
# Parse all WHEN cases
self._indent()
self._expect('WHEN')
while True:
self._parse_expression() # WHEN Expression
self._expect('THEN')
self._parse_expression() # THEN Expression
if self._match('WHEN'):
self._newline(-1)
elif self._match('ELSE'):
self._newline(-1)
break
elif self._match('END'):
self._outdent(-1)
return
else:
self._expected_one_of(['WHEN', 'ELSE', 'END'])
# Parse the optional ELSE case
self._parse_expression() # ELSE Expression
self._outdent()
self._expect('END')
def _parse_column_expression(self):
"""Parses an expression representing a column in a SELECT expression"""
if not self._match_sequence([TT.IDENTIFIER, '.', '*']):
self._parse_expression()
# Parse optional column alias
if self._match('AS'):
self._expect(TT.IDENTIFIER)
# Ambiguity: FROM and INTO can legitimately appear in this
# position as a KEYWORD (which the IDENTIFIER match below would
# accept)
elif not self._peek_one_of(['FROM', 'INTO']):
self._match(TT.IDENTIFIER)
def _parse_grouping_expression(self):
"""Parses a grouping-expression in a GROUP BY clause"""
if not self._match_sequence(['(', ')']):
self._parse_expression()
def _parse_super_group(self):
"""Parses a super-group in a GROUP BY clause"""
# [ROLLUP|CUBE] already matched
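# e.g. ROLLUP (year, (month, day)) -- hypothetical columns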
self._expect('(')
self._indent()
while True:
if self._match('('):
self._parse_expression_list()
self._expect(')')
else:
self._parse_expression()
if not self._match(','):
break
else:
self._newline()
self._outdent()
self._expect(')')
def _parse_grouping_sets(self):
"""Parses a GROUPING SETS expression in a GROUP BY clause"""
# GROUPING SETS already matched
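# e.g. GROUPING SETS ((year, month), ROLLUP (region)) -- hypothetical columns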
self._expect('(')
self._indent()
while True:
if self._match('('):
while True:
if self._match_one_of(['ROLLUP', 'CUBE']):
self._parse_super_group()
else:
self._parse_grouping_expression()
if not self._match(','):
break
self._expect(')')
elif self._match_one_of(['ROLLUP', 'CUBE']):
self._parse_super_group()
else:
self._parse_grouping_expression()
if not self._match(','):
break
else:
self._newline()
self._outdent()
self._expect(')')
def _parse_group_by(self):
"""Parses the grouping-expression-list of a GROUP BY clause"""
# GROUP BY already matched
alt_syntax = True
while True:
if self._match('GROUPING'):
self._expect('SETS')
self._parse_grouping_sets()
alt_syntax = False
elif self._match_one_of(['ROLLUP', 'CUBE']):
self._parse_super_group()
alt_syntax = False
else:
self._parse_grouping_expression()
if not self._match(','):
break
else:
self._newline()
# Ambiguity: the WITH used in the alternate syntax for super-groups
# can be mistaken for the WITH defining isolation level at the end
# of a query. Hence we must use a sequence match here...
if alt_syntax:
if not self._match_sequence(['WITH', 'ROLLUP']):
self._match_sequence(['WITH', 'CUBE'])
def _parse_sub_select(self, allowinto=False):
"""Parses a sub-select expression"""
# SELECT already matched
self._match_one_of(['ALL', 'DISTINCT'])
if not self._match('*'):
self._indent()
while True:
self._parse_column_expression()
if not self._match(','):
break
else:
self._newline()
self._outdent()
if allowinto and self._match('INTO'):
self._indent()
self._parse_ident_list(newlines=True)
self._outdent()
self._expect('FROM')
self._indent()
while True:
self._parse_join_expression()
if not self._match(','):
break
else:
self._newline()
self._outdent()
if self._match('WHERE'):
self._indent()
self._parse_search_condition()
self._outdent()
if self._match_sequence(['GROUP', 'BY']):
self._indent()
self._parse_group_by()
self._outdent()
if self._match('HAVING'):
self._indent()
self._parse_search_condition()
self._outdent()
if self._match_sequence(['ORDER', 'BY']):
self._indent()
while True:
self._parse_expression()
self._match_one_of(['ASC', 'DESC'])
if not self._match(','):
break
else:
self._newline()
self._outdent()
if self._match_sequence(['FETCH', 'FIRST']):
self._match(TT.NUMBER) # Row count is optional (defaults to 1)
self._expect_one_of(['ROW', 'ROWS'])
self._expect('ONLY')
def _parse_table_correlation(self, optional=True):
"""Parses a table correlation clause (with optional column alias list)"""
if optional:
# An optional table correlation is almost always ambiguous given
# that it can start with just about any identifier (the AS is
# always optional)
self._save_state()
try:
# Call ourselves recursively to try and parse the correlation
self._parse_table_correlation(False)
except ParseError:
# If it fails, rewind and return
self._restore_state()
else:
self._forget_state()
else:
if self._match('AS'):
self._expect(TT.IDENTIFIER)
# Ambiguity: Several KEYWORDs can legitimately appear in this
# position. XXX This is horrible - there /must/ be a cleaner way of
# doing this with states and backtracking
elif not self._peek_one_of([
'DO',
'EXCEPT',
'MINUS',
'FETCH',
'GROUP',
'HAVING',
'CROSS',
'LEFT',
'RIGHT',
'FULL',
'INNER',
'JOIN',
'NATURAL',
'INTERSECT',
'ON',
'ORDER',
'SET',
'TABLESAMPLE',
'UNION',
'USING',
'WHERE',
'WITH',
]):
self._expect(TT.IDENTIFIER)
# Parse optional column aliases
if self._match('('):
self._parse_ident_list()
self._expect(')')
def _parse_values_expression(self, allowdefault=False, allowinto=False):
"""Parses a VALUES expression"""
# VALUES already matched
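# Illustrative examples:
#   VALUES (1, 'a'), (2, 'b')
#   VALUES 1, 2, 3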
self._indent()
while True:
if self._match('('):
self._parse_expression_list(allowdefault)
self._expect(')')
else:
if not (allowdefault and self._match('DEFAULT')):
self._parse_expression()
if self._match(','):
self._newline()
else:
break
self._outdent()
if allowinto and self._match('INTO'):
self._indent()
self._parse_ident_list(newlines=True)
self._outdent()
def _parse_join_expression(self):
"""Parses join operators in a table-reference"""
self._parse_table_ref()
while True:
if self._match('CROSS'):
self._newline(-1)
self._expect('JOIN')
self._parse_table_ref()
elif self._match('INNER'):
self._newline(-1)
self._expect('JOIN')
self._parse_table_ref()
self._parse_join_condition()
elif self._match_one_of(['LEFT', 'RIGHT', 'FULL']):
self._newline(-1)
self._match('OUTER')
self._expect('JOIN')
self._parse_table_ref()
self._parse_join_condition()
elif self._match('JOIN'):
self._newline(-1)
self._parse_table_ref()
self._parse_join_condition()
else:
break
def _parse_lateral_options(self):
"""Parses the RETURN DATA UNTIL options of a LATERAL/TABLE reference"""
if self._match_sequence(['RETURN', 'DATA', 'UNTIL']):
while True:
self._expect_sequence(['FEDERATED', 'SQLSTATE'])
self._match('VALUE')
self._expect(TT.STRING)
if self._match('SQLCODE'):
while True:
self._expect(TT.NUMBER)
if not self._match(','):
break
if not self._match(','):
break
return True
else:
return False
def _parse_table_ref(self):
"""Parses literal table references or functions in a table-reference"""
# Ambiguity: A table or schema can be named TABLE, FINAL, OLD, etc.
reraise = False
self._save_state()
try:
if self._match('('):
# Ambiguity: Open-parenthesis could indicate a full-select or a
# join expression
self._save_state()
try:
# Try and parse a full-select
self._parse_full_select()
reraise = True
self._expect(')')
self._parse_table_correlation(optional=True)
except ParseError:
# If it fails, rewind and try a join expression instead
self._restore_state()
if reraise: raise
self._parse_join_expression()
self._expect(')')
else:
self._forget_state()
elif self._match('LATERAL'):
self._parse_lateral_options()
self._expect('(', prespace=False)
self._indent()
self._parse_full_select()
self._outdent()
self._expect(')')
self._parse_table_correlation(optional=True)
elif self._match('TABLE'):
lateral = self._parse_lateral_options()
self._expect('(', prespace=False)
# Ambiguity: TABLE() can indicate a table-function call or a
# nested table expression
self._save_state()
try:
# Try and parse a full-select
self._indent()
self._parse_full_select()
self._outdent()
except ParseError:
# If it fails, rewind and try a function call instead
self._restore_state()
if lateral: raise
self._parse_function_call()
else:
self._forget_state()
reraise = True
self._expect(')')
self._parse_table_correlation(optional=True)
elif self._match_one_of(['FINAL', 'NEW']):
self._expect('TABLE')
self._expect('(', prespace=False)
self._indent()
if self._expect_one_of(['INSERT', 'UPDATE']).value == 'INSERT':
self._parse_insert_statement()
else:
self._parse_update_statement()
reraise = True
self._outdent()
self._expect(')')
self._parse_table_correlation(optional=True)
elif self._match('OLD'):
self._expect('TABLE')
self._expect('(', prespace=False)
self._indent()
if self._expect_one_of(['UPDATE', 'DELETE']).value == 'DELETE':
self._parse_delete_statement()
else:
self._parse_update_statement()
reraise = True
self._outdent()
self._expect(')')
self._parse_table_correlation(optional=True)
elif self._match('UNNEST'):
self._expect('(', prespace=False)
self._indent()
while True:
if self._match('CAST'):
self._parse_cast_expression()
else:
self._expect(TT.IDENTIFIER)
if not self._match(','):
break
self._outdent()
self._expect(')')
self._parse_table_correlation(optional=False)
elif self._peek('XMLTABLE'):
# Bizarrely, the XMLTABLE table function can be used outside a
# TABLE() reference...
self._parse_xml_function_call()
else:
raise ParseBacktrack()
except ParseError:
# If the above fails, rewind and try a simple table reference
self._restore_state()
if reraise: raise
self._parse_table_name()
self._parse_table_correlation(optional=True)
if self._match('TABLESAMPLE'):
self._expect_one_of(['BERNOULLI', 'SYSTEM'])
self._expect('(')
self._parse_expression()
self._expect(')')
if self._match('REPEATABLE'):
self._expect('(')
self._parse_expression()
self._expect(')')
else:
self._forget_state()
def _parse_join_condition(self):
"""Parses the condition on an SQL-92 style join"""
# This method can be extended to support USING(ident-list) if this
# is ever added to DB2 (see PostgreSQL)
self._indent()
self._expect('ON')
self._parse_search_condition()
self._outdent()
def _parse_full_select(self, allowdefault=False, allowinto=False):
"""Parses set operators (low precedence) in a full-select expression"""
self._parse_relation(allowdefault, allowinto)
while True:
if self._match_one_of(['UNION', 'INTERSECT', 'EXCEPT', 'MINUS']):
self._newline(-1)
self._newline(-1, allowempty=True)
self._match('ALL')
self._newline()
self._newline(allowempty=True)
# No need to include allowinto here (it's only permitted in a
# top-level subselect)
self._parse_relation(allowdefault)
else:
break
if self._match('ORDER'):
self._expect('BY')
while True:
self._parse_expression()
self._match_one_of(['ASC', 'DESC'])
if not self._match(','):
break
if self._match('FETCH'):
self._expect('FIRST')
self._match(TT.NUMBER) # Row count is optional (defaults to 1)
self._expect_one_of(['ROW', 'ROWS'])
self._expect('ONLY')
def _parse_relation(self, allowdefault=False, allowinto=False):
"""Parses relation generators (high precedence) in a full-select expression"""
# XXX Add support for the TABLE statement from the SQL standard
if self._match('('):
self._indent()
# No need to include allowinto here (it's only permitted in a
# top-level subselect)
self._parse_full_select(allowdefault)
self._outdent()
self._expect(')')
elif self._match('SELECT'):
self._parse_sub_select(allowinto)
elif self._match('VALUES'):
self._parse_values_expression(allowdefault, allowinto)
else:
self._expected_one_of(['SELECT', 'VALUES', '('])
def _parse_query(self, allowdefault=False, allowinto=False):
"""Parses a full-select with optional common-table-expression"""
# Parse the optional common-table-expression
if self._match('WITH'):
while True:
self._expect(TT.IDENTIFIER)
# Parse the optional column-alias list
if self._match('('):
self._indent()
self._parse_ident_list(newlines=True)
self._outdent()
self._expect(')')
self._expect('AS')
self._expect('(')
self._indent()
# No need to include allowdefault or allowinto here. Neither
# are ever permitted in a CTE
self._parse_full_select()
self._outdent()
self._expect(')')
if not self._match(','):
break
else:
self._newline()
self._newline()
# Parse the actual full-select. DEFAULT may be permitted here if the
# full-select turns out to be a VALUES statement
self._parse_full_select(allowdefault, allowinto)
# CLAUSES ################################################################
def _parse_assignment_clause(self, allowdefault):
"""Parses a SET clause"""
# SET already matched
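# Illustrative examples (column names are hypothetical):
#   SET c1 = c1 + 1, c2 = 'done'
#   SET (c1, c2) = (1, DEFAULT)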
while True:
if self._match('('):
# Parse tuple assignment
while True:
self._parse_subrelation_name()
if not self._match(','):
break
self._expect_sequence([')', '=', '('])
self._parse_tuple(allowdefault=True)
self._expect(')')
else:
# Parse simple assignment
self._parse_subrelation_name()
if self._match('['):
self._parse_expression()
self._expect(']')
if self._match('.'):
self._expect(TT.IDENTIFIER)
self._expect('=')
if self._match('ARRAY'):
self._expect('[', prespace=False)
# Ambiguity: Expression list vs. select-statement
self._save_state()
try:
self._parse_expression_list()
except ParseError:
self._restore_state()
self._parse_full_select()
else:
self._forget_state()
self._expect(']')
elif not (allowdefault and self._match('DEFAULT')):
self._parse_expression()
if not self._match(','):
break
else:
self._newline()
def _parse_identity_options(self, alter=None):
"""Parses options for an IDENTITY column"""
# AS IDENTITY already matched
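# e.g. START WITH 1 INCREMENT BY 1 CACHE 20 NO CYCLE (illustrative)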
# Build a couple of lists of options which have not yet been seen
validno = [
'MINVALUE',
'MAXVALUE',
'CACHE',
'CYCLE',
'ORDER',
]
valid = validno + ['INCREMENT', 'NO']
if alter is None:
valid = valid + ['START']
elif alter == 'SEQUENCE':
valid = valid + ['RESTART']
# XXX Allow backward compatibility options here? Backward
# compatibility options include comma separation of arguments, and
# NOMINVALUE instead of NO MINVALUE, etc.
while valid:
if alter == 'COLUMN':
if self._match('RESTART'):
if self._match('WITH'):
self._expect(TT.NUMBER)
continue
elif self._match('SET'):
t = self._expect_one_of(valid).value
if t != 'NO': valid.remove(t)
if t in validno: validno.remove(t)
else:
break
else:
t = self._match_one_of(valid)
if t:
t = t.value
if t != 'NO': valid.remove(t)
if t in validno: validno.remove(t)
else:
break
if t == 'START':
self._expect_sequence(['WITH', TT.NUMBER])
elif t == 'RESTART':
if self._match('WITH'):
self._expect(TT.NUMBER)
elif t == 'INCREMENT':
self._expect_sequence(['BY', TT.NUMBER])
elif t in ('MINVALUE', 'MAXVALUE', 'CACHE'):
self._expect(TT.NUMBER)
elif t in ('CYCLE', 'ORDER'):
pass
elif t == 'NO':
t = self._expect_one_of(validno).value
validno.remove(t)
valid.remove(t)
def _parse_column_definition(self, aligntypes=False, alignoptions=False, federated=False):
"""Parses a column definition in a CREATE TABLE statement"""
# Parse a column definition
self._expect(TT.IDENTIFIER)
if aligntypes:
self._valign()
self._parse_datatype()
if alignoptions and not self._peek_one_of([',', ')']):
self._valign()
# Parse column options
while True:
if self._match('NOT'):
self._expect_one_of(['NULL', 'LOGGED', 'COMPACT', 'HIDDEN'])
elif self._match('LOGGED'):
pass
elif self._match('COMPACT'):
pass
elif self._match('WITH'):
self._expect('DEFAULT')
self._save_state()
try:
self._parse_expression()
except ParseError:
self._restore_state()
else:
self._forget_state()
elif self._match('DEFAULT'):
self._save_state()
try:
self._parse_expression()
except ParseError:
self._restore_state()
else:
self._forget_state()
elif self._match('GENERATED'):
if self._expect_one_of(['ALWAYS', 'BY']).value == 'BY':
self._expect('DEFAULT')
if self._match('AS'):
if self._match('IDENTITY'):
if self._match('('):
self._parse_identity_options()
self._expect(')')
elif self._match('('):
self._parse_expression()
self._expect(')')
else:
self._expected_one_of(['IDENTITY', '('])
else:
self._expect_sequence(['FOR', 'EACH', 'ROW', 'ON', 'UPDATE', 'AS', 'ROW', 'CHANGE', 'TIMESTAMP'])
elif self._match('INLINE'):
self._expect_sequence(['LENGTH', TT.NUMBER])
elif self._match('COMPRESS'):
self._expect_sequence(['SYSTEM', 'DEFAULT'])
elif self._match('COLUMN'):
self._expect_sequence(['SECURED', 'WITH', TT.IDENTIFIER])
elif self._match('SECURED'):
self._expect_sequence(['WITH', TT.IDENTIFIER])
elif self._match('IMPLICITLY'):
self._expect('HIDDEN')
elif federated and self._match('OPTIONS'):
self._parse_federated_options()
else:
self._save_state()
try:
self._parse_column_constraint()
except ParseError:
self._restore_state()
break
else:
self._forget_state()
def _parse_column_constraint(self):
"""Parses a constraint attached to a specific column in a CREATE TABLE statement"""
# Parse the optional constraint name
if self._match('CONSTRAINT'):
self._expect(TT.IDENTIFIER)
# Parse the constraint definition
if self._match('PRIMARY'):
self._expect('KEY')
elif self._match('UNIQUE'):
pass
elif self._match('REFERENCES'):
self._parse_table_name()
if self._match('(', prespace=False):
self._expect(TT.IDENTIFIER)
self._expect(')')
t = ['DELETE', 'UPDATE']
for i in xrange(2):
if self._match('ON'):
t.remove(self._expect_one_of(t).value)
if self._match('NO'):
self._expect('ACTION')
elif self._match('SET'):
self._expect('NULL')
elif self._match_one_of(['RESTRICT', 'CASCADE']):
pass
else:
self._expected_one_of([
'RESTRICT',
'CASCADE',
'NO',
'SET'
])
else:
break
elif self._match('CHECK'):
self._expect('(')
# Ambiguity: check constraint can be a search condition or a
# functional dependency. Try the search condition first
self._save_state()
try:
self._parse_search_condition()
except ParseError:
self._restore_state()
if self._match('('):
self._parse_ident_list()
self._expect(')')
else:
self._expect(TT.IDENTIFIER)
self._expect_sequence(['DETERMINED', 'BY'])
if self._match('('):
self._parse_ident_list()
self._expect(')')
else:
self._expect(TT.IDENTIFIER)
else:
self._forget_state()
self._expect(')')
else:
self._expected_one_of([
'CONSTRAINT',
'PRIMARY',
'UNIQUE',
'REFERENCES',
'CHECK'
])
def _parse_table_constraint(self):
"""Parses a constraint attached to a table in a CREATE TABLE statement"""
if self._match('CONSTRAINT'):
self._expect(TT.IDENTIFIER)
if self._match('PRIMARY'):
self._expect('KEY')
self._expect('(')
self._parse_ident_list()
self._expect(')')
elif self._match('UNIQUE'):
self._expect('(')
self._parse_ident_list()
self._expect(')')
elif self._match('FOREIGN'):
self._expect('KEY')
self._expect('(')
self._parse_ident_list()
self._expect(')')
self._expect('REFERENCES')
self._parse_subschema_name()
self._expect('(', prespace=False)
self._parse_ident_list()
self._expect(')')
t = ['DELETE', 'UPDATE']
for i in xrange(2):
if self._match('ON'):
t.remove(self._expect_one_of(t).value)
if self._match('NO'):
self._expect('ACTION')
elif self._match('SET'):
self._expect('NULL')
elif self._match_one_of(['RESTRICT', 'CASCADE']):
pass
else:
self._expected_one_of([
'RESTRICT',
'CASCADE',
'NO',
'SET'
])
else:
break
elif self._match('CHECK'):
self._expect('(')
# Ambiguity: check constraint can be a search condition or a
# functional dependency. Try the search condition first
self._save_state()
try:
self._parse_search_condition(newlines=False)
except ParseError:
self._restore_state()
if self._match('('):
self._parse_ident_list()
self._expect(')')
else:
self._expect(TT.IDENTIFIER)
self._expect_sequence(['DETERMINED', 'BY'])
if self._match('('):
self._parse_ident_list()
self._expect(')')
else:
self._expect(TT.IDENTIFIER)
else:
self._forget_state()
self._expect(')')
else:
self._expected_one_of([
'CONSTRAINT',
'PRIMARY',
'UNIQUE',
'FOREIGN',
'CHECK'
])
def _parse_table_definition(self, aligntypes=False, alignoptions=False, federated=False):
"""Parses a table definition (list of columns and constraints)"""
self._expect('(')
self._indent()
while True:
self._save_state()
try:
# Try parsing a table constraint definition
self._parse_table_constraint()
except ParseError:
# If that fails, rewind and try and parse a column definition
self._restore_state()
self._parse_column_definition(aligntypes=aligntypes, alignoptions=alignoptions, federated=federated)
else:
self._forget_state()
if not self._match(','):
break
else:
self._newline()
if aligntypes:
self._vapply()
if alignoptions:
self._vapply()
self._outdent()
self._expect(')')
def _parse_constraint_alteration(self):
"""Parses a constraint-alteration in an ALTER TABLE statement"""
# FOREIGN KEY/CHECK already matched
self._expect(TT.IDENTIFIER)
if self._match_one_of(['ENABLE', 'DISABLE']):
self._expect_sequence(['QUERY', 'OPTIMIZATION'])
else:
self._match('NOT')
self._expect('ENFORCED')
def _parse_column_alteration(self):
"""Parses a column-alteration in an ALTER TABLE statement"""
self._expect(TT.IDENTIFIER)
if self._match('DROP'):
if self._match('NOT'):
self._expect('NULL')
elif self._match('COLUMN'):
self._expect('SECURITY')
else:
self._expect_one_of([
'NOT',
'COLUMN',
'IDENTITY',
'DEFAULT',
'EXPRESSION'
])
elif self._match('COMPRESS'):
if self._match('SYSTEM'):
self._expect('DEFAULT')
else:
self._expect('OFF')
elif self._match('SECURED'):
self._expect_sequence(['WITH', TT.IDENTIFIER])
else:
# Ambiguity: SET can introduce several different alterations
self._save_state()
try:
# Try and parse SET (DATA TYPE | EXPRESSION | INLINE LENGTH | GENERATED)
self._expect('SET')
if self._match('DATA'):
self._expect('TYPE')
self._parse_datatype()
elif self._match('EXPRESSION'):
self._expect('AS')
self._expect('(')
self._parse_expression()
self._expect(')')
elif self._match('INLINE'):
self._expect_sequence(['LENGTH', TT.NUMBER])
elif self._match('GENERATED'):
if self._expect_one_of(['BY', 'ALWAYS']).value == 'BY':
self._expect('DEFAULT')
self._expect('AS')
if self._match('IDENTITY'):
if self._match('('):
self._parse_identity_options()
self._expect(')')
elif self._match('('):
self._parse_expression()
self._expect(')')
else:
self._expected_one_of(['IDENTITY', '('])
elif self._match('NOT'):
self._expect('NULL')
else:
raise ParseBacktrack()
except ParseBacktrack:
# NOTE: This exception block is only called on a ParseBacktrack
# error. Other parse errors will propagate outward. If the
# above SET clauses didn't match, try an identity-alteration.
self._restore_state()
self._parse_identity_options(alter='COLUMN')
else:
self._forget_state()
def _parse_federated_column_alteration(self):
"""Parses a column-alteration in an ALTER NICKNAME statement"""
self._expect(TT.IDENTIFIER)
while True:
if self._match('LOCAL'):
if self._match('NAME'):
self._expect(TT.IDENTIFIER)
elif self._match('TYPE'):
self._parse_datatype()
elif self._match('OPTIONS'):
self._parse_federated_options(alter=True)
if not self._match(','):
break
def _parse_auth_list(self):
"""Parses an authorization list in a GRANT or REVOKE statement"""
# [TO|FROM] already matched
while True:
if not self._match('PUBLIC'):
self._match_one_of(['USER', 'GROUP', 'ROLE'])
self._expect(TT.IDENTIFIER)
if not self._match(','):
break
def _parse_grant_revoke(self, grant):
"""Parses the body of a GRANT or REVOKE statement"""
# [GRANT|REVOKE] already matched
# Parse any preamble
seclabel = False
if self._match('ROLE'):
pass
elif self._match_sequence(['SECURITY', 'LABEL']):
seclabel = grant
# Parse the privilege list
while True:
priv = self._expect(TT.IDENTIFIER)
if priv.value in ('REFERENCES', 'UPDATE'):
if self._match('('):
self._parse_ident_list()
self._expect(')')
elif priv.value == 'DBADM':
while self._match_one_of(['WITH', 'WITHOUT']):
self._expect_one_of(['DATAACCESS', 'ACCESSCTRL'])
elif priv.value == 'DB2LBACWRITEARRAY':
self._expect_one_of(['WRITEDOWN', 'WRITEUP'])
elif priv.value == 'ALL':
self._match('PRIVILEGES')
break
if not self._match(','):
break
# Parse the target list
if self._match('OF'):
self._expect_sequence(['TABLESPACE', TT.IDENTIFIER])
elif self._match('ON'):
while True:
if self._match('DATABASE'):
break
elif self._match('RULE'):
if self._expect_one_of([
'DB2LBACREADARRAY',
'DB2LBACREADSET',
'DB2LBACREADTREE',
'DB2LBACWRITEARRAY',
'DB2LBACWRITESET',
'DB2LBACWRITETREE',
'ALL'
]).value == 'DB2LBACWRITEARRAY':
self._expect_one_of(['WRITEDOWN', 'WRITEUP'])
self._expect_sequence(['FOR', TT.IDENTIFIER])
break
elif self._match('VARIABLE'):
self._parse_variable_name()
break
elif self._match('INDEX'):
self._parse_index_name()
break
elif self._match('MODULE'):
self._parse_module_name()
break
elif self._match_one_of(['PROGRAM', 'PACKAGE']):
self._parse_subschema_name()
break
elif self._match_one_of(['FUNCTION', 'PROCEDURE']):
# Ambiguity: Can use schema.* or schema.name(prototype) here
if not self._match('*') and not self._match_sequence([TT.IDENTIFIER, '.', '*']):
self._parse_routine_name()
if self._match('(', prespace=False):
self._parse_datatype_list()
self._expect(')')
break
elif self._match('SPECIFIC'):
self._expect_one_of(['FUNCTION', 'PROCEDURE'])
self._parse_routine_name()
break
elif self._match('SCHEMA'):
self._expect(TT.IDENTIFIER)
break
elif self._match('SEQUENCE'):
self._parse_sequence_name()
break
elif self._match('SERVER'):
self._expect(TT.IDENTIFIER)
break
elif self._match('USER'):
self._expect(TT.IDENTIFIER)
elif self._match('PUBLIC'):
pass
elif self._match('TABLE'):
self._parse_table_name()
break
elif self._match('WORKLOAD'):
self._expect(TT.IDENTIFIER)
break
elif self._match('XSROBJECT'):
self._parse_subschema_name()
break
else:
self._parse_table_name()
break
if not self._match(','):
break
# Parse the grantee(s)
# XXX The following is a bit lax, but again, adhering strictly to the
# documented grammar results in a ridiculously complex parser
self._expect(['FROM', 'TO'][grant])
self._parse_auth_list()
if seclabel:
if self._match('FOR'):
self._expect_one_of(['ALL', 'READ', 'WRITE'])
self._expect('ACCESS')
elif grant:
if self._match('WITH'):
self._expect_one_of(['GRANT', 'ADMIN'])
self._expect('OPTION')
else:
self._match_sequence(['BY', 'ALL'])
self._match('RESTRICT')
def _parse_tablespace_size_attributes(self):
"""Parses DMS size attributes in a CREATE TABLESPACE statement"""
if self._match('AUTORESIZE'):
self._expect_one_of(['NO', 'YES'])
if self._match('INITIALSIZE'):
self._expect(TT.NUMBER)
self._expect_one_of(['K', 'M', 'G'])
if self._match('INCREASESIZE'):
self._expect(TT.NUMBER)
self._expect_one_of(['K', 'M', 'G', 'PERCENT'])
if self._match('MAXSIZE'):
if not self._match('NONE'):
self._expect(TT.NUMBER)
self._expect_one_of(['K', 'M', 'G'])
def _parse_database_container_clause(self, size=True):
"""Parses a container clause for a DMS tablespace"""
self._expect('(')
while True:
self._expect_one_of(['FILE', 'DEVICE'])
self._expect(TT.STRING)
if size:
self._expect(TT.NUMBER)
self._match_one_of(['K', 'M', 'G'])
if not self._match(','):
break
self._expect(')')
def _parse_system_container_clause(self):
"""Parses a container clause for an SMS tablespace"""
self._expect('(')
while True:
self._expect(TT.STRING)
if not self._match(','):
break
self._expect(')')
def _parse_db_partition_clause(self):
"""Parses a DBPARTITIONNUM clause in various statements"""
if not self._match('GLOBAL'):
if self._match('AT'):
self._expect_one_of(['DBPARTITIONNUM', 'NODE'])
self._expect(TT.NUMBER)
def _parse_db_partition_list_clause(self, size=False):
"""Parses an DBPARTITIONNUM clause in various statements"""
self._expect_one_of([
'DBPARTITIONNUM',
'DBPARTITIONNUMS',
'NODE', # compatibility option
'NODES', # compatibility option
])
self._expect('(')
while True:
self._expect(TT.NUMBER)
self._match_sequence(['TO', TT.NUMBER])
if size:
self._expect_sequence(['SIZE', TT.NUMBER])
if not self._match(','):
break
self._expect(')')
def _parse_db_partitions_clause(self):
"""Parses a DBPARTITIONNUM list clause in various statements"""
if self._match('ON'):
if self._match('ALL'):
self._expect_one_of(['DBPARTITIONNUMS', 'NODES'])
if self._match('EXCEPT'):
self._parse_db_partition_list_clause(size=False)
else:
self._parse_db_partition_list_clause(size=False)
def _parse_function_predicates_clause(self):
"""Parses the PREDICATES clause in a CREATE FUNCTION statement"""
# PREDICATES already matched
# The surrounding parentheses seem to be optional (although the syntax
# diagram in the DB2 Info Center implies otherwise)
parens = self._match('(')
self._expect('WHEN')
self._match_one_of(['=', '<>', '<', '>', '<=', '>='])
if self._match('EXPRESSION'):
self._expect_sequence(['AS', TT.IDENTIFIER])
else:
self._parse_expression()
valid = ['SEARCH', 'FILTER']
while valid:
t = self._match_one_of(valid)
if t:
t = t.value
valid.remove(t)
else:
break
if t == 'SEARCH':
self._expect('BY')
self._match('EXACT')
self._expect('INDEX')
self._expect('EXTENSION')
self._parse_index_name()
self._expect('WHEN')
while True:
self._expect_sequence(['KEY', '(', TT.IDENTIFIER, ')', 'USE', TT.IDENTIFIER, '('])
self._parse_ident_list()
self._expect(')')
if not self._match('WHEN'):
break
elif t == 'FILTER':
self._expect('USING')
if self._match('CASE'):
if self._match('WHEN'):
self._parse_searched_case()
else:
self._parse_simple_case()
else:
self._parse_scalar_function_call()
if parens:
self._expect(')')
def _parse_federated_options(self, alter=False):
"""Parses an OPTIONS list for a federated object"""
# OPTIONS already matched
self._expect('(')
while True:
if alter and self._match('DROP'):
self._expect(TT.IDENTIFIER)
else:
if alter:
self._match_one_of(['ADD', 'SET'])
else:
self._match('ADD')
self._expect(TT.IDENTIFIER)
self._expect(TT.STRING)
if not self._match(','):
break
self._expect(')')
def _parse_remote_server(self):
"""Parses a remote server specification"""
# SERVER already matched
if self._match('TYPE'):
self._expect(TT.IDENTIFIER)
if self._match('VERSION'):
self._parse_server_version()
if self._match('WRAPPER'):
self._expect(TT.IDENTIFIER)
else:
self._expect(TT.IDENTIFIER)
if self._match('VERSION'):
self._parse_server_version()
def _parse_server_version(self):
"""Parses a federated server version"""
# VERSION already matched
if self._match(TT.NUMBER):
if self._match('.'):
self._expect(TT.NUMBER)
if self._match('.'):
self._expect(TT.NUMBER)
elif self._match(TT.STRING):
pass
else:
self._expected_one_of([TT.NUMBER, TT.STRING])
def _parse_partition_boundary(self):
"""Parses a partition boundary in a PARTITION clause"""
if self._match('STARTING'):
self._match('FROM')
if self._match('('):
while True:
self._expect_one_of([TT.NUMBER, 'MINVALUE', 'MAXVALUE'])
if not self._match(','):
break
self._expect(')')
else:
self._expect_one_of([TT.NUMBER, 'MINVALUE', 'MAXVALUE'])
self._match_one_of(['INCLUSIVE', 'EXCLUSIVE'])
self._expect('ENDING')
self._match('AT')
if self._match('('):
while True:
self._expect_one_of([TT.NUMBER, 'MINVALUE', 'MAXVALUE'])
if not self._match(','):
break
self._expect(')')
else:
self._expect_one_of([TT.NUMBER, 'MINVALUE', 'MAXVALUE'])
self._match_one_of(['INCLUSIVE', 'EXCLUSIVE'])
def _parse_copy_options(self):
"""Parse copy options for CREATE TABLE... LIKE statements"""
# XXX Tidy this up (shouldn't just be a 2-time loop)
for i in xrange(2):
if self._match_one_of(['INCLUDING', 'EXCLUDING']):
if self._match('COLUMN'):
self._expect('DEFAULTS')
elif self._match('DEFAULTS'):
pass
elif self._match('IDENTITY'):
self._match_sequence(['COLUMN', 'ATTRIBUTES'])
def _parse_refreshable_table_options(self, alter=False):
"""Parses refreshable table options in a materialized query definition"""
if not alter and self._match('WITH'):
self._expect_sequence(['NO', 'DATA'])
self._parse_copy_options()
else:
valid = [
'DATA',
'REFRESH',
'ENABLE',
'DISABLE',
'MAINTAINED',
]
while valid:
t = self._match_one_of(valid)
if t:
t = t.value
valid.remove(t)
else:
break
if t == 'DATA':
self._expect_sequence(['INITIALLY', 'DEFERRED'])
elif t == 'REFRESH':
self._expect_one_of(['DEFERRED', 'IMMEDIATE'])
elif t in ('ENABLE', 'DISABLE'):
self._expect_sequence(['QUERY', 'OPTIMIZATION'])
if t == 'ENABLE':
valid.remove('DISABLE')
else:
valid.remove('ENABLE')
elif t == 'MAINTAINED':
self._expect('BY')
self._expect_one_of(['SYSTEM', 'USER', 'FEDERATED_TOOL'])
def _parse_action_types_clause(self):
"""Parses an action types clause in a WORK ACTION"""
if self._match('MAP'):
self._expect('ACTIVITY')
if self._match_one_of(['WITH', 'WITHOUT']):
self._expect('NESTED')
self._expect('TO')
self._expect(TT.IDENTIFIER)
elif self._match('WHEN'):
self._parse_threshold_predicate()
self._parse_threshold_exceeded_actions()
elif self._match('PREVENT'):
self._expect('EXECUTION')
elif self._match('COUNT'):
self._expect('ACTIVITY')
elif self._match('COLLECT'):
if self._match('ACTIVITY'):
self._expect('DATA')
self._parse_collect_activity_data_clause()
elif self._match('AGGREGATE'):
self._expect_sequence(['ACTIVITY', 'DATA'])
self._match_one_of(['BASE', 'EXTENDED'])
else:
self._expected_one_of(['MAP', 'WHEN', 'PREVENT', 'COUNT', 'COLLECT'])
def _parse_threshold_predicate(self):
"""Parses a threshold predicate in a WORK ACTION"""
if self._match_one_of([
'TOTALDBPARTITIONCONNECTIONS',
'CONCURRENTWORKLOADOCCURRENCES',
'CONCURRENTWORKLOADACTIVITIES',
'ESTIMATEDSQLCOST',
'SQLROWSRETURNED',
]):
self._expect_sequence(['>', TT.NUMBER])
elif self._match('TOTALSCPARTITIONCONNECTIONS'):
self._expect_sequence(['>', TT.NUMBER])
if self._match('QUEUEDCONNECTIONS'):
if self._match('>'):
self._expect(TT.NUMBER)
elif self._match('UNBOUNDED'):
pass
else:
self._expected_one_of(['>', 'UNBOUNDED'])
elif self._match('CONCURRENTDBCOORDACTIVITIES'):
self._expect_sequence(['>', TT.NUMBER])
if self._match('QUEUEDACTIVITIES'):
if self._match('>'):
self._expect(TT.NUMBER)
elif self._match('UNBOUNDED'):
pass
else:
self._expected_one_of(['>', 'UNBOUNDED'])
elif self._match_one_of([
'CONNECTIONIDLETIME',
'ACTIVITYTOTALTIME',
]):
self._expect_sequence(['>', TT.NUMBER])
self._expect_one_of([
'DAY',
'DAYS',
'HOUR',
'HOURS',
'MINUTE',
'MINUTES'
])
elif self._match('SQLTEMPSPACE'):
self._expect_sequence(['>', TT.NUMBER])
self._expect_one_of(['K', 'M', 'G'])
def _parse_threshold_exceeded_actions(self):
"""Parses a threshold exceeded actions clause in a WORK ACTION"""
if self._match_sequence(['COLLECT', 'ACTIVITY', 'DATA']):
self._parse_collect_activity_data_clause(alter=True)
if self._match('STOP'):
self._expect('EXECUTION')
elif not self._match('CONTINUE'):
self._expected_one_of(['STOP', 'CONTINUE'])
def _parse_collect_activity_data_clause(self, alter=False):
"""Parses a COLLECT ACTIVITY clause in an action clause"""
# COLLECT ACTIVITY DATA already matched
if not (alter and self._match('NONE')):
self._expect('ON')
if self._match('ALL'):
self._match_sequence(['DATABASE', 'PARTITIONS'])
elif self._match('COORDINATOR'):
self._match_sequence(['DATABASE', 'PARTITION'])
else:
self._expected_one_of(['ALL', 'COORDINATOR'])
if self._match('WITHOUT'):
self._expect('DETAILS')
elif self._match('WITH'):
self._expect('DETAILS')
if self._match('AND'):
self._expect('VALUES')
else:
self._expected_one_of(['WITHOUT', 'WITH'])
def _parse_histogram_template_clause(self):
"""Parses a history template clause in a WORK ACTION"""
if self._match('ACTIVITY'):
self._expect_one_of(['LIFETIME', 'QUEUETIME', 'EXECUTETIME', 'ESTIMATEDCOST', 'INTERARRIVALTIME'])
self._expect_sequence(['HISTOGRAM', 'TEMPLATE'])
self._expect_one_of(['SYSDEFAULTHISTOGRAM', TT.IDENTIFIER])
def _parse_work_attributes(self):
"""Parses a work attributes clause in a WORK CLASS"""
self._expect_sequence(['WORK', 'TYPE'])
if self._match_one_of(['READ', 'WRITE', 'DML']):
self._parse_for_from_to_clause()
elif self._match('ALL'):
if self._match('FOR'):
self._parse_for_from_to_clause()
if self._match('ROUTINES'):
self._parse_routines_in_schema_clause()
elif self._match('CALL'):
if self._match('ROUTINES'):
self._parse_routines_in_schema_clause()
elif not self._match_one_of(['DDL', 'LOAD']):
self._expected_one_of(['READ', 'WRITE', 'DML', 'DDL', 'LOAD', 'ALL', 'CALL'])
def _parse_for_from_to_clause(self, alter=False):
"""Parses a FOR .. FROM .. TO clause in a WORK CLASS definition"""
# FOR already matched
if alter and self._match('ALL'):
self._expect_sequence(['UNITS', 'UNBOUNDED'])
else:
self._expect_one_of(['TIMERONCOST', 'CARDINALITY'])
self._expect_sequence(['FROM', TT.NUMBER])
if self._match('TO'):
self._expect_one_of(['UNBOUNDED', TT.NUMBER])
def _parse_routines_in_schema_clause(self, alter=False):
"""Parses a schema clause in a WORK CLASS definition"""
# ROUTINES already matched
if alter and self._match('ALL'):
pass
else:
self._expect_sequence(['IN', 'SCHEMA', TT.IDENTIFIER])
def _parse_position_clause(self):
"""Parses a POSITION clause in a WORK CLASS definition"""
# POSITION already matched
if self._match('AT'):
self._expect(TT.NUMBER)
elif self._match_one_of(['BEFORE', 'AFTER']):
self._expect(TT.IDENTIFIER)
elif self._match('LAST'):
pass
else:
self._expected_one_of(['AT', 'BEFORE', 'AFTER', 'LAST'])
def _parse_connection_attributes(self):
"""Parses connection attributes in a WORKLOAD"""
if self._match_one_of([(TT.REGISTER, 'APPLNAME'), (TT.REGISTER, 'SYSTEM_USER')]):
pass
elif self._match((TT.REGISTER, 'SESSION_USER')):
self._match('GROUP')
elif self._match('CURRENT'):
self._expect_one_of([
(TT.REGISTER, 'CLIENT_USERID'),
(TT.REGISTER, 'CLIENT_APPLNAME'),
(TT.REGISTER, 'CLIENT_WRKSTNNAME'),
(TT.REGISTER, 'CLIENT_ACCTNG')
])
else:
self._expected_one_of(['APPLNAME', 'SYSTEM_USER', 'SESSION_USER', 'CURRENT'])
# Attribute values are a comma-separated list of strings, e.g. ('a', 'b')
self._expect('(')
while True:
if not self._match(TT.STRING):
self._expect(')')
break
if not self._match(','):
self._expect(')')
break
def _parse_audit_policy(self, alter=False):
"""Parses an AUDIT POLICY definition"""
valid = set(['CATEGORIES', 'ERROR'])
while valid:
t = self._match_one_of(valid)
if t:
t = t.value
valid.remove(t)
else:
break
if t == 'CATEGORIES':
while True:
if self._expect_one_of([
'ALL',
'AUDIT',
'CHECKING',
'CONTEXT',
'EXECUTE',
'OBJMAINT',
'SECMAINT',
'SYSADMIN',
'VALIDATE'
]).value == 'EXECUTE':
if self._match_one_of(['WITH', 'WITHOUT']):
self._expect('DATA')
self._expect('STATUS')
self._expect_one_of(['BOTH', 'FAILURE', 'NONE', 'SUCCESS'])
if not self._match(','):
break
elif t == 'ERROR':
self._expect('TYPE')
self._expect_one_of(['NORMAL', 'AUDIT'])
# If we're defining a new policy, ensure both terms are specified
if not alter and valid:
self._expected(valid.pop())
def _parse_evm_group(self):
"""Parses an event monitor group in a non-wlm event monitor definition"""
while True:
self._expect(TT.IDENTIFIER)
if self._match('('):
valid = set(['TABLE', 'IN', 'PCTDEACTIVATE', 'TRUNC', 'INCLUDES', 'EXCLUDES'])
while valid:
t = self._match_one_of(valid)
if t:
t = t.value
valid.remove(t)
else:
break
if t == 'TABLE':
self._parse_table_name()
elif t == 'IN':
self._expect(TT.IDENTIFIER)
elif t == 'PCTDEACTIVATE':
self._expect(TT.NUMBER)
elif t == 'TRUNC':
pass
elif t == 'INCLUDES' or t == 'EXCLUDES':
self._expect('(')
while True:
self._expect(TT.IDENTIFIER)
if not self._match(','):
break
self._expect(')')
self._expect(')')
if not self._match(','):
break
def _parse_evm_write_to(self):
"""Parses a WRITE TO clause in an event monitor definition"""
# WRITE TO already matched
if self._match('TABLE'):
valid = set(['BUFFERSIZE', 'BLOCKED', 'NONBLOCKED', 'evm-group'])
while valid:
t = self._match_one_of(valid)
if t:
t = t.value
valid.remove(t)
elif 'evm-group' in valid:
self._save_state()
try:
self._parse_evm_group()
valid.remove('evm-group')
except ParseError:
self._restore_state()
break
else:
self._forget_state()
else:
break
if t == 'BUFFERSIZE':
self._expect(TT.NUMBER)
elif t == 'BLOCKED':
valid.remove('NONBLOCKED')
elif t == 'NONBLOCKED':
valid.remove('BLOCKED')
elif self._match('PIPE'):
self._expect(TT.STRING)
elif self._match('FILE'):
self._expect(TT.STRING)
valid = set(['MAXFILES', 'MAXFILESIZE', 'BUFFERSIZE', 'BLOCKED', 'NONBLOCKED', 'APPEND', 'REPLACE'])
while valid:
t = self._match_one_of(valid)
if t:
t = t.value
valid.remove(t)
else:
break
if t == 'MAXFILES' or t == 'MAXFILESIZE':
self._expect_one_of(['NONE', TT.NUMBER])
elif t == 'BLOCKED':
valid.remove('NONBLOCKED')
elif t == 'NONBLOCKED':
valid.remove('BLOCKED')
elif t == 'APPEND':
valid.remove('REPLACE')
elif t == 'REPLACE':
valid.remove('APPEND')
else:
self._expected_one_of(['TABLE', 'PIPE', 'FILE'])
def _parse_evm_options(self):
"""Parses the options after an event monitor definition"""
valid = set(['WRITE', 'AUTOSTART', 'MANUALSTART', 'ON', 'LOCAL', 'GLOBAL'])
while valid:
t = self._match_one_of(valid)
if t:
t = t.value
valid.remove(t)
else:
break
if t == 'WRITE':
self._expect('TO')
self._parse_evm_write_to()
elif t == 'AUTOSTART':
valid.remove('MANUALSTART')
elif t == 'MANUALSTART':
valid.remove('AUTOSTART')
elif t == 'ON':
self._expect_one_of(['NODE', 'DBPARTITIONNUM'])
self._expect(TT.NUMBER)
elif t == 'LOCAL':
valid.remove('GLOBAL')
elif t == 'GLOBAL':
valid.remove('LOCAL')
def _parse_nonwlm_event_monitor(self):
"""Parses a non-wlm event monitor definition"""
while True:
if self._match_one_of(['DATABASE', 'TABLES', 'BUFFERPOOLS', 'TABLESPACES']):
pass
elif self._match('DEADLOCKS'):
if self._match_sequence(['WITH', 'DETAILS']):
if self._match('HISTORY'):
self._match('VALUES')
elif self._match_one_of(['CONNECTIONS', 'STATEMENTS', 'TRANSACTIONS']):
if self._match('WHERE'):
self._parse_search_condition()
else:
self._expected_one_of([
'DATABASE',
'TABLES',
'BUFFERPOOLS',
'TABLESPACES',
'DEADLOCKS',
'CONNECTIONS',
'STATEMENTS',
'TRANSACTIONS',
])
if not self._match(','):
break
self._parse_evm_options()
def _parse_wlm_event_monitor(self):
"""Parses a wlm event monitor definition"""
if self._expect_one_of(['ACTIVITIES', 'STATISTICS', 'THRESHOLD']).value == 'THRESHOLD':
self._expect('VIOLATIONS')
self._parse_evm_options()
# STATEMENTS #############################################################
def _parse_allocate_cursor_statement(self):
"""Parses an ALLOCATE CURSOR statement in a procedure"""
# ALLOCATE already matched
self._expect_sequence([TT.IDENTIFIER, 'CURSOR', 'FOR', 'RESULT', 'SET', TT.IDENTIFIER])
def _parse_alter_audit_policy_statement(self):
"""Parses an ALTER AUDIT POLICY statement"""
# ALTER AUDIT POLICY already matched
self._expect(TT.IDENTIFIER)
self._parse_audit_policy(alter=True)
def _parse_alter_bufferpool_statement(self):
"""Parses an ALTER BUFFERPOOL statement"""
# ALTER BUFFERPOOL already matched
self._expect(TT.IDENTIFIER)
if self._match('ADD'):
if self._expect_one_of(['NODEGROUP', 'DATABASE']).value == 'DATABASE':
self._expect_sequence(['PARTITION', 'GROUP'])
self._expect(TT.IDENTIFIER)
elif self._match('NUMBLOCKPAGES'):
self._expect(TT.NUMBER)
if self._match('BLOCKSIZE'):
self._expect(TT.NUMBER)
elif self._match('BLOCKSIZE'):
self._expect(TT.NUMBER)
elif self._match('NOT'):
self._expect_sequence(['EXTENDED', 'STORAGE'])
elif self._match('EXTENDED'):
self._expect('STORAGE')
else:
self._match_one_of(['IMMEDIATE', 'DEFERRED'])
if self._match_one_of(['DBPARTITIONNUM', 'NODE']):
self._expect(TT.NUMBER)
self._expect('SIZE')
if self._match(TT.NUMBER):
self._match('AUTOMATIC')
else:
self._expect_one_of([TT.NUMBER, 'AUTOMATIC'])
def _parse_alter_database_statement(self):
"""Parses an ALTER DATABASE statement"""
# ALTER DATABASE already matched
if not self._match('ADD'):
self._expect(TT.IDENTIFIER)
self._expect('ADD')
self._expect_sequence(['STORAGE', 'ON'])
while True:
self._expect(TT.STRING)
if not self._match(','):
break
def _parse_alter_function_statement(self, specific):
"""Parses an ALTER FUNCTION statement"""
# ALTER [SPECIFIC] FUNCTION already matched
self._parse_function_name()
if not specific and self._match('(', prespace=False):
if not self._match(')'):
self._parse_datatype_list()
self._expect(')')
first = True
while True:
if self._match('EXTERNAL'):
self._expect('NAME')
self._expect_one_of([TT.STRING, TT.IDENTIFIER])
elif self._match('NOT'):
self._expect_one_of(['FENCED', 'THREADSAFE'])
elif self._match_one_of(['FENCED', 'THREADSAFE']):
pass
elif first:
self._expected_one_of([
'EXTERNAL',
'NOT',
'FENCED',
'THREADSAFE',
])
else:
break
first = False
def _parse_alter_partition_group_statement(self):
"""Parses an ALTER DATABASE PARTITION GROUP statement"""
# ALTER [DATABASE PARTITION GROUP|NODEGROUP] already matched
self._expect(TT.IDENTIFIER)
while True:
if self._match('ADD'):
self._parse_db_partition_list_clause(size=False)
if self._match('LIKE'):
self._expect_one_of(['DBPARTITIONNUM', 'NODE'])
self._expect(TT.NUMBER)
elif self._match('WITHOUT'):
self._expect('TABLESPACES')
elif self._match('DROP'):
self._parse_db_partition_list_clause(size=False)
else:
self._expected_one_of(['ADD', 'DROP'])
if not self._match(','):
break
def _parse_alter_histogram_template_statement(self):
"""Parses an ALTER HISTOGRAM TEMPLATE statement"""
# ALTER HISTOGRAM TEMPLATE already matched
self._expect_sequence([TT.IDENTIFIER, 'HIGH', 'BIN', 'VALUE', TT.NUMBER])
def _parse_alter_module_statement(self):
"""Parses an ALTER MODULE statement"""
# ALTER MODULE already matched
self._parse_module_name()
if self._match_one_of(['ADD', 'PUBLISH']):
self._match_sequence(['OR', 'REPLACE'])
if self._match('CONDITION'):
self._expect(TT.IDENTIFIER)
if self._match('FOR'):
if self._match('SQLSTATE'):
self._match('VALUE')
self._expect(TT.STRING)
elif self._match('FUNCTION'):
self._parse_create_function_statement()
elif self._match('PROCEDURE'):
self._parse_create_procedure_statement()
elif self._match('TYPE'):
self._parse_create_type_statement()
elif self._match('VARIABLE'):
self._parse_create_variable_statement()
elif self._match('DROP'):
if not self._match('BODY'):
if self._match('CONDITION'):
self._expect(TT.IDENTIFIER)
elif self._match_one_of(['FUNCTION', 'PROCEDURE']):
self._parse_routine_name()
if self._match('(', prespace=False):
self._parse_datatype_list()
self._expect(')')
elif self._match('SPECIFIC'):
self._expect_one_of(['FUNCTION', 'PROCEDURE'])
self._parse_routine_name()
elif self._match('TYPE'):
self._parse_type_name()
elif self._match('VARIABLE'):
self._parse_variable_name()
else:
self._expected_one_of([
'BODY',
'CONDITION',
'FUNCTION',
'PROCEDURE',
'SPECIFIC',
'TYPE',
'VARIABLE',
])
else:
self._expected_one_of(['ADD', 'DROP', 'PUBLISH'])
def _parse_alter_nickname_statement(self):
"""Parses an ALTER NICKNAME statement"""
# ALTER NICKNAME already matched
self._parse_nickname_name()
if self._match('OPTIONS'):
self._parse_federated_options(alter=True)
while True:
if self._match('ADD'):
self._parse_table_constraint()
elif self._match('ALTER'):
if self._match('FOREIGN'):
self._expect('KEY')
self._parse_constraint_alteration()
elif self._match('CHECK'):
self._parse_constraint_alteration()
else:
# Ambiguity: A column can be called COLUMN
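                    # The save/restore/forget calls implement speculative
                    # parsing: _save_state marks the current position,
                    # _restore_state rewinds to it on failure, and
                    # _forget_state commits the speculative parse on success.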
self._save_state()
try:
self._match('COLUMN')
self._parse_federated_column_alteration()
except ParseError:
self._restore_state()
self._parse_federated_column_alteration()
                    else:
                        self._forget_state()
elif self._match('DROP'):
if self._match('PRIMARY'):
self._expect('KEY')
elif self._match('FOREIGN'):
self._expect_sequence(['KEY', TT.IDENTIFIER])
elif self._match_one_of(['UNIQUE', 'CHECK', 'CONSTRAINT']):
self._expect(TT.IDENTIFIER)
else:
self._expected_one_of(['PRIMARY', 'FOREIGN', 'CHECK', 'CONSTRAINT'])
elif self._match_one_of(['ALLOW', 'DISALLOW']):
self._expect('CACHING')
else:
break
self._newline()
def _parse_alter_procedure_statement(self, specific):
"""Parses an ALTER PROCEDURE statement"""
# ALTER [SPECIFIC] PROCEDURE already matched
self._parse_procedure_name()
if not specific and self._match('(', prespace=False):
if not self._match(')'):
self._parse_datatype_list()
self._expect(')')
first = True
while True:
if self._match('EXTERNAL'):
if self._match('NAME'):
                    self._expect_one_of([TT.STRING, TT.IDENTIFIER])
elif self._match('ACTION'):
pass
else:
self._expected_one_of(['NAME', 'ACTION'])
elif self._match('NOT'):
self._expect_one_of(['FENCED', 'THREADSAFE'])
elif self._match_one_of(['FENCED', 'THREADSAFE']):
pass
elif self._match('NO'):
self._expect_sequence(['EXTERNAL', 'ACTION'])
elif self._match('NEW'):
self._expect_sequence(['SAVEPOINT', 'LEVEL'])
elif self._match('ALTER'):
self._expect_sequence(['PARAMETER', TT.IDENTIFIER, 'SET', 'DATA', 'TYPE'])
self._parse_datatype()
elif first:
self._expected_one_of([
'EXTERNAL',
'NOT',
'FENCED',
'NO',
                    'NEW',
'THREADSAFE',
'ALTER',
])
else:
break
first = False
def _parse_alter_security_label_component_statement(self):
"""Parses an ALTER SECURITY LABEL COMPONENT statement"""
# ALTER SECURITY LABEL COMPONENT already matched
        self._expect_sequence([TT.IDENTIFIER, 'ADD', 'ELEMENT', TT.STRING])
if self._match_one_of(['BEFORE', 'AFTER']):
self._expect(TT.STRING)
elif self._match('ROOT'):
pass
elif self._match('UNDER'):
self._expect(TT.STRING)
if self._match('OVER'):
while True:
self._expect(TT.STRING)
if not self._match(','):
break
self._expect('OVER')
def _parse_alter_security_policy_statement(self):
"""Parses an ALTER SECURITY POLICY statement"""
        # ALTER SECURITY POLICY already matched
self._expect(TT.IDENTIFIER)
while True:
if self._match('ADD'):
self._expect_sequence(['SECURITY', 'LABEL', 'COMPONENT', TT.IDENTIFIER])
elif self._match_one_of(['OVERRIDE', 'RESTRICT']):
self._expect_sequence(['NOT', 'AUTHORIZED', 'WRITE', 'SECURITY', 'LABEL'])
elif self._match_one_of(['USE', 'IGNORE']):
self._expect_one_of(['GROUP', 'ROLE'])
self._expect('AUTHORIZATIONS')
else:
break
def _parse_alter_sequence_statement(self):
"""Parses an ALTER SEQUENCE statement"""
# ALTER SEQUENCE already matched
self._parse_sequence_name()
self._parse_identity_options(alter='SEQUENCE')
def _parse_alter_server_statement(self):
"""Parses an ALTER SERVER statement"""
# ALTER SERVER already matched
self._parse_remote_server()
if self._match('OPTIONS'):
self._parse_federated_options(alter=True)
def _parse_alter_service_class_statement(self):
"""Parses an ALTER SERVICE CLASS statement"""
# ALTER SERVICE CLASS already matched
self._expect(TT.IDENTIFIER)
if self._match('UNDER'):
self._expect(TT.IDENTIFIER)
first = True
while True:
if self._match('AGENT'):
self._expect('PRIORITY')
self._expect_one_of(['DEFAULT', TT.NUMBER])
elif self._match('PREFETCH'):
self._expect('PRIORITY')
self._expect_one_of(['LOW', 'MEDIUM', 'HIGH', 'DEFAULT'])
elif self._match('OUTBOUND'):
self._expect('CORRELATOR')
self._expect_one_of(['NONE', TT.STRING])
elif self._match('COLLECT'):
if self._match('ACTIVITY'):
self._expect('DATA')
if self._match('ON'):
if self._match('ALL'):
self._match_sequence(['DATABASE', 'PARTITIONS'])
elif self._match('COORDINATOR'):
self._match_sequence(['DATABASE', 'PARTITION'])
else:
self._expected_one_of(['ALL', 'COORDINATOR'])
self._expect_one_of(['WITH', 'WITHOUT'])
self._expect('DETAILS')
self._match_sequence(['AND', 'VALUES'])
elif self._match('NONE'):
pass
else:
self._expected_one_of(['ON', 'NONE'])
elif self._match('AGGREGATE'):
if self._match('ACTIVITY'):
self._expect('DATA')
self._match_one_of(['BASE', 'EXTENDED', 'NONE'])
elif self._match('REQUEST'):
self._expect('DATA')
self._match_one_of(['BASE', 'NONE'])
else:
self._expected_one_of(['ACTIVITY', 'REQUEST'])
else:
self._expected_one_of(['ACTIVITY', 'AGGREGATE'])
elif self._match('ACTIVITY'):
self._expect_one_of(['LIFETIME', 'QUEUETIME', 'EXECUTETIME', 'ESTIMATEDCOST', 'INTERARRIVALTIME'])
self._expect_sequence(['HISTOGRAM', 'TEMPLATE', TT.IDENTIFIER])
elif self._match('REQUEST'):
self._expect_sequence(['EXECUTETIME', 'HISTOGRAM', 'TEMPLATE', TT.IDENTIFIER])
elif self._match_one_of(['ENABLE', 'DISABLE']):
pass
elif not first:
break
else:
self._expected_one_of([
'AGENT',
'PREFETCH',
'OUTBOUND',
'COLLECT',
'ACTIVITY',
'REQUEST',
'ENABLE',
'DISABLE'
])
def _parse_alter_table_statement(self):
"""Parses an ALTER TABLE statement"""
# ALTER TABLE already matched
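        # Illustrative example (assumed DB2 syntax, not from the original
        # source):
        #   ALTER TABLE staff ADD COLUMN bonus DECIMAL(9,2) NOT NULL DEFAULT 0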
self._parse_table_name()
self._indent()
while True:
if self._match('ADD'):
if self._match('RESTRICT'):
self._expect_sequence(['ON', 'DROP'])
elif self._match('PARTITION'):
# Ambiguity: optional partition name
self._save_state()
try:
self._match(TT.IDENTIFIER)
self._parse_partition_boundary()
except ParseError:
self._restore_state()
self._parse_partition_boundary()
else:
self._forget_state()
if self._match('IN'):
self._expect(TT.IDENTIFIER)
if self._match('LONG'):
self._expect('IN')
self._expect(TT.IDENTIFIER)
elif self._match('MATERIALIZED'):
self._expect('QUERY')
self._expect('(')
self._indent()
self._parse_full_select()
self._outdent()
self._expect(')')
self._parse_refreshable_table_options(alter=True)
elif self._match('QUERY'):
self._expect('(')
self._indent()
self._parse_full_select()
self._outdent()
self._expect(')')
self._parse_refreshable_table_options(alter=True)
elif self._match('('):
self._indent()
self._parse_full_select()
self._outdent()
self._expect(')')
self._parse_refreshable_table_options(alter=True)
elif self._match('COLUMN'):
self._parse_column_definition()
elif self._match('SECURITY'):
self._expect('POLICY')
self._expect(TT.IDENTIFIER)
else:
self._save_state()
try:
# Try parsing a table constraint definition
self._parse_table_constraint()
except ParseError:
# If that fails, rewind and try and parse a column definition
self._restore_state()
self._parse_column_definition()
else:
self._forget_state()
elif self._match('ATTACH'):
self._expect('PARTITION')
# Ambiguity: optional partition name
self._save_state()
try:
self._match(TT.IDENTIFIER)
self._parse_partition_boundary()
except ParseError:
self._restore_state()
self._parse_partition_boundary()
else:
self._forget_state()
self._expect('FROM')
self._parse_table_name()
elif self._match('DETACH'):
self._expect_sequence(['PARTITION', TT.IDENTIFIER, 'FROM'])
self._parse_table_name()
elif self._match('ALTER'):
if self._match('FOREIGN'):
self._expect('KEY')
self._parse_constraint_alteration()
elif self._match('CHECK'):
self._parse_constraint_alteration()
else:
# Ambiguity: A column can be called COLUMN
self._save_state()
try:
self._match('COLUMN')
self._parse_column_alteration()
except ParseError:
self._restore_state()
self._parse_column_alteration()
else:
self._forget_state()
elif self._match('RENAME'):
self._match('COLUMN')
self._expect_sequence([TT.IDENTIFIER, 'TO', TT.IDENTIFIER])
elif self._match('DROP'):
if self._match('PRIMARY'):
self._expect('KEY')
elif self._match('FOREIGN'):
self._expect_sequence(['KEY', TT.IDENTIFIER])
elif self._match_one_of(['UNIQUE', 'CHECK', 'CONSTRAINT']):
self._expect(TT.IDENTIFIER)
elif self._match('COLUMN'):
self._expect(TT.IDENTIFIER)
self._match_one_of(['CASCADE', 'RESTRICT'])
elif self._match('RESTRICT'):
self._expect_sequence(['ON', 'DROP'])
elif self._match('DISTRIBUTION'):
pass
elif self._match('MATERIALIZED'):
self._expect('QUERY')
elif self._match('QUERY'):
pass
elif self._match('SECURITY'):
self._expect('POLICY')
else:
self._expect(TT.IDENTIFIER)
self._match_one_of(['CASCADE', 'RESTRICT'])
elif self._match('DATA'):
self._expect('CAPTURE')
if self._match('CHANGES'):
self._match_sequence(['INCLUDE', 'LONGVAR', 'COLUMNS'])
elif self._match('NONE'):
pass
else:
self._expected_one_of(['NONE', 'CHANGES'])
elif self._match('PCTFREE'):
self._expect(TT.NUMBER)
elif self._match('LOCKSIZE'):
self._expect_one_of(['ROW', 'BLOCKINSERT', 'TABLE'])
elif self._match('APPEND'):
self._expect_one_of(['ON', 'OFF'])
elif self._match('VOLATILE'):
self._match('CARDINALITY')
elif self._match('NOT'):
self._expect('VOLATILE')
self._match('CARDINALITY')
elif self._match('COMPRESS'):
self._expect_one_of(['YES', 'NO'])
elif self._match('ACTIVATE'):
if self._expect_one_of(['NOT', 'VALUE']).value == 'NOT':
self._expect_sequence(['LOGGED', 'INITIALLY'])
if self._match('WITH'):
self._expect_sequence(['EMPTY', 'TABLE'])
else:
self._expect('COMPRESSION')
elif self._match('DEACTIVATE'):
self._expect_sequence(['VALUE', 'COMPRESSION'])
else:
break
self._newline()
self._outdent()
def _parse_alter_tablespace_statement(self):
"""Parses an ALTER TABLESPACE statement"""
# ALTER TABLESPACE already matched
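        # Illustrative example (assumed DB2 syntax, not from the original
        # source):
        #   ALTER TABLESPACE ts1 PREFETCHSIZE AUTOMATIC BUFFERPOOL bp1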
self._expect(TT.IDENTIFIER)
first = True
while True:
if self._match('ADD'):
if self._match('TO'):
self._expect_sequence(['STRIPE', 'SET', TT.IDENTIFIER])
self._parse_database_container_clause()
if self._match('ON'):
self._parse_db_partition_list_clause(size=False)
else:
# Ambiguity: could be a Database or a System container
# clause here
reraise = False
self._save_state()
try:
# Try a database clause first
self._parse_database_container_clause()
reraise = True
if self._match('ON'):
self._parse_db_partition_list_clause(size=False)
except ParseError:
# If that fails, rewind and try a system container
# clause
self._restore_state()
if reraise: raise
self._parse_system_container_clause()
self._parse_db_partition_list_clause(size=False)
                    else:
                        self._forget_state()
elif self._match('BEGIN'):
self._expect_sequence(['NEW', 'STRIPE', 'SET'])
self._parse_database_container_clause()
if self._match('ON'):
self._parse_db_partition_list_clause(size=False)
elif self._match('DROP'):
self._parse_database_container_clause(size=False)
if self._match('ON'):
self._parse_db_partition_list_clause(size=False)
elif self._match_one_of(['EXTEND', 'REDUCE']):
# Ambiguity: could be a Database or ALL containers clause
reraise = False
self._save_state()
try:
# Try an ALL containers clause first
self._expect_sequence(['(', 'ALL'])
reraise = True
self._match('CONTAINERS')
self._expect(TT.NUMBER)
self._match_one_of(['K', 'M', 'G'])
self._expect(')')
except ParseError:
# If that fails, rewind and try a database container clause
self._restore_state()
if reraise: raise
self._parse_database_container_clause()
else:
self._forget_state()
if self._match('ON'):
self._parse_db_partition_list_clause(size=False)
elif self._match('PREFETCHSIZE'):
if not self._match('AUTOMATIC'):
self._expect(TT.NUMBER)
self._match_one_of(['K', 'M', 'G'])
elif self._match('BUFFERPOOL'):
self._expect(TT.IDENTIFIER)
elif self._match('OVERHEAD'):
self._expect(TT.NUMBER)
elif self._match('TRANSFERRATE'):
self._expect(TT.NUMBER)
elif self._match('NO'):
self._expect_sequence(['FILE', 'SYSTEM', 'CACHING'])
elif self._match('FILE'):
self._expect_sequence(['SYSTEM', 'CACHING'])
elif self._match('DROPPED'):
self._expect_sequence(['TABLE', 'RECOVERY'])
self._expect_one_of(['ON', 'OFF'])
elif self._match('SWITCH'):
self._expect('ONLINE')
elif self._match('INCREASESIZE'):
self._expect(TT.NUMBER)
self._expect_one_of(['K', 'M', 'G', 'PERCENT'])
elif self._match('MAXSIZE'):
                if not self._match('NONE'):
self._expect(TT.NUMBER)
self._expect_one_of(['K', 'M', 'G'])
elif self._match('CONVERT'):
self._expect_sequence(['TO', 'LARGE'])
elif first:
self._expected_one_of([
'ADD',
'BEGIN',
                    'DROP',
'EXTEND',
'REDUCE',
'PREFETCHSIZE',
'BUFFERPOOL',
'OVERHEAD',
'TRANSFERRATE',
'NO',
'FILE',
'DROPPED',
'SWITCH',
'INCREASESIZE',
'MAXSIZE',
'CONVERT',
])
else:
break
first = False
def _parse_alter_threshold_statement(self):
"""Parses an ALTER THRESHOLD statement"""
# ALTER THRESHOLD already matched
self._expect(TT.IDENTIFIER)
while True:
if self._match('WHEN'):
self._parse_threshold_predicate()
self._parse_threshold_exceeded_actions()
elif not self._match_one_of(['ENABLE', 'DISABLE']):
break
def _parse_alter_trusted_context_statement(self):
"""Parses an ALTER TRUSTED CONTEXT statement"""
# ALTER TRUSTED CONTEXT already matched
self._expect(TT.IDENTIFIER)
first = True
while True:
if self._match('ADD'):
if self._match('ATTRIBUTES'):
self._expect('(')
while True:
self._expect_sequence(['ADDRESS', TT.STRING])
if self._match('WITH'):
self._expect_sequence(['ENCRYPTION', TT.STRING])
if not self._match(','):
break
self._expect(')')
elif self._match('USE'):
self._expect('FOR')
while True:
if not self._match('PUBLIC'):
self._expect(TT.IDENTIFIER)
self._match_sequence(['ROLE', TT.IDENTIFIER])
if self._match_one_of(['WITH', 'WITHOUT']):
self._expect('AUTHENTICATION')
if not self._match(','):
break
else:
self._expected_one_of(['ATTRIBUTES', 'USE'])
elif self._match('DROP'):
if self._match('ATTRIBUTES'):
self._expect('(')
while True:
self._expect_sequence(['ADDRESS', TT.STRING])
if not self._match(','):
break
self._expect(')')
elif self._match('USE'):
self._expect('FOR')
while True:
if not self._match('PUBLIC'):
self._expect(TT.IDENTIFIER)
if not self._match(','):
break
else:
self._expected_one_of(['ATTRIBUTES', 'USE'])
elif self._match('ALTER'):
while True:
if self._match('SYSTEM'):
self._expect_sequence(['AUTHID', TT.IDENTIFIER])
elif self._match('ATTRIBUTES'):
self._expect('(')
while True:
self._expect_one_of(['ADDRESS', 'ENCRYPTION'])
self._expect(TT.STRING)
if not self._match(','):
break
self._expect(')')
elif self._match('NO'):
self._expect_sequence(['DEFAULT', 'ROLE'])
elif self._match('DEFAULT'):
self._expect_sequence(['ROLE', TT.IDENTIFIER])
elif not self._match_one_of(['ENABLE', 'DISABLE']):
break
elif self._match('REPLACE'):
self._expect_sequence(['USE', 'FOR'])
while True:
if not self._match('PUBLIC'):
self._expect(TT.IDENTIFIER)
self._match_sequence(['ROLE', TT.IDENTIFIER])
if self._match_one_of(['WITH', 'WITHOUT']):
self._expect('AUTHENTICATION')
if not self._match(','):
break
elif first:
self._expected_one_of(['ALTER', 'ADD', 'DROP', 'REPLACE'])
else:
break
first = False
def _parse_alter_user_mapping_statement(self):
"""Parses an ALTER USER MAPPING statement"""
# ALTER USER MAPPING already matched
        self._expect('FOR')
        if not self._match('USER'):
            self._expect(TT.IDENTIFIER)
        self._expect_sequence(['SERVER', TT.IDENTIFIER, 'OPTIONS'])
        self._parse_federated_options(alter=True)
def _parse_alter_view_statement(self):
"""Parses an ALTER VIEW statement"""
# ALTER VIEW already matched
self._parse_view_name()
self._expect_one_of(['ENABLE', 'DISABLE'])
self._expect_sequence(['QUERY', 'OPTIMIZATION'])
def _parse_alter_work_action_set_statement(self):
"""Parses an ALTER WORK ACTION SET statement"""
# ALTER WORK ACTION SET already matched
self._expect(TT.IDENTIFIER)
first = True
while True:
if self._match('ADD'):
self._match_sequence(['WORK', 'ACTION'])
self._expect_sequence([TT.IDENTIFIER, 'ON', 'WORK', 'CLASS', TT.IDENTIFIER])
self._parse_action_types_clause()
self._parse_histogram_template_clause()
self._match_one_of(['ENABLE', 'DISABLE'])
elif self._match('ALTER'):
self._match_sequence(['WORK', 'ACTION'])
self._expect(TT.IDENTIFIER)
while True:
if self._match('SET'):
self._expect_sequence(['WORK', 'CLASS', TT.IDENTIFIER])
elif self._match('ACTIVITY'):
                        self._expect_one_of(['LIFETIME', 'QUEUETIME', 'EXECUTETIME', 'ESTIMATEDCOST', 'INTERARRIVALTIME'])
self._expect_sequence(['HISTOGRAM', 'TEMPLATE', TT.IDENTIFIER])
elif self._match_one_of(['ENABLE', 'DISABLE']):
pass
else:
# Ambiguity: could be the end of the loop, or an action
# types clause
self._save_state()
try:
self._parse_action_types_clause()
except ParseError:
self._restore_state()
break
else:
self._forget_state()
elif self._match('DROP'):
self._match_sequence(['WORK', 'ACTION'])
self._expect(TT.IDENTIFIER)
elif self._match_one_of(['ENABLE', 'DISABLE']):
pass
elif first:
self._expected_one_of(['ADD', 'ALTER', 'DROP', 'ENABLE', 'DISABLE'])
else:
break
first = False
def _parse_alter_work_class_set_statement(self):
"""Parses an ALTER WORK CLASS SET statement"""
# ALTER WORK CLASS SET already matched
self._expect(TT.IDENTIFIER)
outer = True
while True:
if self._match('ADD'):
self._match_sequence(['WORK', 'CLASS'])
self._expect(TT.IDENTIFIER)
self._parse_work_attributes()
self._expect('POSITION')
self._parse_position_clause()
elif self._match('ALTER'):
self._match_sequence(['WORK', 'CLASS'])
self._expect(TT.IDENTIFIER)
inner = True
while True:
if self._match('FOR'):
self._parse_for_from_to_clause(alter=True)
elif self._match('POSITION'):
self._parse_position_clause()
elif self._match('ROUTINES'):
self._parse_routines_in_schema_clause(alter=True)
elif inner:
self._expected_one_of(['FOR', 'POSITION', 'ROUTINES'])
else:
break
inner = False
elif self._match('DROP'):
self._match_sequence(['WORK', 'CLASS'])
self._expect(TT.IDENTIFIER)
elif outer:
self._expected_one_of(['ADD', 'ALTER', 'DROP'])
else:
break
outer = False
def _parse_alter_workload_statement(self):
"""Parses an ALTER WORKLOAD statement"""
self._expect(TT.IDENTIFIER)
first = True
while True:
if self._match('ADD'):
self._parse_connection_attributes()
elif self._match('DROP'):
self._parse_connection_attributes()
elif self._match_one_of(['ALLOW', 'DISALLOW']):
self._expect_sequence(['DB', 'ACCESS'])
elif self._match_one_of(['ENABLE', 'DISABLE']):
pass
elif self._match('SERVICE'):
self._expect_sequence(['CLASS', TT.IDENTIFIER])
if self._match('UNDER'):
self._expect(TT.IDENTIFIER)
elif self._match('POSITION'):
self._parse_position_clause()
elif self._match_sequence(['COLLECT', 'ACTIVITY', 'DATA']):
self._parse_collect_activity_data_clause(alter=True)
elif first:
self._expected_one_of([
'ADD',
'DROP',
'ALLOW',
'DISALLOW',
'ENABLE',
'DISABLE',
'SERVICE',
'POSITION',
'COLLECT'
])
else:
break
first = False
def _parse_alter_wrapper_statement(self):
"""Parses an ALTER WRAPPER statement"""
# ALTER WRAPPER already matched
self._expect(TT.IDENTIFIER)
self._expect('OPTIONS')
self._parse_federated_options(alter=True)
def _parse_associate_locators_statement(self):
"""Parses an ASSOCIATE LOCATORS statement in a procedure"""
# ASSOCIATE already matched
self._match_sequence(['RESULT', 'SET'])
self._expect_one_of(['LOCATOR', 'LOCATORS'])
self._expect('(')
self._parse_ident_list()
self._expect(')')
self._expect_sequence(['WITH', 'PROCEDURE'])
self._parse_procedure_name()
def _parse_audit_statement(self):
"""Parses an AUDIT statement"""
# AUDIT already matched
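        # Illustrative example (assumed DB2 syntax, not from the original
        # source):
        #   AUDIT DATABASE, SYSADM USING POLICY audit_pol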
while True:
if self._match_one_of([
'DATABASE',
'SYSADM',
'SYSCTRL',
'SYSMAINT',
'SYSMON',
'SECADM',
'DBADM',
]):
pass
elif self._match('TABLE'):
self._parse_table_name()
elif self._match_sequence(['TRUSTED', 'CONTEXT']):
self._expect(TT.IDENTIFIER)
elif self._match_one_of(['USER', 'GROUP', 'ROLE']):
self._expect(TT.IDENTIFIER)
else:
self._expected_one_of([
'DATABASE',
'SYSADM',
'SYSCTRL',
'SYSMAINT',
'SYSMON',
'SECADM',
'DBADM',
'TABLE',
'TRUSTED',
'USER',
'GROUP',
'ROLE',
])
if not self._match(','):
break
if self._match_one_of(['USING', 'REPLACE']):
self._expect_sequence(['POLICY', TT.IDENTIFIER])
elif not self._match_sequence(['REMOVE', 'POLICY']):
self._expected_one_of(['USING', 'REPLACE', 'REMOVE'])
def _parse_call_statement(self):
"""Parses a CALL statement"""
# CALL already matched
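        # Illustrative examples (assumed DB2 syntax, not from the original
        # source; the second uses the named-parameter form handled below):
        #   CALL refresh_stats()
        #   CALL log_event(3, msg => 'backup complete')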
self._parse_subschema_name()
if self._match('(', prespace=False):
if not self._match(')'):
while True:
# Try and parse an optional parameter name
                    self._save_state()
                    try:
                        self._expect(TT.IDENTIFIER)
                        self._expect('=>')
                    except ParseError:
                        self._restore_state()
                    else:
                        self._forget_state()
                    # Parse the parameter value
                    self._parse_expression()
if not self._match(','):
break
self._expect(')')
def _parse_case_statement(self):
"""Parses a CASE-conditional in a procedure"""
# CASE already matched
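        # Both SQL PL forms are handled; illustrative examples (assumed DB2
        # syntax, not from the original source):
        #   CASE WHEN v > 0 THEN SET msg = 'pos'; END CASE   -- searched
        #   CASE v WHEN 0 THEN SET msg = 'zero'; END CASE    -- simple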
if self._match('WHEN'):
# Parse searched-case-statement
simple = False
self._indent(-1)
else:
# Parse simple-case-statement
self._parse_expression()
self._indent()
self._expect('WHEN')
simple = True
# Parse WHEN clauses (only difference is predicate/expression after
# WHEN)
t = None
while True:
if simple:
self._parse_expression()
else:
self._parse_search_condition()
self._expect('THEN')
self._indent()
while True:
self._parse_compiled_statement()
self._expect((TT.TERMINATOR, ';'))
t = self._match_one_of(['WHEN', 'ELSE', 'END'])
if t:
self._outdent(-1)
t = t.value
break
else:
self._newline()
if t != 'WHEN':
break
# Handle ELSE clause (common to both variations)
if t == 'ELSE':
self._indent()
while True:
self._parse_compiled_statement()
self._expect((TT.TERMINATOR, ';'))
if self._match('END'):
self._outdent(-1)
break
else:
self._newline()
self._outdent(-1)
self._expect('CASE')
def _parse_close_statement(self):
"""Parses a CLOSE cursor statement"""
# CLOSE already matched
self._expect(TT.IDENTIFIER)
self._match_sequence(['WITH', 'RELEASE'])
def _parse_comment_statement(self):
"""Parses a COMMENT ON statement"""
# COMMENT ON already matched
# Ambiguity: table/view can be called TABLE, VIEW, ALIAS, etc.
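        # Illustrative examples of the two accepted forms (assumed DB2
        # syntax, not from the original source):
        #   COMMENT ON staff (id IS 'Primary key', name IS 'Full name')
        #   COMMENT ON TABLE staff IS 'Staff master table'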
reraise = False
self._save_state()
try:
# Try parsing an extended TABLE/VIEW comment first
self._parse_relation_name()
self._expect('(')
self._indent()
while True:
self._expect(TT.IDENTIFIER)
self._valign()
self._expect_sequence(['IS', TT.STRING])
reraise = True
if self._match(','):
self._newline()
else:
break
self._vapply()
self._outdent()
self._expect(')')
except ParseError:
# If that fails, rewind and parse a single-object comment
self._restore_state()
if reraise: raise
if self._match_one_of(['ALIAS', 'TABLE', 'NICKNAME', 'INDEX', 'TRIGGER', 'VARIABLE']):
self._parse_subschema_name()
elif self._match('TYPE'):
if self._match('MAPPING'):
self._expect(TT.IDENTIFIER)
else:
self._parse_subschema_name()
elif self._match('PACKAGE'):
self._parse_subschema_name()
self._match('VERSION')
# XXX Ambiguity: IDENTIFIER will match "IS" below. How to solve
# this? Only double-quoted identifiers are actually permitted
# here (or strings)
self._match_one_of([TT.IDENTIFIER, TT.STRING])
elif self._match_one_of(['DISTINCT', 'DATA']):
self._expect('TYPE')
self._parse_type_name()
elif self._match_one_of(['COLUMN', 'CONSTRAINT']):
self._parse_subrelation_name()
elif self._match_one_of(['SCHEMA', 'TABLESPACE', 'WRAPPER', 'WORKLOAD', 'NODEGROUP', 'ROLE', 'THRESHOLD']):
self._expect(TT.IDENTIFIER)
elif self._match_sequence(['DATABASE', 'PARTITION', 'GROUP']):
self._expect(TT.IDENTIFIER)
elif self._match_sequence(['AUDIT', 'POLICY']):
self._expect(TT.IDENTIFIER)
elif self._match_sequence(['SECURITY', 'POLICY']):
self._expect(TT.IDENTIFIER)
elif self._match_sequence(['SECURITY', 'LABEL']):
self._match('COMPONENT')
self._expect(TT.IDENTIFIER)
elif self._match('SERVER'):
if self._match('OPTION'):
self._expect_sequence([TT.IDENTIFIER, 'FOR'])
self._parse_remote_server()
else:
self._expect(TT.IDENTIFIER)
elif self._match('SERVICE'):
self._expect('CLASS')
self._expect(TT.IDENTIFIER)
self._match_sequence(['UNDER', TT.IDENTIFIER])
elif self._match_sequence(['TRUSTED', 'CONTEXT']):
self._expect(TT.IDENTIFIER)
elif self._match_sequence(['HISTOGRAM', 'TEMPLATE']):
self._expect(TT.IDENTIFIER)
elif self._match_sequence(['WORK', 'ACTION', 'SET']):
self._expect(TT.IDENTIFIER)
elif self._match_sequence(['WORK', 'CLASS', 'SET']):
self._expect(TT.IDENTIFIER)
elif self._match('FUNCTION'):
if self._match('MAPPING'):
self._expect(TT.IDENTIFIER)
else:
self._parse_routine_name()
if self._match('(', prespace=False):
self._parse_datatype_list()
self._expect(')')
elif self._match('PROCEDURE'):
self._parse_routine_name()
if self._match('(', prespace=False):
self._parse_datatype_list()
self._expect(')')
elif self._match('SPECIFIC'):
self._expect_one_of(['FUNCTION', 'PROCEDURE'])
self._parse_routine_name()
else:
self._expected_one_of([
'ALIAS',
'AUDIT',
'COLUMN',
'CONSTRAINT',
'DATA',
'DATABASE',
'DISTINCT',
'FUNCTION',
'HISTOGRAM',
'INDEX',
'NICKNAME',
'PROCEDURE',
'ROLE',
'SCHEMA',
'SECURITY',
'SERVER',
'SERVICE',
'SPECIFIC',
'TABLE',
'TABLESPACE',
'THRESHOLD',
'TRIGGER',
'TRUSTED',
'TYPE',
'VARIABLE',
'WORK',
'WORKLOAD',
'WRAPPER',
])
self._expect_sequence(['IS', TT.STRING])
else:
self._forget_state()
def _parse_commit_statement(self):
"""Parses a COMMIT statement"""
# COMMIT already matched
self._match('WORK')
def _parse_create_alias_statement(self):
"""Parses a CREATE ALIAS statement"""
# CREATE ALIAS already matched
self._parse_relation_name()
self._expect('FOR')
self._parse_relation_name()
def _parse_create_audit_policy_statement(self):
"""Parses a CREATE AUDIT POLICY statement"""
# CREATE AUDIT POLICY already matched
self._expect(TT.IDENTIFIER)
self._parse_audit_policy()
def _parse_create_bufferpool_statement(self):
"""Parses a CREATE BUFFERPOOL statement"""
# CREATE BUFFERPOOL already matched
self._expect(TT.IDENTIFIER)
self._match_one_of(['IMMEDIATE', 'DEFERRED'])
if self._match('ALL'):
self._expect('DBPARTITIONNUMS')
elif self._match('DATABASE'):
self._expect_sequence(['PARTITION', 'GROUP'])
self._parse_ident_list()
elif self._match('NODEGROUP'):
self._parse_ident_list()
self._expect('SIZE')
if self._match(TT.NUMBER):
self._match('AUTOMATIC')
elif self._match('AUTOMATIC'):
pass
else:
self._expected_one_of([TT.NUMBER, 'AUTOMATIC'])
        # Parse bufferpool options (which can appear in any order)
valid = set(['NUMBLOCKPAGES', 'PAGESIZE', 'EXTENDED', 'EXCEPT', 'NOT'])
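        # Each option may appear at most once and in any order: a matched
        # keyword is removed from the valid set, and the loop ends when no
        # remaining option matches.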
while valid:
t = self._match_one_of(valid)
if t:
t = t.value
valid.remove(t)
else:
break
            if t == 'EXCEPT':
self._expect('ON')
self._parse_db_partition_list_clause(size=True)
elif t == 'NUMBLOCKPAGES':
self._expect(TT.NUMBER)
if self._match('BLOCKSIZE'):
self._expect(TT.NUMBER)
elif t == 'PAGESIZE':
self._expect(TT.NUMBER)
self._match('K')
elif t == 'EXTENDED':
self._expect('STORAGE')
valid.remove('NOT')
elif t == 'NOT':
self._expect_sequence(['EXTENDED', 'STORAGE'])
valid.remove('EXTENDED')
def _parse_create_database_partition_group_statement(self):
"""Parses an CREATE DATABASE PARTITION GROUP statement"""
# CREATE [DATABASE PARTITION GROUP|NODEGROUP] already matched
self._expect(TT.IDENTIFIER)
if self._match('ON'):
if self._match('ALL'):
self._expect_one_of(['DBPARTITIONNUMS', 'NODES'])
else:
self._parse_db_partition_list_clause(size=False)
def _parse_create_event_monitor_statement(self):
"""Parses a CREATE EVENT MONITOR statement"""
# CREATE EVENT MONITOR already matched
self._expect(TT.IDENTIFIER)
self._expect('FOR')
self._save_state()
try:
self._parse_wlm_event_monitor()
except ParseError:
self._restore_state()
self._parse_nonwlm_event_monitor()
else:
self._forget_state()
def _parse_create_function_statement(self):
"""Parses a CREATE FUNCTION statement"""
# CREATE FUNCTION already matched
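        # Illustrative example (assumed DB2 syntax, not from the original
        # source):
        #   CREATE FUNCTION celsius(f DOUBLE) RETURNS DOUBLE
        #       LANGUAGE SQL DETERMINISTIC NO EXTERNAL ACTION
        #       RETURN (f - 32) / 1.8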
self._parse_function_name()
# Parse parameter list
self._expect('(', prespace=False)
if not self._match(')'):
while True:
self._save_state()
try:
self._expect(TT.IDENTIFIER)
self._parse_datatype()
except ParseError:
self._restore_state()
self._parse_datatype()
else:
self._forget_state()
self._match_sequence(['AS', 'LOCATOR'])
if not self._match(','):
break
self._expect(')')
self._indent()
# Parse function options (which can appear in any order)
valid = set([
'ALLOW',
'CALLED',
'CARDINALITY',
'CONTAINS',
'DBINFO',
'DETERMINISTIC',
'DISALLOW',
'EXTERNAL',
'FENCED',
'FINAL',
'INHERIT',
'LANGUAGE',
'MODIFIES',
'NO',
'NOT',
'NULL',
'PARAMETER',
'READS',
'RETURNS',
'SCRATCHPAD',
'SPECIFIC',
'STATIC',
'THREADSAFE',
'TRANSFORM',
'VARIANT',
])
while True:
# Ambiguity: INHERIT SPECIAL REGISTERS (which appears in the
# variable order options) and INHERIT ISOLATION LEVEL (which must
# appear after the variable order options). See below.
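            # (e.g. INHERIT SPECIAL REGISTERS belongs in this loop, while
            # INHERIT ISOLATION LEVEL WITH/WITHOUT LOCK REQUEST is only
            # parsed after the loop exits via ParseBacktrack below)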
self._save_state()
try:
t = self._match_one_of(valid)
if t:
t = t.value
# Note that matches aren't removed from valid, because it's
# simply too complex to figure out what option disallows
# other options in many cases
else:
# break would skip the except and else blocks
raise ParseBacktrack()
if t == 'ALLOW':
self._expect('PARALLEL')
if self._match_sequence(['EXECUTE', 'ON', 'ALL']):
self._match_sequence(['DATABASE', 'PARTITIONS'])
self._expect_sequence(['RESULT', 'TABLE', 'DISTRIBUTED'])
elif t == 'CALLED':
self._expect_sequence(['ON', 'NULL', 'INPUT'])
elif t == 'CARDINALITY':
self._expect(TT.NUMBER)
elif t == 'CONTAINS':
self._expect('SQL')
elif t == 'DBINFO':
pass
elif t == 'DETERMINISTIC':
pass
elif t == 'DISALLOW':
self._expect('PARALLEL')
elif t == 'EXTERNAL':
if self._match('NAME'):
self._expect_one_of([TT.STRING, TT.IDENTIFIER])
else:
self._expect('ACTION')
elif t == 'FENCED':
pass
elif t == 'FINAL':
self._expect('CALL')
elif t == 'INHERIT':
# Try and parse INHERIT SPECIAL REGISTERS first
if not self._match('SPECIAL'):
raise ParseBacktrack()
self._expect('REGISTERS')
elif t == 'LANGUAGE':
self._expect_one_of(['SQL', 'C', 'JAVA', 'CLR', 'OLE'])
elif t == 'MODIFIES':
self._expect_sequence(['SQL', 'DATA'])
elif t == 'NO':
t = self._expect_one_of(['DBINFO', 'EXTERNAL', 'FINAL', 'SCRATCHPAD', 'SQL']).value
if t == 'EXTERNAL':
self._expect('ACTION')
elif t == 'FINAL':
self._expect('CALL')
elif t == 'NOT':
self._expect_one_of(['DETERMINISTIC', 'FENCED', 'THREADSAFE', 'VARIANT'])
elif t == 'NULL':
self._expect('CALL')
elif t == 'PARAMETER':
if self._match('CCSID'):
self._expect_one_of(['ASCII', 'UNICODE'])
else:
self._expect('STYLE')
self._expect_one_of(['DB2GENERAL', 'DB2GENERL', 'JAVA', 'SQL', 'DB2SQL'])
elif t == 'READS':
self._expect_sequence(['SQL', 'DATA'])
elif t == 'RETURNS':
if self._match('NULL'):
self._expect_sequence(['ON', 'NULL', 'INPUT'])
elif self._match_one_of(['ROW', 'TABLE']):
if self._match('('):
while True:
self._expect(TT.IDENTIFIER)
self._parse_datatype()
self._match_sequence(['AS', 'LOCATOR'])
if not self._match(','):
break
self._expect(')')
else:
self._parse_datatype()
if self._match_sequence(['CAST', 'FROM']):
self._parse_datatype()
self._match_sequence(['AS', 'LOCATOR'])
elif t == 'SCRATCHPAD':
self._expect(TT.NUMBER)
elif t == 'SPECIFIC':
self._expect(TT.IDENTIFIER)
elif t == 'STATIC':
self._expect('DISPATCH')
elif t == 'THREADSAFE':
pass
elif t == 'TRANSFORM':
self._expect_sequence(['GROUP', TT.IDENTIFIER])
elif t == 'VARIANT':
pass
self._newline()
except ParseBacktrack:
# NOTE: This block only gets called for ParseBacktrack errors.
                # Other parse errors will propagate outward. If the above has
# failed, rewind, and drop out of the loop so we can try
# INHERIT ISOLATION LEVEL (and PREDICATES)
self._restore_state()
break
else:
self._forget_state()
# Parse optional PREDICATES clause
if self._match('PREDICATES'):
self._parse_function_predicates_clause()
self._newline()
if self._match('INHERIT'):
self._expect_sequence(['ISOLATION', 'LEVEL'])
self._expect_one_of(['WITH', 'WITHOUT'])
self._expect_sequence(['LOCK', 'REQUEST'])
# Parse the function body
self._outdent()
if self._match('BEGIN'):
self._parse_compiled_compound_statement()
elif self._match('RETURN'):
self._indent()
self._parse_return_statement()
self._outdent()
else:
# External function with no body
pass
def _parse_create_function_mapping_statement(self):
"""Parses a CREATE FUNCTION MAPPING statement"""
# CREATE FUNCTION MAPPING already matched
if not self._match('FOR'):
self._expect_sequence([TT.IDENTIFIER, 'FOR'])
if not self._match('SPECIFIC'):
self._parse_function_name()
self._expect('(', prespace=False)
self._parse_datatype_list()
self._expect(')')
else:
self._parse_function_name()
self._expect('SERVER')
self._parse_remote_server()
if self._match('OPTIONS'):
self._parse_federated_options()
self._match_sequence(['WITH', 'INFIX'])
def _parse_create_histogram_template_statement(self):
"""Parses a CREATE HISTOGRAM TEMPLATE statement"""
# CREATE HISTOGRAM TEMPLATE already matched
self._expect_sequence([TT.IDENTIFIER, 'HIGH', 'BIN', 'VALUE', TT.NUMBER])
def _parse_create_index_statement(self, unique):
"""Parses a CREATE INDEX statement"""
# CREATE [UNIQUE] INDEX already matched
self._parse_index_name()
self._indent()
self._expect('ON')
self._parse_table_name()
self._expect('(')
self._indent()
while True:
if self._match('BUSINESS_TIME'):
self._expect_sequence(['WITHOUT', 'OVERLAPS'])
else:
self._expect(TT.IDENTIFIER)
self._match_one_of(['ASC', 'DESC'])
if not self._match(','):
break
else:
self._newline()
self._outdent()
self._expect(')')
valid = set([
'IN',
'PARTITIONED',
'NOT',
'SPECIFICATION',
'INCLUDE',
'CLUSTER',
'PCTFREE',
'LEVEL2',
'MINPCTUSED',
'ALLOW',
'DISALLOW',
'PAGE',
'COLLECT',
'COMPRESS',
])
while valid:
t = self._match_one_of(valid)
if t:
self._newline(-1)
t = t.value
valid.remove(t)
else:
break
if t == 'IN':
self._expect(TT.IDENTIFIER)
elif t == 'NOT':
self._expect('PARTITIONED')
                valid.discard('PARTITIONED')
elif t == 'PARTITIONED':
valid.discard('NOT')
elif t == 'SPECIFICATION':
self._expect('ONLY')
elif t == 'INCLUDE':
self._expect('(')
self._indent()
self._parse_ident_list(newlines=True)
self._outdent()
self._expect(')')
elif t == 'CLUSTER':
pass
elif t == 'PCTFREE' or t == 'MINPCTUSED':
self._expect(TT.NUMBER)
elif t == 'LEVEL2':
self._expect_sequence(['PCTFREE', TT.NUMBER])
elif t == 'ALLOW' or t == 'DISALLOW':
valid.discard('ALLOW')
valid.discard('DISALLOW')
self._expect_sequence(['REVERSE', 'SCANS'])
elif t == 'PAGE':
self._expect('SPLIT')
self._expect_one_of(['SYMMETRIC', 'HIGH', 'LOW'])
elif t == 'COLLECT':
self._match('SAMPLED')
self._match('DETAILED')
self._expect('STATISTICS')
elif t == 'COMPRESS':
self._expect_one_of(['NO', 'YES'])
def _parse_create_module_statement(self):
"""Parses a CREATE MODULE statement"""
# CREATE MODULE already matched
self._parse_module_name()
def _parse_create_nickname_statement(self):
"""Parses a CREATE NICKNAME statement"""
# CREATE NICKNAME already matched
self._parse_nickname_name()
if self._match('FOR'):
self._parse_remote_object_name()
else:
self._parse_table_definition(aligntypes=True, alignoptions=True, federated=True)
self._expect_sequence(['FOR', 'SERVER', TT.IDENTIFIER])
if self._match('OPTIONS'):
self._parse_federated_options()
def _parse_create_procedure_statement(self):
"""Parses a CREATE PROCEDURE statement"""
# CREATE PROCEDURE already matched
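        # Illustrative example (assumed DB2 syntax, not from the original
        # source):
        #   CREATE PROCEDURE bump_salary(IN pct DOUBLE)
        #       LANGUAGE SQL
        #       BEGIN
        #           UPDATE staff SET salary = salary * (1 + pct / 100);
        #       END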
self._parse_procedure_name()
if self._match('SOURCE'):
self._parse_source_object_name()
if self._match('(', prespace=False):
self._expect(')')
elif self._match('NUMBER'):
self._expect_sequence(['OF', 'PARAMETERS', TT.NUMBER])
if self._match('UNIQUE'):
self._expect(TT.STRING)
            self._expect_sequence(['FOR', 'SERVER', TT.IDENTIFIER])
elif self._match('(', prespace=False):
if not self._match(')'):
while True:
self._match_one_of(['IN', 'OUT', 'INOUT'])
self._save_state()
try:
self._expect(TT.IDENTIFIER)
self._parse_datatype()
except ParseError:
self._restore_state()
self._parse_datatype()
else:
self._forget_state()
if self._match('DEFAULT'):
self._parse_expression()
if not self._match(','):
break
self._expect(')')
self._indent()
# Parse procedure options (which can appear in any order)
valid = set([
'AUTONOMOUS',
'CALLED',
'COMMIT',
'CONTAINS',
'DBINFO',
'DETERMINISTIC',
'DYNAMIC',
'EXTERNAL',
'FENCED',
'INHERIT',
'LANGUAGE',
'MODIFIES',
'NEW',
'NO',
            'NOT',
'NULL',
'OLD',
'PARAMETER',
'PROGRAM',
'READS',
'RESULT',
'SPECIFIC',
'THREADSAFE',
'WITH',
])
while True:
t = self._match_one_of(valid)
if t:
t = t.value
# Note that matches aren't removed from valid, because it's
# simply too complex to figure out what option disallows other
# options in many cases
else:
break
if t == 'AUTONOMOUS':
pass
elif t == 'CALLED':
self._expect_sequence(['ON', 'NULL', 'INPUT'])
elif t == 'COMMIT':
self._expect_sequence(['ON', 'RETURN'])
self._expect_one_of(['NO', 'YES'])
elif t == 'CONTAINS':
self._expect('SQL')
elif t == 'DBINFO':
pass
elif t == 'DETERMINISTIC':
pass
elif t == 'DYNAMIC':
self._expect_sequence(['RESULT', 'SETS', TT.NUMBER])
elif t == 'EXTERNAL':
if self._match('NAME'):
self._expect_one_of([TT.STRING, TT.IDENTIFIER])
else:
self._expect('ACTION')
elif t == 'FENCED':
pass
elif t == 'INHERIT':
self._expect_sequence(['SPECIAL', 'REGISTERS'])
elif t == 'LANGUAGE':
self._expect_one_of(['SQL', 'C', 'JAVA', 'COBOL', 'CLR', 'OLE'])
elif t == 'MODIFIES':
self._expect_sequence(['SQL', 'DATA'])
elif t in ['NEW', 'OLD']:
self._expect_sequence(['SAVEPOINT', 'LEVEL'])
elif t == 'NO':
if self._match('EXTERNAL'):
self._expect('ACTION')
else:
self._expect_one_of(['DBINFO', 'SQL'])
elif t == 'NOT':
self._expect_one_of(['DETERMINISTIC', 'FENCED', 'THREADSAFE'])
elif t == 'NULL':
self._expect('CALL')
elif t == 'PARAMETER':
if self._match('CCSID'):
self._expect_one_of(['ASCII', 'UNICODE'])
else:
self._expect('STYLE')
p = self._expect_one_of([
'DB2GENERAL',
'DB2GENERL',
'DB2DARI',
'DB2SQL',
'GENERAL',
'SIMPLE',
'JAVA',
'SQL'
]).value
if p == 'GENERAL':
self._match_sequence(['WITH', 'NULLS'])
elif p == 'SIMPLE':
self._expect('CALL')
self._match_sequence(['WITH', 'NULLS'])
elif t == 'PROGRAM':
self._expect('TYPE')
self._expect_one_of(['SUB', 'MAIN'])
elif t == 'READS':
self._expect_sequence(['SQL', 'DATA'])
elif t == 'RESULT':
self._expect_sequence(['SETS', TT.NUMBER])
elif t == 'SPECIFIC':
self._expect(TT.IDENTIFIER)
elif t == 'THREADSAFE':
pass
elif t == 'WITH':
self._expect_sequence(['RETURN', 'TO'])
self._expect_one_of(['CALLER', 'CLIENT'])
self._expect('ALL')
self._newline()
self._outdent()
self._expect('BEGIN')
self._parse_compiled_compound_statement()
def _parse_create_role_statement(self):
"""Parses a CREATE ROLE statement"""
# CREATE ROLE already matched
self._expect(TT.IDENTIFIER)
def _parse_create_schema_statement(self):
"""Parses a CREATE SCHEMA statement"""
# CREATE SCHEMA already matched
if self._match('AUTHORIZATION'):
self._expect(TT.IDENTIFIER)
else:
self._expect(TT.IDENTIFIER)
if self._match('AUTHORIZATION'):
self._expect(TT.IDENTIFIER)
# Parse CREATE/COMMENT/GRANT statements
while True:
if self._match('CREATE'):
if self._match('TABLE'):
self._parse_create_table_statement()
elif self._match('VIEW'):
self._parse_create_view_statement()
elif self._match('INDEX'):
self._parse_create_index_statement(unique=False)
elif self._match_sequence(['UNIQUE', 'INDEX']):
self._parse_create_index_statement(unique=True)
else:
self._expected_one_of(['TABLE', 'VIEW', 'INDEX', 'UNIQUE'])
elif self._match_sequence(['COMMENT', 'ON']):
self._parse_comment_statement()
elif self._match('GRANT'):
self._parse_grant_statement()
else:
break
def _parse_create_security_label_component_statement(self):
"""Parses a CREATE SECURITY LABEL COMPONENT statement"""
# CREATE SECURITY LABEL COMPONENT already matched
self._expect(TT.IDENTIFIER)
if self._match('ARRAY'):
self._expect('[', prespace=False)
while True:
self._expect(TT.STRING)
if not self._match(','):
break
self._expect(']')
elif self._match('SET'):
self._expect('{', prespace=False)
while True:
self._expect(TT.STRING)
if not self._match(','):
break
self._expect('}')
elif self._match('TREE'):
self._expect_sequence(['(', TT.STRING, 'ROOT'], prespace=False)
while self._match(','):
self._expect_sequence([TT.STRING, 'UNDER', TT.STRING])
self._expect(')')
def _parse_create_security_label_statement(self):
"""Parses a CREATE SECURITY LABEL statement"""
# CREATE SECURITY LABEL already matched
self._parse_security_label_name()
while True:
self._expect_sequence(['COMPONENT', TT.IDENTIFIER, TT.STRING])
while self._match_sequence([',', TT.STRING]):
pass
if not self._match(','):
break
def _parse_create_security_policy_statement(self):
"""Parses a CREATE SECURITY POLICY statement"""
# CREATE SECURITY POLICY already matched
self._expect_sequence([TT.IDENTIFIER, 'COMPONENTS'])
while True:
self._expect(TT.IDENTIFIER)
if not self._match(','):
break
self._expect_sequence(['WITH', 'DB2LBACRULES'])
if self._match_one_of(['OVERRIDE', 'RESTRICT']):
self._expect_sequence(['NOT', 'AUTHORIZED', 'WRITE', 'SECURITY', 'LABEL'])
def _parse_create_sequence_statement(self):
"""Parses a CREATE SEQUENCE statement"""
# CREATE SEQUENCE already matched
self._parse_sequence_name()
if self._match('AS'):
self._parse_datatype()
self._parse_identity_options()
def _parse_create_service_class_statement(self):
"""Parses a CREATE SERVICE CLASS statement"""
# CREATE SERVICE CLASS already matched
self._expect(TT.IDENTIFIER)
if self._match('UNDER'):
self._expect(TT.IDENTIFIER)
if self._match_sequence(['AGENT', 'PRIORITY']):
self._expect_one_of(['DEFAULT', TT.NUMBER])
if self._match_sequence(['PREFETCH', 'PRIORITY']):
self._expect_one_of(['DEFAULT', 'HIGH', 'MEDIUM', 'LOW'])
if self._match_sequence(['OUTBOUND', 'CORRELATOR']):
self._expect_one_of(['NONE', TT.STRING])
if self._match_sequence(['COLLECT', 'ACTIVITY', 'DATA']):
self._parse_collect_activity_data_clause(alter=True)
if self._match_sequence(['COLLECT', 'AGGREGATE', 'ACTIVITY', 'DATA']):
self._expect_one_of(['NONE', 'BASE', 'EXTENDED'])
if self._match_sequence(['COLLECT', 'AGGREGATE', 'REQUEST', 'DATA']):
self._expect_one_of(['NONE', 'BASE'])
self._parse_histogram_template_clause()
self._match_one_of(['ENABLE', 'DISABLE'])
def _parse_create_server_statement(self):
"""Parses a CREATE SERVER statement"""
# CREATE SERVER already matched
self._expect(TT.IDENTIFIER)
if self._match('TYPE'):
self._expect(TT.IDENTIFIER)
if self._match('VERSION'):
self._parse_server_version()
if self._match('WRAPPER'):
self._expect(TT.IDENTIFIER)
if self._match('AUTHORIZATION'):
self._expect_sequence([TT.IDENTIFIER, 'PASSWORD', TT.IDENTIFIER])
if self._match('OPTIONS'):
self._parse_federated_options()
def _parse_create_table_statement(self):
"""Parses a CREATE TABLE statement"""
# CREATE TABLE already matched
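        # Illustrative examples (assumed DB2 syntax, not from the original
        # source):
        #   CREATE TABLE t1 LIKE t0
        #   CREATE TABLE s1 (c1 INT NOT NULL, c2 VARCHAR(20)) COMPRESS YES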
self._parse_table_name()
if self._match('LIKE'):
self._parse_relation_name()
self._parse_copy_options()
else:
# Ambiguity: Open parentheses could indicate an optional field list
# preceding a materialized query or staging table definition
reraise = False
self._save_state()
try:
# Try parsing CREATE TABLE ... AS first
if self._match('('):
self._indent()
self._parse_ident_list(newlines=True)
self._outdent()
self._expect(')')
if self._match('AS'):
reraise = True
self._expect('(')
self._indent()
self._parse_full_select()
self._outdent()
self._expect(')')
self._parse_refreshable_table_options()
elif self._match('FOR'):
reraise = True
self._parse_relation_name()
                    self._expect_sequence(['PROPAGATE', 'IMMEDIATE'])
else:
self._expected_one_of(['AS', 'FOR'])
except ParseError:
# If that fails, rewind and parse other CREATE TABLE forms
self._restore_state()
if reraise: raise
self._parse_table_definition(aligntypes=True, alignoptions=True, federated=False)
else:
self._forget_state()
# Parse table option suffixes. Not all of these are valid with
# particular table definitions, but it's too difficult to sort out
# which are valid for what we've parsed so far
valid = set([
'ORGANIZE',
'DATA',
'IN',
'INDEX',
'LONG',
'DISTRIBUTE',
'PARTITION',
'COMPRESS',
'VALUE',
'WITH',
'NOT',
'CCSID',
'SECURITY',
'OPTIONS',
])
while valid:
t = self._match_one_of(valid)
if t:
t = t.value
valid.remove(t)
else:
break
if t == 'ORGANIZE':
self._expect('BY')
if self._match_sequence(['KEY', 'SEQUENCE']):
self._expect('(')
while True:
self._expect(TT.IDENTIFIER)
if self._match('STARTING'):
self._match('FROM')
self._expect(TT.NUMBER)
self._expect('ENDING')
self._match('AT')
self._expect(TT.NUMBER)
if not self._match(','):
break
self._expect(')')
self._expect_one_of(['ALLOW', 'DISALLOW'])
self._expect('OVERFLOW')
if self._match('PCTFREE'):
                        self._expect(TT.NUMBER)
else:
self._match('DIMENSIONS')
self._expect('(')
while True:
if self._match('('):
self._parse_ident_list()
self._expect(')')
else:
self._expect(TT.IDENTIFIER)
                        if not self._match(','):
                            break
                    self._expect(')')
elif t == 'DATA':
self._expect('CAPTURE')
self._expect_one_of(['CHANGES', 'NONE'])
elif t == 'IN':
self._parse_ident_list()
if self._match('NO'):
self._expect('CYCLE')
else:
self._match('CYCLE')
elif t == 'LONG':
self._expect('IN')
self._parse_ident_list()
elif t == 'INDEX':
self._expect_sequence(['IN', TT.IDENTIFIER])
elif t == 'DISTRIBUTE':
self._expect('BY')
if self._match('REPLICATION'):
pass
else:
self._match('HASH')
self._expect('(', prespace=False)
self._parse_ident_list()
self._expect(')')
elif t == 'PARTITION':
self._expect('BY')
self._match('RANGE')
self._expect('(')
while True:
self._expect(TT.IDENTIFIER)
if self._match('NULLS'):
self._expect_one_of(['FIRST', 'LAST'])
if not self._match(','):
break
self._expect_sequence([')', '('])
while True:
if self._match('PARTITION'):
self._expect(TT.IDENTIFIER)
self._parse_partition_boundary()
if self._match('IN'):
self._expect(TT.IDENTIFIER)
elif self._match('EVERY'):
if self._match('('):
self._expect(TT.NUMBER)
self._parse_duration_label()
self._expect(')')
else:
self._expect(TT.NUMBER)
self._parse_duration_label()
if not self._match(','):
break
elif t == 'COMPRESS':
self._expect_one_of(['NO', 'YES'])
elif t == 'VALUE':
self._expect('COMPRESSION')
elif t == 'WITH':
self._expect_sequence(['RESTRICT', 'ON', 'DROP'])
elif t == 'NOT':
self._expect_sequence(['LOGGED', 'INITIALLY'])
elif t == 'CCSID':
self._expect_one_of(['ASCII', 'UNICODE'])
elif t == 'SECURITY':
self._expect_sequence(['POLICY', TT.IDENTIFIER])
elif t == 'OPTIONS':
self._parse_federated_options(alter=False)
def _parse_create_tablespace_statement(self, tbspacetype='REGULAR'):
"""Parses a CREATE TABLESPACE statement"""
# CREATE TABLESPACE already matched
self._expect(TT.IDENTIFIER)
if self._match('IN'):
if self._match('DATABASE'):
self._expect_sequence(['PARTITION', 'GROUP'])
elif self._match('NODEGROUP'):
pass
self._expect(TT.IDENTIFIER)
if self._match('PAGESIZE'):
self._expect(TT.NUMBER)
self._match('K')
if self._match('MANAGED'):
self._expect('BY')
if self._match('AUTOMATIC'):
self._expect('STORAGE')
self._parse_tablespace_size_attributes()
elif self._match('DATABASE'):
self._expect('USING')
while True:
self._parse_database_container_clause()
if self._match('ON'):
self._parse_db_partition_list_clause(size=False)
if not self._match('USING'):
break
self._parse_tablespace_size_attributes()
elif self._match('SYSTEM'):
self._expect('USING')
while True:
self._parse_system_container_clause()
if self._match('ON'):
self._parse_db_partition_list_clause(size=False)
if not self._match('USING'):
break
else:
self._expected_one_of(['AUTOMATIC', 'DATABASE', 'SYSTEM'])
if self._match('EXTENTSIZE'):
self._expect(TT.NUMBER)
self._match_one_of(['K', 'M'])
if self._match('PREFETCHSIZE'):
self._expect(TT.NUMBER)
self._match_one_of(['K', 'M', 'G'])
if self._match('BUFFERPOOL'):
self._expect(TT.IDENTIFIER)
if self._match('OVERHEAD'):
self._expect(TT.NUMBER)
if self._match('NO'):
self._expect_sequence(['FILE', 'SYSTEM', 'CACHING'])
elif self._match('FILE'):
self._expect_sequence(['SYSTEM', 'CACHING'])
if self._match('TRANSFERRATE'):
self._expect(TT.NUMBER)
if self._match('DROPPED'):
self._expect_sequence(['TABLE', 'RECOVERY'])
self._expect_one_of(['ON', 'OFF'])
def _parse_create_threshold_statement(self):
"""Parses a CREATE THRESHOLD statement"""
# CREATE THRESHOLD already matched
self._expect_sequence([TT.IDENTIFIER, 'FOR'])
if self._match('SERVICE'):
self._expect_sequence(['CLASS', TT.IDENTIFIER])
if self._match('UNDER'):
self._expect(TT.IDENTIFIER)
elif self._match('WORKLOAD'):
self._expect(TT.IDENTIFIER)
elif not self._match('DATABASE'):
self._expected_one_of(['SERVICE', 'WORKLOAD', 'DATABASE'])
self._expect_sequence(['ACTIVITIES', 'ENFORCEMENT'])
if self._match('DATABASE'):
self._match('PARTITION')
elif self._match('WORKLOAD'):
self._expect('OCCURRENCE')
else:
self._expected_one_of(['DATABASE', 'WORKLOAD'])
self._match_one_of(['ENABLE', 'DISABLE'])
self._expect('WHEN')
self._parse_threshold_predicate()
self._parse_threshold_exceeded_actions()
def _parse_create_trigger_statement(self):
"""Parses a CREATE TRIGGER statement"""
# CREATE TRIGGER already matched
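        # Illustrative example (assumed DB2 syntax, not from the original
        # source):
        #   CREATE TRIGGER audit_ins AFTER INSERT ON staff
        #       REFERENCING NEW AS n FOR EACH ROW
        #       INSERT INTO staff_log VALUES (n.id, CURRENT TIMESTAMP)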
self._parse_trigger_name()
self._indent()
if self._match_sequence(['NO', 'CASCADE']):
self._expect('BEFORE')
elif self._match('BEFORE'):
pass
elif self._match_sequence(['INSTEAD', 'OF']):
pass
elif self._match('AFTER'):
pass
else:
self._expected_one_of(['AFTER', 'BEFORE', 'NO', 'INSTEAD'])
if self._match('UPDATE'):
if self._match('OF'):
self._indent()
self._parse_ident_list(newlines=True)
self._outdent()
else:
self._expect_one_of(['INSERT', 'DELETE', 'UPDATE'])
self._expect('ON')
self._parse_table_name()
if self._match('REFERENCING'):
self._newline(-1)
valid = ['OLD', 'NEW', 'OLD_TABLE', 'NEW_TABLE']
while valid:
if len(valid) == 4:
t = self._expect_one_of(valid)
else:
t = self._match_one_of(valid)
if t:
t = t.value
valid.remove(t)
else:
break
if t in ('OLD', 'NEW'):
if 'OLD_TABLE' in valid: valid.remove('OLD_TABLE')
if 'NEW_TABLE' in valid: valid.remove('NEW_TABLE')
elif t in ('OLD_TABLE', 'NEW_TABLE'):
if 'OLD' in valid: valid.remove('OLD')
if 'NEW' in valid: valid.remove('NEW')
self._match('AS')
self._expect(TT.IDENTIFIER)
self._newline()
self._expect_sequence(['FOR', 'EACH'])
self._expect_one_of(['ROW', 'STATEMENT'])
if self._match('MODE'):
self._newline(-1)
self._expect('DB2SQL')
if self._match('WHEN'):
self._expect('(')
self._indent()
self._parse_search_condition()
self._outdent()
self._expect(')')
try:
label = self._expect(TT.LABEL).value
self._outdent(-1)
self._newline()
except ParseError:
label = None
if self._match('BEGIN'):
if not label: self._outdent(-1)
self._parse_compiled_compound_statement(label=label)
else:
self._newline()
self._parse_compiled_statement()
if not label: self._outdent()
# XXX This shouldn't be here, but DB2 for z/OS appears to have a
# parser bug which allows this
self._match_sequence([(TT.TERMINATOR, ';'), (TT.KEYWORD, 'END')])
def _parse_create_trusted_context_statement(self):
"""Parses a CREATE TRUSTED CONTEXT statement"""
# CREATE TRUSTED CONTEXT already matched
self._expect_sequence([TT.IDENTIFIER, 'BASED', 'UPON', 'CONNECTION', 'USING'])
valid = set([
'SYSTEM',
'ATTRIBUTES',
'NO',
'DEFAULT',
'DISABLE',
'ENABLE',
'WITH',
])
while valid:
t = self._match_one_of(valid)
if t:
t = t.value
valid.remove(t)
else:
break
if t == 'SYSTEM':
self._expect_sequence(['AUTHID', TT.IDENTIFIER])
elif t == 'ATTRIBUTES':
                self._expect('(')
                while True:
                    if self._match('ADDRESS'):
                        self._expect(TT.STRING)
                        if self._match('WITH'):
                            self._expect_sequence(['ENCRYPTION', TT.STRING])
                    elif self._match('ENCRYPTION'):
                        self._expect(TT.STRING)
                    if not self._match(','):
                        break
                self._expect(')')
elif t == 'NO':
valid.remove('DEFAULT')
self._expect_sequence(['DEFAULT', 'ROLE'])
elif t == 'DEFAULT':
valid.remove('NO')
self._expect_sequence(['ROLE', TT.IDENTIFIER])
elif t == 'DISABLE':
valid.remove('ENABLE')
elif t == 'ENABLE':
valid.remove('DISABLE')
elif t == 'WITH':
self._expect_sequence(['USE', 'FOR'])
if not self._match('PUBLIC'):
self._expect(TT.IDENTIFIER)
if self._match('ROLE'):
self._expect(TT.IDENTIFIER)
if self._match_one_of(['WITH', 'WITHOUT']):
self._expect('AUTHENTICATION')
def _parse_create_type_statement(self):
"""Parses a CREATE DISTINCT TYPE statement"""
# CREATE DISTINCT TYPE already matched
self._parse_type_name()
self._expect('AS')
self._parse_datatype()
if self._match('ARRAY'):
self._expect('[', prespace=False)
self._match(TT.NUMBER)
self._expect(']')
else:
self._match_sequence(['WITH', 'COMPARISONS'])
def _parse_create_type_mapping_statement(self):
"""Parses a CREATE TYPE MAPPING statement"""
# CREATE TYPE MAPPING already matched
self._match(TT.IDENTIFIER)
valid = set(['FROM', 'TO'])
t = self._expect_one_of(valid).value
valid.remove(t)
self._match_sequence(['LOCAL', 'TYPE'])
self._parse_datatype()
self._expect_one_of(valid)
self._parse_remote_server()
self._match('REMOTE')
self._expect('TYPE')
self._parse_type_name()
if self._match('FOR'):
self._expect_sequence(['BIT', 'DATA'])
elif self._match('(', prespace=False):
if self._match('['):
self._expect_sequence([TT.NUMBER, '..', TT.NUMBER], interspace=False)
self._expect(']')
else:
self._expect(TT.NUMBER)
if self._match(','):
if self._match('['):
self._expect_sequence([TT.NUMBER, '..', TT.NUMBER], interspace=False)
self._expect(']')
else:
self._expect(TT.NUMBER)
self._expect(')')
if self._match('P'):
self._expect_one_of(['=', '>', '<', '>=', '<=', '<>'])
self._expect('S')
def _parse_create_user_mapping_statement(self):
"""Parses a CREATE USER MAPPING statement"""
# CREATE USER MAPPING already matched
self._expect('FOR')
self._expect_one_of(['USER', TT.IDENTIFIER])
self._expect_sequence(['SERVER', TT.IDENTIFIER])
self._expect('OPTIONS')
self._parse_federated_options(alter=False)
def _parse_create_variable_statement(self):
"""Parses a CREATE VARIABLE statement"""
# CREATE VARIABLE already matched
self._parse_variable_name()
self._parse_datatype()
if self._match_one_of(['DEFAULT', 'CONSTANT']):
self._parse_expression()
def _parse_create_view_statement(self):
"""Parses a CREATE VIEW statement"""
# CREATE VIEW already matched
self._parse_view_name()
if self._match('('):
self._indent()
self._parse_ident_list(newlines=True)
self._outdent()
self._expect(')')
self._expect('AS')
self._newline()
self._parse_query()
valid = set(['CASCADED', 'LOCAL', 'CHECK', 'ROW', 'NO'])
while valid:
if not self._match('WITH'):
break
t = self._expect_one_of(valid).value
valid.remove(t)
if t in ('CASCADED', 'LOCAL', 'CHECK'):
valid.discard('CASCADED')
valid.discard('LOCAL')
valid.discard('CHECK')
if t != 'CHECK':
self._expect('CHECK')
self._expect('OPTION')
elif t == 'NO':
valid.remove('ROW')
self._expect_sequence(['ROW', 'MOVEMENT'])
elif t == 'ROW':
valid.remove('NO')
self._expect('MOVEMENT')
def _parse_create_work_action_set_statement(self):
"""Parses a CREATE WORK ACTION SET statement"""
# CREATE WORK ACTION SET already matched
self._expect(TT.IDENTIFIER)
self._expect('FOR')
if self._match('SERVICE'):
self._expect_sequence(['CLASS', TT.IDENTIFIER])
elif self._match('DATABASE'):
pass
else:
self._expected_one_of(['SERVICE', 'DATABASE'])
self._expect_sequence(['USING', 'WORK', 'CLASS', 'SET', TT.IDENTIFIER])
if self._match('('):
self._indent()
while True:
self._expect_sequence(['WORK', 'ACTION', TT.IDENTIFIER, 'ON', 'WORK', 'CLASS', TT.IDENTIFIER])
self._parse_action_types_clause()
self._parse_histogram_template_clause()
self._match_one_of(['ENABLE', 'DISABLE'])
if self._match(','):
self._newline()
else:
break
self._outdent()
self._expect(')')
self._match_one_of(['ENABLE', 'DISABLE'])
def _parse_create_work_class_set_statement(self):
"""Parses a CREATE WORK CLASS SET statement"""
# CREATE WORK CLASS SET already matched
self._expect(TT.IDENTIFIER)
if self._match('('):
self._indent()
while True:
self._match_sequence(['WORK', 'CLASS'])
self._expect(TT.IDENTIFIER)
self._parse_work_attributes()
if self._match('POSITION'):
self._parse_position_clause()
if self._match(','):
self._newline()
else:
break
self._outdent()
self._expect(')')
def _parse_create_workload_statement(self):
"""Parses a CREATE WORKLOAD statement"""
        # CREATE WORKLOAD already matched
self._expect(TT.IDENTIFIER)
first = True
while True:
# Repeatedly try and match connection attributes. Only raise a
# parse error if the first match fails
            self._save_state()
            try:
                self._parse_connection_attributes()
            except ParseError as e:
                self._restore_state()
                if first:
                    raise e
                else:
                    break
            else:
                self._forget_state()
            first = False
self._match_one_of(['ENABLE', 'DISABLE'])
if self._match_one_of(['ALLOW', 'DISALLOW']):
self._expect_sequence(['DB', 'ACCESS'])
if self._match_sequence(['SERVICE', 'CLASS']):
if not self._match('SYSDEFAULTUSERCLASS'):
self._expect(TT.IDENTIFIER)
self._match_sequence(['UNDER', TT.IDENTIFIER])
if self._match('POSITION'):
self._parse_position_clause()
if self._match_sequence(['COLLECT', 'ACTIVITY', 'DATA']):
self._parse_collect_activity_data_clause(alter=True)
def _parse_create_wrapper_statement(self):
"""Parses a CREATE WRAPPER statement"""
# CREATE WRAPPER already matched
self._expect(TT.IDENTIFIER)
if self._match('LIBRARY'):
self._expect(TT.STRING)
if self._match('OPTIONS'):
self._parse_federated_options(alter=False)
def _parse_declare_cursor_statement(self):
"""Parses a top-level DECLARE CURSOR statement"""
# DECLARE already matched
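        # Illustrative example (assumed DB2 syntax, not from the original
        # source):
        #   DECLARE c1 CURSOR WITH HOLD FOR SELECT * FROM staff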
self._expect_sequence([TT.IDENTIFIER, 'CURSOR'])
self._match_sequence(['WITH', 'HOLD'])
self._expect('FOR')
self._newline()
self._parse_select_statement()
def _parse_declare_global_temporary_table_statement(self):
"""Parses a DECLARE GLOBAL TEMPORARY TABLE statement"""
# DECLARE GLOBAL TEMPORARY TABLE already matched
self._parse_table_name()
if self._match('LIKE'):
self._parse_table_name()
self._parse_copy_options()
elif self._match('AS'):
self._parse_full_select()
self._expect_sequence(['DEFINITION', 'ONLY'])
self._parse_copy_options()
else:
self._parse_table_definition(aligntypes=True, alignoptions=False, federated=False)
valid = set(['ON', 'NOT', 'WITH', 'IN', 'PARTITIONING'])
while valid:
t = self._match_one_of(valid)
if t:
t = t.value
valid.remove(t)
else:
break
if t == 'ON':
self._expect('COMMIT')
self._expect_one_of(['DELETE', 'PRESERVE'])
self._expect('ROWS')
elif t == 'NOT':
self._expect('LOGGED')
if self._match('ON'):
self._expect('ROLLBACK')
self._expect_one_of(['DELETE', 'PRESERVE'])
self._expect('ROWS')
elif t == 'WITH':
self._expect('REPLACE')
elif t == 'IN':
self._expect(TT.IDENTIFIER)
elif t == 'PARTITIONING':
self._expect('KEY')
self._expect('(')
self._parse_ident_list()
self._expect(')')
self._match_sequence(['USING', 'HASHING'])
def _parse_delete_statement(self):
"""Parses a DELETE statement"""
# DELETE already matched
self._expect('FROM')
if self._match('('):
self._indent()
self._parse_full_select()
self._outdent()
self._expect(')')
else:
self._parse_subschema_name()
# Ambiguity: INCLUDE is an identifier and hence can look like a table
# correlation name
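        # For example (illustrative, assuming DB2 data-change syntax):
        # "DELETE FROM t1 AS t INCLUDE (a INTEGER)" opens a column list after
        # INCLUDE, while in "DELETE FROM t1 include" the word "include" is
        # merely a correlation name; we backtrack below to tell them apart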
reraise = False
self._save_state()
try:
# Try and parse a mandatory table correlation followed by a
# mandatory INCLUDE
self._parse_table_correlation(optional=False)
self._newline()
self._expect('INCLUDE')
reraise = True
self._expect('(')
self._indent()
self._parse_ident_type_list(newlines=True)
self._outdent()
self._expect(')')
# XXX Is SET required for an assignment clause? The syntax diagram
# doesn't think so...
if self._match('SET'):
self._parse_assignment_clause(allowdefault=False)
except ParseError:
# If that fails, rewind and parse an optional INCLUDE or an
# optional table correlation
self._restore_state()
if reraise: raise
if self._match('INCLUDE'):
self._newline(-1)
self._expect('(')
self._indent()
self._parse_ident_type_list(newlines=True)
self._outdent()
self._expect(')')
if self._match('SET'):
self._newline(-1)
self._parse_assignment_clause(allowdefault=False)
else:
self._parse_table_correlation()
else:
self._forget_state()
if self._match('WHERE'):
self._newline(-1)
self._indent()
self._parse_search_condition()
self._outdent()
if self._match('WITH'):
self._newline(-1)
self._expect_one_of(['RR', 'RS', 'CS', 'UR'])
def _parse_drop_statement(self):
"""Parses a DROP statement"""
# DROP already matched
if self._match_one_of(['ALIAS', 'SYNONYM', 'TABLE', 'VIEW', 'NICKNAME', 'VARIABLE']):
self._parse_subschema_name()
elif self._match_sequence(['FUNCTION', 'MAPPING']):
self._parse_function_name()
elif self._match_one_of(['FUNCTION', 'PROCEDURE']):
self._parse_routine_name()
if self._match('(', prespace=False):
self._parse_datatype_list()
self._expect(')')
elif self._match('SPECIFIC'):
self._expect_one_of(['FUNCTION', 'PROCEDURE'])
self._parse_routine_name()
elif self._match('INDEX'):
self._parse_index_name()
elif self._match('SEQUENCE'):
self._parse_sequence_name()
elif self._match_sequence(['SERVICE', 'CLASS']):
self._expect(TT.IDENTIFIER)
if self._match('UNDER'):
self._expect(TT.IDENTIFIER)
elif self._match_one_of(['TABLESPACE', 'TABLESPACES']):
self._parse_ident_list()
elif self._match_one_of(['DATA', 'DISTINCT']):
self._expect('TYPE')
self._parse_type_name()
elif self._match_sequence(['TYPE', 'MAPPING']):
self._parse_type_name()
elif self._match('TYPE'):
self._parse_type_name()
elif self._match_sequence(['USER', 'MAPPING']):
self._expect('FOR')
self._expect_one_of(['USER', TT.IDENTIFIER])
self._expect_sequence(['SERVER', TT.IDENTIFIER])
elif (self._match_sequence(['AUDIT', 'POLICY']) or
self._match('BUFFERPOOL') or
self._match_sequence(['EVENT', 'MONITOR']) or
                self._match_sequence(['HISTOGRAM', 'TEMPLATE']) or
self._match('NODEGROUP') or
self._match_sequence(['DATABASE', 'PARTITION', 'GROUP']) or
self._match('ROLE') or
self._match('SCHEMA') or
self._match_sequence(['SECURITY', 'LABEL', 'COMPONENT']) or
self._match_sequence(['SECURITY', 'LABEL']) or
self._match_sequence(['SECURITY', 'POLICY']) or
self._match('SERVER') or
self._match('THRESHOLD') or
self._match('TRIGGER') or
self._match_sequence(['TRUSTED', 'CONTEXT']) or
self._match_sequence(['WORK', 'ACTION', 'SET']) or
self._match_sequence(['WORK', 'CLASS', 'SET']) or
self._match('WORKLOAD') or
self._match('WRAPPER')):
self._expect(TT.IDENTIFIER)
else:
self._expected_one_of([
'ALIAS',
'AUDIT',
'BUFFERPOOL',
'DATA',
'DATABASE',
'DISTINCT',
'EVENT',
'FUNCTION',
'HISTOGRAM',
'INDEX',
'NICKNAME',
'NODEGROUP',
'PROCEDURE',
'ROLE',
'SCHEMA',
'SECURITY',
'SEQUENCE',
'SERVICE',
'SPECIFIC',
'TABLE',
'TABLESPACE',
'THRESHOLD',
'TRIGGER',
'TRUSTED',
'TYPE',
'USER',
'VARIABLE',
'VIEW',
'WORK',
'WORKLOAD',
'WRAPPER',
])
# XXX Strictly speaking, this isn't DB2 syntax - it's generic SQL. But
# if we stick to strict DB2 semantics, this routine becomes boringly
# long...
self._match_one_of(['RESTRICT', 'CASCADE'])
def _parse_execute_immediate_statement(self):
"""Parses an EXECUTE IMMEDIATE statement in a procedure"""
# EXECUTE IMMEDIATE already matched
self._parse_expression()
def _parse_execute_statement(self):
"""Parses an EXECUTE statement in a procedure"""
# EXECUTE already matched
self._expect(TT.IDENTIFIER)
if self._match('INTO'):
while True:
self._parse_subrelation_name()
if self._match('['):
self._parse_expression()
self._expect(']')
if self._match('.'):
self._expect(TT.IDENTIFIER)
if not self._match(','):
break
if self._match('USING'):
self._parse_expression_list()
def _parse_explain_statement(self):
"""Parses an EXPLAIN statement"""
# EXPLAIN already matched
if self._match('PLAN'):
self._match('SELECTION')
else:
self._expect_one_of(['PLAN', 'ALL'])
if self._match_one_of(['FOR', 'WITH']):
self._expect('SNAPSHOT')
self._match_sequence(['WITH', 'REOPT', 'ONCE'])
self._match_sequence(['SET', 'QUERYNO', '=', TT.NUMBER])
        self._match_sequence(['SET', 'QUERYTAG', '=', TT.STRING])
self._expect('FOR')
if self._match('DELETE'):
self._parse_delete_statement()
elif self._match('INSERT'):
self._parse_insert_statement()
elif self._match('MERGE'):
self._parse_merge_statement()
elif self._match_sequence(['REFRESH', 'TABLE']):
self._parse_refresh_table_statement()
elif self._match_sequence(['SET', 'INTEGRITY']):
self._parse_set_integrity_statement()
elif self._match('UPDATE'):
self._parse_update_statement()
else:
self._parse_select_statement()
def _parse_fetch_statement(self):
"""Parses a FETCH FROM statement in a procedure"""
# FETCH already matched
self._match('FROM')
self._expect(TT.IDENTIFIER)
if self._match('INTO'):
self._parse_ident_list()
elif self._match('USING'):
self._expect('DESCRIPTOR')
self._expect(TT.IDENTIFIER)
else:
self._expected_one_of(['INTO', 'USING'])
def _parse_flush_optimization_profile_cache_statement(self):
"""Parses a FLUSH OPTIMIZATION PROFILE CACHE statement"""
# FLUSH OPTIMIZATION PROFILE CACHE already matched
if not self._match('ALL'):
self._parse_subschema_name()
def _parse_for_statement(self, label=None):
"""Parses a FOR-loop in a dynamic compound statement"""
# FOR already matched
self._expect_sequence([TT.IDENTIFIER, 'AS'])
reraise = False
self._indent()
# Ambiguity: IDENTIFIER vs. select-statement
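        # For example (illustrative): "FOR v AS c1 CURSOR FOR SELECT c FROM t
        # DO SET x = v.c; END FOR" names an explicit cursor, while "FOR v AS
        # SELECT c FROM t DO SET x = v.c; END FOR" does not; if the cursor
        # form fails to parse we rewind and assume the latter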
self._save_state()
try:
self._expect(TT.IDENTIFIER)
self._match_one_of(['ASENSITIVE', 'INSENSITIVE'])
self._expect('CURSOR')
reraise = True
if self._match_one_of(['WITH', 'WITHOUT']):
self._expect('HOLD')
self._expect('FOR')
except ParseError:
self._restore_state()
if reraise: raise
else:
self._forget_state()
self._parse_select_statement()
self._outdent()
self._expect('DO')
self._indent()
while True:
self._parse_compiled_statement()
self._expect((TT.TERMINATOR, ';'))
self._newline()
if self._match('END'):
break
self._outdent(-1)
self._expect('FOR')
if label:
self._match((TT.IDENTIFIER, label))
def _parse_free_locator_statement(self):
"""Parses a FREE LOCATOR statement"""
# FREE LOCATOR already matched
self._parse_ident_list()
def _parse_get_diagnostics_statement(self):
"""Parses a GET DIAGNOSTICS statement in a dynamic compound statement"""
# GET DIAGNOSTICS already matched
if self._match('EXCEPTION'):
self._expect((TT.NUMBER, 1))
while True:
self._expect_sequence([TT.IDENTIFIER, '='])
self._expect_one_of(['MESSAGE_TEXT', 'DB2_TOKEN_STRING'])
if not self._match(','):
break
else:
self._expect_sequence([TT.IDENTIFIER, '='])
            self._expect_one_of(['ROW_COUNT', 'DB2_RETURN_STATUS'])
def _parse_goto_statement(self):
"""Parses a GOTO statement in a procedure"""
# GOTO already matched
self._expect(TT.IDENTIFIER)
def _parse_grant_statement(self):
"""Parses a GRANT statement"""
# GRANT already matched
self._parse_grant_revoke(grant=True)
def _parse_if_statement(self):
"""Parses an IF-conditional in a dynamic compound statement"""
# IF already matched
t = 'IF'
while True:
if t in ('IF', 'ELSEIF'):
self._parse_search_condition(newlines=False)
self._expect('THEN')
self._indent()
while True:
self._parse_compiled_statement()
self._expect((TT.TERMINATOR, ';'))
t = self._match_one_of(['ELSEIF', 'ELSE', 'END'])
if t:
self._outdent(-1)
t = t.value
break
else:
self._newline()
elif t == 'ELSE':
self._indent()
while True:
self._parse_compiled_statement()
self._expect((TT.TERMINATOR, ';'))
if self._match('END'):
self._outdent(-1)
break
else:
self._newline()
break
else:
break
self._expect('IF')
def _parse_insert_statement(self):
"""Parses an INSERT statement"""
# INSERT already matched
self._expect('INTO')
if self._match('('):
self._indent()
self._parse_full_select()
self._outdent()
self._expect(')')
else:
self._parse_subschema_name()
if self._match('('):
self._indent()
self._parse_ident_list(newlines=True)
self._outdent()
self._expect(')')
if self._match('INCLUDE'):
self._newline(-1)
self._expect('(')
self._indent()
self._parse_ident_type_list(newlines=True)
self._outdent()
self._expect(')')
# Parse a full-select with optional common-table-expression, allowing
# the DEFAULT keyword in (for example) a VALUES clause
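        # For example (illustrative): "INSERT INTO t1 (c1, c2) VALUES
        # (1, DEFAULT)" relies on allowdefault=True below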
self._newline()
self._parse_query(allowdefault=True)
if self._match('WITH'):
self._newline(-1)
self._expect_one_of(['RR', 'RS', 'CS', 'UR'])
def _parse_iterate_statement(self):
"""Parses an ITERATE statement within a loop"""
# ITERATE already matched
self._match(TT.IDENTIFIER)
def _parse_leave_statement(self):
"""Parses a LEAVE statement within a loop"""
# LEAVE already matched
self._match(TT.IDENTIFIER)
def _parse_lock_table_statement(self):
"""Parses a LOCK TABLE statement"""
# LOCK TABLE already matched
self._parse_table_name()
self._expect('IN')
self._expect_one_of(['SHARE', 'EXCLUSIVE'])
self._expect('MODE')
def _parse_loop_statement(self, label=None):
"""Parses a LOOP-loop in a procedure"""
# LOOP already matched
self._indent()
while True:
self._parse_compiled_statement()
self._expect((TT.TERMINATOR, ';'))
if self._match('END'):
self._outdent(-1)
break
else:
self._newline()
self._expect('LOOP')
if label:
self._match((TT.IDENTIFIER, label))
def _parse_merge_statement(self):
        """Parses a MERGE statement"""
        # MERGE already matched
self._expect('INTO')
if self._match('('):
self._indent()
self._parse_full_select()
self._outdent()
self._expect(')')
else:
self._parse_subschema_name()
self._parse_table_correlation()
self._expect('USING')
self._parse_table_ref()
self._expect('ON')
self._parse_search_condition()
self._expect('WHEN')
while True:
self._match('NOT')
self._expect('MATCHED')
if self._match('AND'):
self._parse_search_condition()
self._expect('THEN')
self._indent()
if self._match('UPDATE'):
self._expect('SET')
self._parse_assignment_clause(allowdefault=True)
elif self._match('INSERT'):
if self._match('('):
self._parse_ident_list()
self._expect(')')
self._expect('VALUES')
if self._match('('):
self._parse_expression_list(allowdefault=True)
self._expect(')')
else:
if not self._match('DEFAULT'):
self._parse_expression()
if not self._match(','):
break
elif self._match('DELETE'):
pass
elif self._match('SIGNAL'):
                self._parse_signal_statement()
self._outdent()
if not self._match('WHEN'):
break
self._match_sequence(['ELSE', 'IGNORE'])
def _parse_open_statement(self):
"""Parses an OPEN cursor statement"""
# OPEN already matched
self._expect(TT.IDENTIFIER)
if self._match('('):
if not self._match(')'):
self._parse_expression_list()
self._expect(')')
if self._match('USING'):
self._parse_expression_list()
def _parse_prepare_statement(self):
"""Parses a PREPARE statement"""
# PREPARE already matched
self._expect(TT.IDENTIFIER)
if self._match('OUTPUT'):
self._expect('INTO')
self._expect(TT.IDENTIFIER)
elif self._match('INTO'):
self._expect(TT.IDENTIFIER)
if self._match('INPUT'):
self._expect('INTO')
self._expect(TT.IDENTIFIER)
self._expect('FROM')
self._parse_expression()
def _parse_refresh_table_statement(self):
"""Parses a REFRESH TABLE statement"""
# REFRESH TABLE already matched
while True:
self._parse_table_name()
queryopt = False
if self._match('ALLOW'):
if self._match_one_of(['NO', 'READ', 'WRITE']):
self._expect('ACCESS')
elif self._match_sequence(['QUERY', 'OPTIMIZATION']):
queryopt = True
self._expect_sequence(['USING', 'REFRESH', 'DEFERRED', 'TABLES'])
self._match_sequence(['WITH', 'REFRESH', 'AGE', 'ANY'])
else:
self._expected_one_of(['NO', 'READ', 'WRITE', 'QUERY'])
if not queryopt:
if self._match_sequence(['USING', 'REFRESH', 'DEFERRED', 'TABLES']):
self._match_sequence(['WITH', 'REFRESH', 'AGE', 'ANY'])
if not self._match(','):
break
        if self._match('NOT'):
            self._expect('INCREMENTAL')
        else:
            self._match('INCREMENTAL')
def _parse_release_savepoint_statement(self):
"""Parses a RELEASE SAVEPOINT statement"""
# RELEASE [TO] SAVEPOINT already matched
self._expect(TT.IDENTIFIER)
def _parse_rename_tablespace_statement(self):
"""Parses a RENAME TABLESPACE statement"""
# RENAME TABLESPACE already matched
self._expect_sequence([TT.IDENTIFIER, 'TO', TT.IDENTIFIER])
def _parse_rename_statement(self):
"""Parses a RENAME statement"""
# RENAME already matched
if self._match('INDEX'):
self._parse_index_name()
else:
self._match('TABLE')
self._parse_table_name()
self._expect_sequence(['TO', TT.IDENTIFIER])
def _parse_repeat_statement(self, label=None):
"""Parses a REPEAT-loop in a procedure"""
# REPEAT already matched
self._indent()
while True:
self._parse_compiled_statement()
self._expect((TT.TERMINATOR, ';'))
self._newline()
if self._match('UNTIL'):
break
else:
self._newline()
self._outdent(-1)
self._parse_search_condition()
self._expect_sequence(['END', 'REPEAT'])
if label:
self._match((TT.IDENTIFIER, label))
def _parse_resignal_statement(self):
"""Parses a RESIGNAL statement in a dynamic compound statement"""
        # RESIGNAL already matched
if self._match('SQLSTATE'):
self._match('VALUE')
self._expect_one_of([TT.IDENTIFIER, TT.STRING])
else:
if not self._match(TT.IDENTIFIER):
return
if self._match('SET'):
self._expect_sequence(['MESSAGE_TEXT', '='])
self._parse_expression()
def _parse_return_statement(self):
"""Parses a RETURN statement in a compound statement"""
# RETURN already matched
self._save_state()
try:
# Try and parse a select-statement
self._parse_query()
except ParseError:
# If it fails, rewind and try an expression or tuple instead
self._restore_state()
self._save_state()
try:
self._parse_expression()
except ParseError:
self._restore_state()
# If parsing an expression fails, assume it's a parameter-less
# RETURN (as can be used in a procedure)
else:
self._forget_state()
else:
self._forget_state()
def _parse_revoke_statement(self):
"""Parses a REVOKE statement"""
# REVOKE already matched
self._parse_grant_revoke(grant=False)
def _parse_rollback_statement(self):
"""Parses a ROLLBACK statement"""
# ROLLBACK already matched
self._match('WORK')
if self._match('TO'):
self._expect('SAVEPOINT')
self._match(TT.IDENTIFIER)
def _parse_savepoint_statement(self):
"""Parses a SAVEPOINT statement"""
# SAVEPOINT already matched
self._expect(TT.IDENTIFIER)
self._match('UNIQUE')
self._expect_sequence(['ON', 'ROLLBACK', 'RETAIN', 'CURSORS'])
self._match_sequence(['ON', 'ROLLBACK', 'RETAIN', 'LOCKS'])
def _parse_select_statement(self, allowinto=False):
"""Parses a SELECT statement"""
# A top-level select-statement never permits DEFAULTS, although it
# might permit INTO in a procedure
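        # For example (illustrative), "SELECT c1 INTO v1 FROM t1" is only
        # accepted when allowinto is True, i.e. within a procedure body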
self._parse_query(allowdefault=False, allowinto=allowinto)
# Parse optional SELECT attributes (FOR UPDATE, WITH isolation, etc.)
valid = ['WITH', 'FOR', 'OPTIMIZE']
while valid:
t = self._match_one_of(valid)
if t:
self._newline(-1)
t = t.value
valid.remove(t)
else:
break
if t == 'FOR':
if self._match_one_of(['READ', 'FETCH']):
self._expect('ONLY')
elif self._match('UPDATE'):
if self._match('OF'):
self._parse_ident_list()
else:
self._expected_one_of(['READ', 'FETCH', 'UPDATE'])
elif t == 'OPTIMIZE':
self._expect_sequence(['FOR', TT.NUMBER])
self._expect_one_of(['ROW', 'ROWS'])
elif t == 'WITH':
if self._expect_one_of(['RR', 'RS', 'CS', 'UR']).value in ('RR', 'RS'):
if self._match('USE'):
self._expect_sequence(['AND', 'KEEP'])
self._expect_one_of(['SHARE', 'EXCLUSIVE', 'UPDATE'])
self._expect('LOCKS')
def _parse_set_integrity_statement(self):
"""Parses a SET INTEGRITY statement"""
def parse_access_mode():
if self._match_one_of(['NO', 'READ']):
self._expect('ACCESS')
def parse_cascade_clause():
if self._match('CASCADE'):
if self._expect_one_of(['DEFERRED', 'IMMEDIATE']).value == 'IMMEDIATE':
if self._match('TO'):
if self._match('ALL'):
self._expect('TABLES')
else:
while True:
if self._match('MATERIALIZED'):
self._expect_sequence(['QUERY', 'TABLES'])
elif self._match('FOREIGN'):
self._expect_sequence(['KEY', 'TABLES'])
elif self._match('STAGING'):
self._expect('TABLES')
else:
self._expected_one_of(['MATERIALIZED', 'STAGING', 'FOREIGN'])
if not self._match(','):
break
def parse_check_options():
valid = [
'INCREMENTAL',
'NOT',
'FORCE',
'PRUNE',
'FULL',
'FOR',
]
while valid:
t = self._match_one_of(valid)
if t:
t = t.value
valid.remove(t)
else:
break
if t == 'INCREMENTAL':
valid.remove('NOT')
                elif t == 'NOT':
self._expect('INCREMENTAL')
valid.remove('INCREMENTAL')
elif t == 'FORCE':
self._expect('GENERATED')
elif t == 'PRUNE':
pass
elif t == 'FULL':
self._expect('ACCESS')
elif t == 'FOR':
self._expect('EXCEPTION')
while True:
self._expect('IN')
self._parse_table_name()
self._expect('USE')
self._parse_table_name()
if not self._match(','):
break
def parse_integrity_options():
if not self._match('ALL'):
while True:
if self._match('FOREIGN'):
self._expect('KEY')
elif self._match('CHECK'):
pass
elif self._match('DATALINK'):
self._expect_sequence(['RECONCILE', 'PENDING'])
elif self._match('MATERIALIZED'):
self._expect('QUERY')
elif self._match('GENERATED'):
self._expect('COLUMN')
elif self._match('STAGING'):
pass
else:
self._expected_one_of([
'FOREIGN',
'CHECK',
'DATALINK',
'MATERIALIZED',
'GENERATED',
'STAGING',
])
if not self._match(','):
break
# SET INTEGRITY already matched
self._expect('FOR')
# Ambiguity: SET INTEGRITY ... CHECKED and SET INTEGRITY ... UNCHECKED
# have very different syntaxes, but only after initial similarities.
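        # For example (illustrative): "SET INTEGRITY FOR t1 IMMEDIATE CHECKED"
        # is handled by the first branch below, while "SET INTEGRITY FOR t1
        # FOREIGN KEY IMMEDIATE UNCHECKED" is only recognized by the second
        # branch after backtracking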
reraise = False
self._save_state()
try:
# Try and parse SET INTEGRITY ... IMMEDIATE CHECKED
while True:
self._parse_table_name()
if self._match(','):
reraise = True
else:
break
if self._match('OFF'):
reraise = True
parse_access_mode()
parse_cascade_clause()
elif self._match('TO'):
reraise = True
self._expect_sequence(['DATALINK', 'RECONCILE', 'PENDING'])
elif self._match('IMMEDIATE'):
reraise = True
self._expect('CHECKED')
parse_check_options()
elif self._match('FULL'):
reraise = True
self._expect('ACCESS')
elif self._match('PRUNE'):
reraise = True
else:
self._expected_one_of(['OFF', 'TO', 'IMMEDIATE', 'FULL', 'PRUNE'])
except ParseError:
# If that fails, parse SET INTEGRITY ... IMMEDIATE UNCHECKED
self._restore_state()
if reraise: raise
while True:
self._parse_table_name()
parse_integrity_options()
if self._match('FULL'):
self._expect('ACCESS')
if not self._match(','):
break
else:
self._forget_state()
def _parse_set_isolation_statement(self):
"""Parses a SET ISOLATION statement"""
# SET [CURRENT] ISOLATION already matched
self._match('=')
self._expect_one_of(['UR', 'CS', 'RR', 'RS', 'RESET'])
def _parse_set_lock_timeout_statement(self):
"""Parses a SET LOCK TIMEOUT statement"""
# SET [CURRENT] LOCK TIMEOUT already matched
self._match('=')
if self._match('WAIT'):
self._match(TT.NUMBER)
elif self._match('NOT'):
self._expect('WAIT')
elif self._match('NULL'):
pass
elif self._match(TT.NUMBER):
pass
else:
self._expected_one_of(['WAIT', 'NOT', 'NULL', TT.NUMBER])
def _parse_set_path_statement(self):
"""Parses a SET PATH statement"""
# SET [CURRENT] PATH already matched
self._match('=')
while True:
if self._match_sequence([(TT.REGISTER, 'SYSTEM'), (TT.REGISTER, 'PATH')]):
pass
elif self._match((TT.REGISTER, 'USER')):
pass
elif self._match((TT.REGISTER, 'CURRENT')):
self._match((TT.REGISTER, 'PACKAGE'))
self._expect((TT.REGISTER, 'PATH'))
elif self._match((TT.REGISTER, 'CURRENT_PATH')):
pass
else:
self._expect_one_of([TT.IDENTIFIER, TT.STRING])
if not self._match(','):
break
def _parse_set_schema_statement(self):
"""Parses a SET SCHEMA statement"""
# SET [CURRENT] SCHEMA already matched
self._match('=')
t = self._expect_one_of([
(TT.REGISTER, 'USER'),
(TT.REGISTER, 'SESSION_USER'),
(TT.REGISTER, 'SYSTEM_USER'),
(TT.REGISTER, 'CURRENT_USER'),
TT.IDENTIFIER,
TT.STRING,
])
if t.type in (TT.IDENTIFIER, TT.STRING):
self.current_schema = t.value
def _parse_set_session_auth_statement(self):
"""Parses a SET SESSION AUTHORIZATION statement"""
# SET SESSION AUTHORIZATION already matched
self._match('=')
self._expect_one_of([
(TT.REGISTER, 'USER'),
(TT.REGISTER, 'SYSTEM_USER'),
(TT.REGISTER, 'CURRENT_USER'),
TT.IDENTIFIER,
TT.STRING,
])
self._match_sequence(['ALLOW', 'ADMINISTRATION'])
def _parse_set_statement(self):
"""Parses a SET statement in a dynamic compound statement"""
# SET already matched
if self._match('CURRENT'):
if self._match_sequence(['DECFLOAT', 'ROUNDING', 'MODE']):
self._match('=')
self._expect_one_of([
'ROUND_CEILING',
'ROUND_FLOOR',
'ROUND_DOWN',
'ROUND_HALF_EVEN',
'ROUND_HALF_UP',
TT.STRING,
])
if self._match('DEGREE'):
self._match('=')
self._expect(TT.STRING)
elif self._match('EXPLAIN'):
if self._match('MODE'):
self._match('=')
if self._match_one_of(['EVALUATE', 'RECOMMEND']):
self._expect_one_of(['INDEXES', 'PARTITIONINGS'])
elif self._match_one_of(['NO', 'YES', 'REOPT', 'EXPLAIN']):
pass
else:
self._expected_one_of([
'NO',
'YES',
'REOPT',
'EXPLAIN',
'EVALUATE',
'RECOMMEND',
])
elif self._match('SNAPSHOT'):
self._expect_one_of(['NO', 'YES', 'EXPLAIN', 'REOPT'])
else:
self._expected_one_of(['MODE', 'SNAPSHOT'])
elif self._match_sequence(['FEDERATED', 'ASYNCHRONY']):
self._match('=')
self._expect_one_of(['ANY', TT.NUMBER])
elif self._match_sequence(['IMPLICIT', 'XMLPARSE', 'OPTION']):
self._match('=')
self._expect(TT.STRING)
elif self._match('ISOLATION'):
self._parse_set_isolation_statement()
elif self._match_sequence(['LOCK', 'TIMEOUT']):
self._parse_set_lock_timeout_statement()
elif self._match('MAINTAINED'):
self._match('TABLE')
self._expect('TYPES')
self._match_sequence(['FOR', 'OPTIMIZATION'])
self._match('=')
while True:
if self._match_one_of(['ALL', 'NONE']):
break
elif self._match_one_of(['FEDERATED_TOOL', 'USER', 'SYSTEM']):
pass
elif self._match('CURRENT'):
self._expect('MAINTAINED')
self._match('TABLE')
self._expect('TYPES')
self._match_sequence(['FOR', 'OPTIMIZATION'])
if not self._match(','):
break
elif self._match_sequence(['MDC', 'ROLLOUT', 'MODE']):
            self._expect_one_of(['NONE', 'IMMEDIATE', 'DEFERRED'])
elif self._match_sequence(['OPTIMIZATION', 'PROFILE']):
self._match('=')
if not self._match(TT.STRING) and not self._match('NULL'):
self._parse_subschema_name()
elif self._match_sequence(['QUERY', 'OPTIMIZATION']):
self._match('=')
self._expect(TT.NUMBER)
elif self._match_sequence(['REFRESH', 'AGE']):
self._match('=')
self._expect_one_of(['ANY', TT.NUMBER])
elif self._match('PATH'):
self._parse_set_path_statement()
elif self._match('SCHEMA'):
self._parse_set_schema_statement()
else:
self._expected_one_of([
'DEGREE',
'EXPLAIN',
'ISOLATION',
'LOCK',
'MAINTAINED',
'QUERY',
'REFRESH',
'PATH',
'SCHEMA',
])
elif self._match_sequence(['COMPILATION', 'ENVIRONMENT']):
self._match('=')
self._expect(TT.IDENTIFIER)
elif self._match('ISOLATION'):
self._parse_set_isolation_statement()
elif self._match_sequence(['LOCK', 'TIMEOUT']):
self._parse_set_lock_timeout_statement()
elif self._match_sequence(['ENCRYPTION', 'PASSWORD']):
self._match('=')
self._expect(TT.STRING)
elif self._match_sequence(['EVENT', 'MONITOR']):
self._expect(TT.IDENTIFIER)
self._expect('STATE')
self._match('=')
self._expect(TT.NUMBER)
elif self._match('PASSTHRU'):
self._expect_one_of(['RESET', TT.IDENTIFIER])
elif self._match('PATH'):
self._parse_set_path_statement()
elif self._match('ROLE'):
self._match('=')
self._expect(TT.IDENTIFIER)
elif self._match('CURRENT_PATH'):
self._parse_set_path_statement()
elif self._match('SCHEMA'):
self._parse_set_schema_statement()
elif self._match_sequence(['SERVER', 'OPTION']):
self._expect_sequence([TT.IDENTIFIER, 'TO', TT.STRING, 'FOR', 'SERVER', TT.IDENTIFIER])
elif self._match_sequence(['SESSION', 'AUTHORIZATION']):
self._parse_set_session_auth_statement()
elif self._match('SESSION_USER'):
self._parse_set_session_auth_statement()
else:
self._parse_assignment_clause(allowdefault=True)
def _parse_signal_statement(self):
"""Parses a SIGNAL statement in a dynamic compound statement"""
# SIGNAL already matched
if self._match('SQLSTATE'):
self._match('VALUE')
self._expect_one_of([TT.IDENTIFIER, TT.STRING])
else:
self._expect(TT.IDENTIFIER)
if self._match('SET'):
self._expect_sequence(['MESSAGE_TEXT', '='])
self._parse_expression()
elif self._match('('):
# XXX Ensure syntax only valid within a trigger
self._parse_expression()
self._expect(')')
def _parse_transfer_ownership_statement(self):
"""Parses a TRANSFER OWNERSHIP statement"""
# TRANSFER OWNERSHIP already matched
self._expect('OF')
if self._match_one_of(['ALIAS', 'TABLE', 'VIEW', 'NICKNAME', 'VARIABLE']):
self._parse_subschema_name()
elif self._match_sequence(['FUNCTION', 'MAPPING']):
self._parse_function_name()
elif self._match_one_of(['FUNCTION', 'PROCEDURE']):
self._parse_routine_name()
if self._match('('):
self._parse_datatype_list()
self._expect(')')
elif self._match('SPECIFIC'):
self._expect_one_of(['FUNCTION', 'PROCEDURE'])
self._parse_routine_name()
elif self._match('INDEX'):
self._parse_index_name()
elif self._match('SEQUENCE'):
self._parse_sequence_name()
elif self._match('DISTINCT'):
self._expect('TYPE')
self._parse_type_name()
elif self._match_sequence(['TYPE', 'MAPPING']):
self._parse_type_name()
elif self._match('TYPE'):
self._parse_type_name()
elif (self._match_sequence(['EVENT', 'MONITOR']) or
self._match('NODEGROUP') or
self._match_sequence(['DATABASE', 'PARTITION', 'GROUP']) or
self._match('SCHEMA') or
self._match('TABLESPACE') or
self._match('TRIGGER')):
self._expect(TT.IDENTIFIER)
else:
self._expected_one_of([
'ALIAS',
'DATABASE',
'DISTINCT',
'EVENT',
'FUNCTION',
'INDEX',
'NICKNAME',
'NODEGROUP',
'PROCEDURE',
'SCHEMA',
'SEQUENCE',
'SPECIFIC',
'TABLE',
'TABLESPACE',
'TRIGGER',
'TYPE',
'VARIABLE',
'VIEW',
])
if self._match('USER'):
self._expect(TT.IDENTIFIER)
else:
self._expect_one_of([
(TT.REGISTER, 'USER'),
(TT.REGISTER, 'SESSION_USER'),
(TT.REGISTER, 'SYSTEM_USER'),
])
        self._expect_sequence(['PRESERVE', 'PRIVILEGES'])
def _parse_truncate_statement(self):
"""Parses a TRUNCATE statement"""
# TRUNCATE already matched
self._match('TABLE')
self._parse_table_name()
if self._match_one_of(['DROP', 'REUSE']):
self._expect('STORAGE')
if self._match('IGNORE') or self._match_sequence(['RESTRICT', 'WHEN']):
self._expect_sequence(['DELETE', 'TRIGGERS'])
self._match_sequence(['CONTINUE', 'IDENTITY'])
self._expect('IMMEDIATE')
def _parse_update_statement(self):
"""Parses an UPDATE statement"""
# UPDATE already matched
if self._match('('):
self._indent()
self._parse_full_select()
self._outdent()
self._expect(')')
else:
self._parse_subschema_name()
# Ambiguity: INCLUDE is an identifier and hence can look like a table
# correlation name
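        # As with _parse_delete_statement above (examples illustrative):
        # "UPDATE t1 AS t INCLUDE (a INTEGER) SET c1 = 1" opens a column list,
        # while in "UPDATE t1 include SET c1 = 1" the word "include" is merely
        # a correlation name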
reraise = False
self._save_state()
try:
# Try and parse a mandatory table correlation followed by a
# mandatory INCLUDE
self._parse_table_correlation(optional=False)
self._newline()
self._expect('INCLUDE')
reraise = True
self._expect('(')
self._indent()
self._parse_ident_type_list(newlines=True)
self._outdent()
self._expect(')')
except ParseError:
# If that fails, rewind and parse an optional INCLUDE or an
# optional table correlation
self._restore_state()
if reraise: raise
if self._match('INCLUDE'):
self._newline(-1)
self._expect('(')
self._indent()
self._parse_ident_type_list(newlines=True)
self._outdent()
self._expect(')')
else:
self._parse_table_correlation()
else:
self._forget_state()
        # Parse the mandatory assignment clause, allowing DEFAULT values
self._expect('SET')
self._indent()
self._parse_assignment_clause(allowdefault=True)
self._outdent()
if self._match('WHERE'):
self._indent()
self._parse_search_condition()
self._outdent()
if self._match('WITH'):
self._expect_one_of(['RR', 'RS', 'CS', 'UR'])
def _parse_while_statement(self, label=None):
"""Parses a WHILE-loop in a dynamic compound statement"""
# WHILE already matched
self._parse_search_condition(newlines=False)
self._newline()
self._expect('DO')
self._indent()
while True:
self._parse_compiled_statement()
self._expect((TT.TERMINATOR, ';'))
if self._match('END'):
self._outdent(-1)
break
else:
self._newline()
self._expect('WHILE')
if label:
self._match((TT.IDENTIFIER, label))
# COMPOUND STATEMENTS ####################################################
def _parse_compiled_statement(self):
"""Parses a procedure statement within a procedure body"""
# XXX Should PREPARE be supported here?
try:
label = self._expect(TT.LABEL).value
self._newline()
except ParseError:
label = None
# Procedure specific statements
if self._match('ALLOCATE'):
self._parse_allocate_cursor_statement()
elif self._match('ASSOCIATE'):
self._parse_associate_locators_statement()
elif self._match('BEGIN'):
self._parse_compiled_compound_statement(label=label)
elif self._match('CASE'):
self._parse_case_statement()
elif self._match('CLOSE'):
self._parse_close_statement()
elif self._match_sequence(['EXECUTE', 'IMMEDIATE']):
self._parse_execute_immediate_statement()
elif self._match('EXECUTE'):
self._parse_execute_statement()
elif self._match('FETCH'):
self._parse_fetch_statement()
elif self._match('GOTO'):
self._parse_goto_statement()
elif self._match('LOOP'):
self._parse_loop_statement(label=label)
elif self._match('PREPARE'):
self._parse_prepare_statement()
elif self._match('OPEN'):
self._parse_open_statement()
elif self._match('REPEAT'):
self._parse_repeat_statement(label=label)
# Dynamic compound specific statements
elif self._match('FOR'):
self._parse_for_statement(label=label)
elif self._match_sequence(['GET', 'DIAGNOSTICS']):
self._parse_get_diagnostics_statement()
elif self._match('IF'):
self._parse_if_statement()
elif self._match('ITERATE'):
self._parse_iterate_statement()
elif self._match('LEAVE'):
self._parse_leave_statement()
elif self._match('RETURN'):
self._parse_return_statement()
elif self._match('SET'):
self._parse_set_statement()
elif self._match('SIGNAL'):
self._parse_signal_statement()
elif self._match('WHILE'):
self._parse_while_statement(label=label)
# Generic SQL statements
elif self._match('AUDIT'):
self._parse_audit_statement()
elif self._match('CALL'):
self._parse_call_statement()
elif self._match_sequence(['COMMENT', 'ON']):
self._parse_comment_statement()
elif self._match('COMMIT'):
self._parse_commit_statement()
elif self._match('CREATE'):
self._match_sequence(['OR', 'REPLACE'])
if self._match('TABLE'):
self._parse_create_table_statement()
elif self._match('VIEW'):
self._parse_create_view_statement()
            elif self._match('UNIQUE'):
                self._expect('INDEX')
                self._parse_create_index_statement(unique=True)
            elif self._match('INDEX'):
                self._parse_create_index_statement(unique=False)
else:
self._expected_one_of(['TABLE', 'VIEW', 'INDEX', 'UNIQUE'])
elif self._match_sequence(['DECLARE', 'GLOBAL', 'TEMPORARY', 'TABLE']):
self._parse_declare_global_temporary_table_statement()
elif self._match('DELETE'):
self._parse_delete_statement()
elif self._match('DROP'):
# XXX Limit this to tables, views and indexes somehow?
self._parse_drop_statement()
elif self._match('EXPLAIN'):
self._parse_explain_statement()
elif self._match_sequence(['FLUSH', 'OPTIMIZATION', 'PROFILE', 'CACHE']):
self._parse_flush_optimization_profile_cache_statement()
elif self._match_sequence(['FREE', 'LOCATOR']):
self._parse_free_locator_statement()
elif self._match('GRANT'):
self._parse_grant_statement()
elif self._match('INSERT'):
self._parse_insert_statement()
elif self._match_sequence(['LOCK', 'TABLE']):
self._parse_lock_table_statement()
elif self._match('MERGE'):
self._parse_merge_statement()
elif self._match('RELEASE'):
self._match('TO')
self._expect('SAVEPOINT')
self._parse_release_savepoint_statement()
elif self._match('RESIGNAL'):
self._parse_resignal_statement()
elif self._match('ROLLBACK'):
self._parse_rollback_statement()
elif self._match('SAVEPOINT'):
self._parse_savepoint_statement()
elif self._match_sequence(['TRANSFER', 'OWNERSHIP']):
self._parse_transfer_ownership_statement()
elif self._match('TRUNCATE'):
self._parse_truncate_statement()
elif self._match('UPDATE'):
self._parse_update_statement()
else:
self._parse_select_statement(allowinto=True)
def _parse_compiled_compound_statement(self, label=None):
"""Parses a procedure compound statement (body)"""
# BEGIN already matched
if self._match('NOT'):
self._expect('ATOMIC')
else:
self._match('ATOMIC')
self._indent()
        # Ambiguity: there are several statements beginning with DECLARE that
        # can occur mixed together or in a specific order here, so we use saved
# states to test for each consecutive block of DECLAREs
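        # For example (illustrative), a body may open with:
        #
        #   DECLARE v1 INTEGER DEFAULT 0;
        #   DECLARE c1 CURSOR FOR SELECT c FROM t;
        #   DECLARE CONTINUE HANDLER FOR NOT FOUND SET v1 = -1;
        #
        # Each block of DECLAREs below is attempted in that order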
# Try and parse DECLARE variable|condition|return-code
while True:
reraise = False
self._save_state()
try:
self._expect('DECLARE')
if self._match('SQLSTATE'):
reraise = True
self._expect_one_of(['CHAR', 'CHARACTER'])
self._expect_sequence(['(', (TT.NUMBER, 5), ')'], prespace=False)
self._match_sequence(['DEFAULT', TT.STRING])
elif self._match('SQLCODE'):
reraise = True
self._expect_one_of(['INT', 'INTEGER'])
self._match_sequence(['DEFAULT', TT.NUMBER])
else:
count = len(self._parse_ident_list())
if count == 1 and self._match('CONDITION'):
reraise = True
self._expect('FOR')
if self._match('SQLSTATE'):
self._match('VALUE')
self._expect(TT.STRING)
else:
self._parse_datatype()
if self._match('DEFAULT'):
reraise = True
self._parse_expression()
self._expect((TT.TERMINATOR, ';'))
self._newline()
except ParseError:
self._restore_state()
if reraise: raise
break
else:
self._forget_state()
# Try and parse DECLARE statement
while True:
reraise = False
self._save_state()
try:
self._expect('DECLARE')
self._parse_ident_list()
self._expect('STATEMENT')
reraise = True
self._expect((TT.TERMINATOR, ';'))
self._newline()
except ParseError:
self._restore_state()
if reraise: raise
break
else:
self._forget_state()
# Try and parse DECLARE CURSOR
while True:
reraise = False
self._save_state()
try:
self._expect_sequence(['DECLARE', TT.IDENTIFIER, 'CURSOR'])
reraise = True
if self._match('WITH'):
if self._match('RETURN'):
self._expect('TO')
self._expect_one_of(['CALLER', 'CLIENT'])
else:
self._expect('HOLD')
if self._match('WITH'):
self._expect_sequence(['RETURN', 'TO'])
self._expect_one_of(['CALLER', 'CLIENT'])
self._expect('FOR')
# Ambiguity: statement name could be reserved word
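                # e.g. (illustrative) "DECLARE c1 CURSOR FOR SELECT c FROM t"
                # versus "DECLARE c1 CURSOR FOR stmt1", where stmt1 is a
                # previously declared statement name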
self._save_state()
try:
# Try and parse a SELECT statement
# XXX Is SELECT INTO permitted in a DECLARE CURSOR?
self._parse_select_statement()
except ParseError:
# If that fails, rewind and parse a simple statement name
self._restore_state()
self._expect(TT.IDENTIFIER)
else:
self._forget_state()
self._expect((TT.TERMINATOR, ';'))
self._newline()
except ParseError:
self._restore_state()
if reraise: raise
break
else:
self._forget_state()
# Try and parse DECLARE HANDLER
while True:
reraise = False
self._save_state()
try:
self._expect('DECLARE')
self._expect_one_of(['CONTINUE', 'UNDO', 'EXIT'])
self._expect('HANDLER')
reraise = True
self._expect('FOR')
self._save_state()
try:
while True:
if self._match('NOT'):
self._expect('FOUND')
else:
self._expect_one_of(['NOT', 'SQLEXCEPTION', 'SQLWARNING'])
if not self._match(','):
break
except ParseError:
self._restore_state()
while True:
if self._match('SQLSTATE'):
self._match('VALUE')
self._expect(TT.STRING)
else:
self._expect(TT.IDENTIFIER)
if not self._match(','):
break
else:
self._forget_state()
self._parse_compiled_statement()
self._expect((TT.TERMINATOR, ';'))
self._newline()
except ParseError:
self._restore_state()
if reraise: raise
break
else:
self._forget_state()
# Parse procedure statements
while not self._match('END'):
self._parse_compiled_statement()
self._expect((TT.TERMINATOR, ';'))
self._newline()
self._outdent(-1)
if label:
self._match((TT.IDENTIFIER, label))
def _parse_statement(self):
"""Parses a top-level statement in an SQL script"""
# XXX CREATE EVENT MONITOR
# If we're reformatting WHITESPACE, add a blank WHITESPACE token to the
# output - this will suppress leading whitespace in front of the first
# word of the statement
self._output.append(Token(TT.WHITESPACE, None, '', 0, 0))
if self._match('ALTER'):
if self._match('TABLE'):
self._parse_alter_table_statement()
elif self._match('SEQUENCE'):
self._parse_alter_sequence_statement()
elif self._match('FUNCTION'):
self._parse_alter_function_statement(specific=False)
elif self._match('PROCEDURE'):
self._parse_alter_procedure_statement(specific=False)
elif self._match('SPECIFIC'):
if self._match('FUNCTION'):
self._parse_alter_function_statement(specific=True)
elif self._match('PROCEDURE'):
self._parse_alter_procedure_statement(specific=True)
else:
self._expected_one_of(['FUNCTION', 'PROCEDURE'])
elif self._match('NICKNAME'):
self._parse_alter_nickname_statement()
elif self._match('TABLESPACE'):
self._parse_alter_tablespace_statement()
elif self._match('BUFFERPOOL'):
self._parse_alter_bufferpool_statement()
elif self._match_sequence(['DATABASE', 'PARTITION', 'GROUP']):
self._parse_alter_partition_group_statement()
elif self._match('DATABASE'):
self._parse_alter_database_statement()
elif self._match('NODEGROUP'):
self._parse_alter_partition_group_statement()
elif self._match('SERVER'):
self._parse_alter_server()
elif self._match_sequence(['HISTOGRAM', 'TEMPLATE']):
self._parse_alter_histogram_template_statement()
elif self._match_sequence(['AUDIT', 'POLICY']):
self._parse_alter_audit_policy_statement()
elif self._match_sequence(['SECURITY', 'LABEL', 'COMPONENT']):
self._parse_alter_security_label_component_statement()
elif self._match_sequence(['SECURITY', 'POLICY']):
self._parse_alter_security_policy_statement()
elif self._match_sequence(['SERVICE', 'CLASS']):
self._parse_alter_service_class_statement()
elif self._match('THRESHOLD'):
self._parse_alter_threshold_statement()
elif self._match_sequence(['TRUSTED', 'CONTEXT']):
self._parse_alter_trusted_context_statement()
elif self._match_sequence(['USER', 'MAPPING']):
self._parse_alter_user_mapping_statement()
elif self._match('VIEW'):
self._parse_alter_view_statement()
elif self._match_sequence(['WORK', 'ACTION', 'SET']):
self._parse_alter_work_action_set_statement()
elif self._match_sequence(['WORK', 'CLASS', 'SET']):
self._parse_alter_work_class_set_statement()
elif self._match('WORKLOAD'):
self._parse_alter_workload_statement()
elif self._match('WRAPPER'):
self._parse_alter_wrapper_statement()
elif self._match('MODULE'):
self._parse_alter_module_statement()
else:
self._expected_one_of([
'AUDIT',
'BUFFERPOOL',
'DATABASE',
'FUNCTION',
'HISTOGRAM',
'MODULE',
'NICKNAME',
'NODEGROUP',
'PROCEDURE',
'SECURITY',
'SEQUENCE',
'SERVER',
'SERVICE',
'SPECIFIC',
'TABLE',
'TABLESPACE',
'THRESHOLD',
'TRUSTED',
'USER',
'VIEW',
'WORK',
'WORKLOAD',
'WRAPPER',
])
elif self._match('AUDIT'):
self._parse_audit_statement()
elif self._match('BEGIN'):
self._parse_compiled_compound_statement()
elif self._match('CALL'):
self._parse_call_statement()
elif self._match_sequence(['COMMENT', 'ON']):
self._parse_comment_statement()
elif self._match('COMMIT'):
self._parse_commit_statement()
elif self._match('CREATE'):
self._match_sequence(['OR', 'REPLACE'])
if self._match('TABLE'):
self._parse_create_table_statement()
elif self._match('VIEW'):
self._parse_create_view_statement()
elif self._match('ALIAS'):
self._parse_create_alias_statement()
elif self._match_sequence(['UNIQUE', 'INDEX']):
self._parse_create_index_statement(unique=True)
elif self._match('INDEX'):
self._parse_create_index_statement(unique=False)
elif self._match('DISTINCT'):
self._expect('TYPE')
self._parse_create_type_statement()
elif self._match('SEQUENCE'):
self._parse_create_sequence_statement()
elif self._match_sequence(['FUNCTION', 'MAPPING']):
self._parse_create_function_mapping_statement()
elif self._match('FUNCTION'):
self._parse_create_function_statement()
elif self._match('PROCEDURE'):
self._parse_create_procedure_statement()
elif self._match('TABLESPACE'):
self._parse_create_tablespace_statement()
elif self._match('BUFFERPOOL'):
self._parse_create_bufferpool_statement()
elif self._match_sequence(['DATABASE', 'PARTITION', 'GROUP']):
self._parse_create_database_partition_group_statement()
elif self._match('NODEGROUP'):
self._parse_create_database_partition_group_statement()
elif self._match('TRIGGER'):
self._parse_create_trigger_statement()
elif self._match('SCHEMA'):
self._parse_create_schema_statement()
elif self._match_sequence(['AUDIT', 'POLICY']):
self._parse_create_audit_policy_statement()
elif self._match_sequence(['EVENT', 'MONITOR']):
self._parse_create_event_monitor_statement()
elif self._match_sequence(['HISTOGRAM', 'TEMPLATE']):
self._parse_create_histogram_template_statement()
elif self._match('NICKNAME'):
self._parse_create_nickname_statement()
elif self._match('ROLE'):
self._parse_create_role_statement()
elif self._match_sequence(['SECURITY', 'LABEL', 'COMPONENT']):
self._parse_create_security_label_component_statement()
elif self._match_sequence(['SECURITY', 'LABEL']):
self._parse_create_security_label_statement()
elif self._match_sequence(['SECURITY', 'POLICY']):
self._parse_create_security_policy_statement()
elif self._match_sequence(['SERVICE', 'CLASS']):
self._parse_create_service_class_statement()
elif self._match('SERVER'):
self._parse_create_server_statement()
elif self._match('THRESHOLD'):
self._parse_create_threshold_statement()
elif self._match_sequence(['TRUSTED', 'CONTEXT']):
self._parse_create_trusted_context_statement()
elif self._match_sequence(['TYPE', 'MAPPING']):
self._parse_create_type_mapping_statement()
elif self._match('TYPE'):
self._parse_create_type_statement()
elif self._match_sequence(['USER', 'MAPPING']):
self._parse_create_user_mapping_statement()
elif self._match('VARIABLE'):
self._parse_create_variable_statement()
elif self._match_sequence(['WORK', 'ACTION', 'SET']):
self._parse_create_work_action_set_statement()
elif self._match_sequence(['WORK', 'CLASS', 'SET']):
self._parse_create_work_class_set_statement()
elif self._match('WORKLOAD'):
self._parse_create_workload_statement()
elif self._match('WRAPPER'):
self._parse_create_wrapper_statement()
elif self._match('MODULE'):
self._parse_create_module_statement()
else:
tbspacetype = self._match_one_of([
'REGULAR',
'LONG',
'LARGE',
'TEMPORARY',
'USER',
'SYSTEM',
])
if tbspacetype:
tbspacetype = tbspacetype.value
if tbspacetype in ('USER', 'SYSTEM'):
self._expect('TEMPORARY')
elif tbspacetype == 'TEMPORARY':
tbspacetype = 'SYSTEM'
elif tbspacetype == 'LONG':
tbspacetype = 'LARGE'
self._expect('TABLESPACE')
self._parse_create_tablespace_statement(tbspacetype)
else:
self._expected_one_of([
'ALIAS',
'AUDIT',
'BUFFERPOOL',
'DATABASE',
'DISTINCT',
'EVENT',
'FUNCTION',
'INDEX',
'MODULE',
'NICKNAME',
'NODEGROUP',
'PROCEDURE',
'ROLE',
'SECURITY',
'SEQUENCE',
'SERVER',
'SERVICE',
'TABLE',
'TABLESPACE',
'THRESHOLD',
'TRIGGER',
'TRUSTED',
'TYPE',
'UNIQUE',
'USER',
'VARIABLE',
'VIEW',
'WORK',
'WORKLOAD',
'WRAPPER',
])
elif self._match('DELETE'):
self._parse_delete_statement()
elif self._match('DROP'):
self._parse_drop_statement()
elif self._match_sequence(['DECLARE', 'GLOBAL', 'TEMPORARY', 'TABLE']):
self._parse_declare_global_temporary_table_statement()
elif self._match('DECLARE'):
self._parse_declare_cursor_statement()
elif self._match('EXPLAIN'):
self._parse_explain_statement()
elif self._match_sequence(['FLUSH', 'OPTIMIZATION', 'PROFILE', 'CACHE']):
self._parse_flush_optimization_profile_cache_statement()
elif self._match_sequence(['FREE', 'LOCATOR']):
self._parse_free_locator_statement()
elif self._match('GRANT'):
self._parse_grant_statement()
elif self._match('INSERT'):
self._parse_insert_statement()
elif self._match_sequence(['LOCK', 'TABLE']):
self._parse_lock_table_statement()
elif self._match('MERGE'):
self._parse_merge_statement()
elif self._match_sequence(['REFRESH', 'TABLE']):
self._parse_refresh_table_statement()
elif self._match('RELEASE'):
self._match('TO')
self._expect('SAVEPOINT')
self._parse_release_savepoint_statement()
elif self._match_sequence(['RENAME', 'TABLESPACE']):
self._parse_rename_tablespace_statement()
elif self._match('RENAME'):
self._parse_rename_statement()
elif self._match('REVOKE'):
self._parse_revoke_statement()
elif self._match('ROLLBACK'):
self._parse_rollback_statement()
elif self._match('SAVEPOINT'):
self._parse_savepoint_statement()
elif self._match_sequence(['SET', 'INTEGRITY']):
self._parse_set_integrity_statement()
elif self._match('SET'):
self._parse_set_statement()
elif self._match_sequence(['TRANSFER', 'OWNERSHIP']):
self._parse_transfer_ownership_statement()
elif self._match('TRUNCATE'):
self._parse_truncate_statement()
elif self._match('UPDATE'):
self._parse_update_statement()
else:
self._parse_select_statement()
def parse_routine_prototype(self, tokens):
"""Parses a routine prototype"""
        # It's a bit of a hack sticking this here. This method doesn't really
# belong here and should probably be in a sub-class (it's only used
# for syntax highlighting function prototypes in the documentation
# system)
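        # An example prototype this method can highlight (illustrative):
        #   SUBSTR(source VARCHAR(4000), start INTEGER) RETURNS VARCHAR(4000)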
self._parse_init(tokens)
        # Skip a leading whitespace or comment token
if self._token().type in (TT.COMMENT, TT.WHITESPACE):
self._index += 1
self._parse_function_name()
# Parenthesized parameter list is mandatory
self._expect('(', prespace=False)
if not self._match(')'):
while True:
self._match_one_of(['IN', 'OUT', 'INOUT'])
self._save_state()
try:
self._expect(TT.IDENTIFIER)
self._parse_datatype()
except ParseError:
self._restore_state()
self._parse_datatype()
else:
self._forget_state()
if not self._match(','):
break
self._expect(')')
# Parse the return type
if self._match('RETURNS'):
if self._match_one_of(['ROW', 'TABLE']):
self._expect('(')
self._parse_ident_type_list()
self._expect(')')
else:
self._parse_datatype()
self._parse_finish()
return self._output
Connection = namedtuple('Connection', ('instance', 'database', 'username', 'password'))
class DB2ZOSScriptParser(DB2ZOSParser):
"""Parser which handles the DB2 UDB CLP dialect.
This class inherits from the DB2 SQL language parser and as such is capable
of parsing all the statements that the parent class is capable of. In
addition, it adds the ability to parse the non-SQL CLP commands (like
IMPORT, EXPORT, LOAD, CREATE DATABASE, etc).
"""
def __init__(self):
super(DB2ZOSScriptParser, self).__init__()
self.connections = []
self.produces = []
self.consumes = []
self.current_user = None
self.current_instance = None
self.current_connection = None
def _match_clp_string(self, password=False):
"""Attempts to match the current tokens as a CLP-style string.
The _match_clp_string() method is used to match a CLP-style string.
The "real" CLP has a fundamentally different style of parser to the
DB2 SQL parser, and includes several behaviours that are difficult
to replicate in this parser (which was primarily targetted at the
DB2 SQL dialect). One of these is the CLP's habit of treating an
unquoted run of non-whitespace tokens as a string, or allowing a
quoted identifier to be treated as a string.
When this method is called it will return a STRING token consisting
of the content of the aforementioned tokens (or None if a CLP-style
string is not found in the source at the current position).
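
        For example (illustrative only), given the CLP input "CONNECT TO
        sample USER db2inst1", the unquoted words "sample" and "db2inst1"
        would each be returned as a single STRING token by successive calls
        to this method.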
"""
token = self._token()
if token.type == TT.STRING:
# STRINGs are treated verbatim
self._index += 1
elif token.type == TT.IDENTIFIER and token.source[0] == '"':
            # Double quoted identifiers are converted to STRING tokens
token = Token(TT.STRING, token.value, quote_str(token.value, "'"), token.line, token.column)
self._index += 1
        elif token.type not in (TT.TERMINATOR, TT.EOF):
            # Otherwise, any run of non-whitespace tokens is converted to a
# single STRING token
start = self._index
self._index += 1
while True:
token = self._token()
if token.type == TT.STRING:
raise ParseError(self._tokens, token, "Quotes (') not permitted in identifier")
if token.type == TT.IDENTIFIER and token.source[0] == '"':
raise ParseError(self._tokens, token, 'Quotes (") not permitted in identifier')
if token.type in (TT.WHITESPACE, TT.COMMENT, TT.TERMINATOR, TT.EOF):
break
self._index += 1
content = ''.join([token.source for token in self._tokens[start:self._index]])
token = Token(TT.STRING, content, quote_str(content, "'"), self._tokens[start].line, self._tokens[start].column)
else:
token = None
if token:
if not (self._output and self._output[-1].type in (TT.INDENT, TT.WHITESPACE)):
self._output.append(Token(TT.WHITESPACE, None, ' ', 0, 0))
if password:
token = Token(TT.PASSWORD, token.value, token.source, token.line, token.column)
self._output.append(token)
# Skip WHITESPACE and COMMENTS
while self._token().type in (TT.COMMENT, TT.WHITESPACE):
if self._token().type == TT.COMMENT or TT.WHITESPACE not in self.reformat:
self._output.append(self._token())
self._index += 1
return token
def _expect_clp_string(self, password=False):
"""Matches the current tokens as a CLP-style string, or raises an error.
See _match_clp_string() above for details of the algorithm.
"""
result = self._match_clp_string(password)
if not result:
raise ParseExpectedOneOfError(self._tokens, self._token(), [TT.PASSWORD if password else TT.STRING])
return result
# PATTERNS ###############################################################
def _parse_clp_string_list(self):
"""Parses a comma separated list of strings.
This is a common pattern in CLP, for example within the LOBS TO clause of
the EXPORT command. The method returns the list of strings found.
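
        For example (illustrative), the input "/tmp/lobs1, /tmp/lobs2" would
        yield ['/tmp/lobs1', '/tmp/lobs2'].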
"""
result = []
while True:
result.append(self._expect_clp_string().value)
if not self._match(','):
break
return result
def _parse_number_list(self):
"""Parses a comma separated list of number.
This is a common pattern in CLP, for example within the METHOD clause of
the IMPORT or LOAD commands. The method returns the list of numbers
found.
"""
result = []
while True:
result.append(self._expect(TT.NUMBER).value)
if not self._match(','):
break
return result
def _parse_login(self, optional=True, allowchange=False):
"""Parses a set of login credentials"""
username = None
password = None
if self._match('USER'):
username = self._expect_clp_string().value
if self._match('USING'):
password = self._expect_clp_string(password=True).value
if allowchange:
if self._match('NEW'):
password = self._expect_clp_string(password=True).value
self._expect('CONFIRM')
self._expect_clp_string(password=True)
else:
self._match_sequence(['CHANGE', 'PASSWORD'])
elif not optional:
self._expected('USER')
return (username, password)
# COMMANDS ###############################################################
def _parse_activate_database_command(self):
"""Parses an ACTIVATE DATABASE command"""
# ACTIVATE [DATABASE|DB] already matched
self._expect_clp_string()
self._parse_login(optional=True, allowchange=False)
def _parse_add_contact_command(self):
"""Parses an ADD CONTACT command"""
# ADD CONTACT already matched
self._expect_clp_string()
self._expect('TYPE')
if self._expect_one_of(['EMAIL', 'PAGE']).value == 'PAGE':
if self._match_sequence(['MAXIMUM', 'PAGE', 'LENGTH']) or self._match_sequence(['MAX', 'LEN']):
self._expect(TT.NUMBER)
self._expect('ADDRESS')
self._expect_clp_string()
if self._match('DESCRIPTION'):
self._expect_clp_string()
def _parse_add_contactgroup_command(self):
"""Parses an ADD CONTACTGROUP command"""
# ADD CONTACTGROUP already matched
self._expect_clp_string()
while True:
self._expect_one_of(['CONTACT', 'GROUP'])
self._expect_clp_string()
            if not self._match(','):
break
if self._match('DESCRIPTION'):
self._expect_clp_string()
def _parse_add_dbpartitionnum_command(self):
"""Parses an ADD DBPARTITIONNUM command"""
# ADD DBPARTITIONNUM already matched
if self._match('LIKE'):
self._expect_one_of(['DBPARTITIONNUM', 'NODE'])
self._expect(TT.NUMBER)
elif self._match('WITHOUT'):
self._expect('TABLESPACES')
def _parse_add_xmlschema_document_command(self):
"""Parses an ADD XMLSCHEMA DOCUMENT command"""
# ADD XMLSCHEMA DOCUMENT already matched
self._expect('TO')
self._parse_subschema_name()
self._expect('ADD')
self._expect_clp_string()
self._expect('FROM')
self._expect_clp_string()
if self._match('WITH'):
self._expect_clp_string()
if self._match('COMPLETE'):
if self._match('WITH'):
self._expect_clp_string()
self._match_sequence(['ENABLE', 'DECOMPOSITION'])
def _parse_archive_log_command(self):
"""Parses an ARCHIVE LOG command"""
# ARCHIVE LOG already matched
self._expect('FOR')
self._expect_one_of(['DATABASE', 'DB'])
self._expect_clp_string()
if self._match('USER'):
self._expect_clp_string()
if self._match('USING'):
self._expect_clp_string()
self._parse_db_partitions_clause()
def _parse_attach_command(self):
"""Parses an ATTACH command"""
# ATTACH already matched
if self._match('TO'):
self._expect_clp_string()
self._parse_login(optional=True, allowchange=True)
def _parse_autoconfigure_command(self):
"""Parses an AUTOCONFIGURE command"""
# AUTOCONFIGURE already matched
if self._match('USING'):
while True:
self._expect(TT.IDENTIFIER)
self._expect_one_of([TT.NUMBER, TT.STRING, TT.IDENTIFIER])
if self._match('APPLY'):
break
else:
self._expect('APPLY')
if self._match('DB'):
if self._match('AND'):
self._expect('DBM')
else:
self._expect('ONLY')
elif self._match('NONE'):
pass
else:
self._expected_one_of(['DB', 'NONE'])
self._match_sequence(['ON', 'CURRENT', 'NODE'])
def _parse_backup_command(self):
"""Parses a BACKUP DB command"""
# BACKUP [DATABASE|DB] already matched
self._expect_clp_string()
self._parse_login(optional=True, allowchange=False)
self._parse_db_partitions_clause()
if self._match('TABLESPACE'):
self._expect('(')
self._parse_ident_list()
self._expect(')')
self._match('ONLINE')
if self._match('INCREMENTAL'):
self._match('DELTA')
if self._match('USE'):
if self._match('SNAPSHOT'):
if self._match('LIBRARY'):
self._expect_clp_string()
elif self._match_one_of(['TSM', 'XBSA']):
if self._match('OPEN'):
self._expect(TT.NUMBER)
self._expect('SESSIONS')
if self._match('OPTIONS'):
self._expect_clp_string()
# XXX Add support for @filename response file
elif self._match('TO'):
self._parse_clp_string_list()
elif self._match('LOAD'):
self._expect_clp_string()
if self._match('OPEN'):
self._expect(TT.NUMBER)
self._expect('SESSIONS')
if self._match('OPTIONS'):
self._expect_clp_string()
# XXX Add support for @filename response file
self._match('DEDUP_DEVICE')
if self._match('WITH'):
self._expect(TT.NUMBER)
self._expect('BUFFERS')
if self._match('BUFFER'):
self._expect(TT.NUMBER)
if self._match('PARALLELISM'):
self._expect(TT.NUMBER)
if self._match('COMPRESS'):
if self._match('COMPRLIB'):
self._expect_clp_string()
self._match('EXCLUDE')
if self._match('COMPROPTS'):
self._expect_clp_string()
if self._match('UTIL_IMPACT_PRIORITY'):
self._match(TT.NUMBER)
if self._match_one_of(['EXCLUDE', 'INCLUDE']):
self._expect('LOGS')
if self._match('WITHOUT'):
self._expect('PROMPTING')
# XXX Add support for BIND command
def _parse_catalog_command(self):
"""Parses a CATALOG command"""
# CATALOG already matched
if self._match_one_of(['USER', 'SYSTEM']):
self._expect('ODBC')
if self._match_sequence(['DATA', 'SOURCE']):
self._expect_clp_string()
else:
self._expect_sequence(['ALL', 'DATA', 'SOURCES'])
elif self._match('ODBC'):
if self._match_sequence(['DATA', 'SOURCE']):
self._expect_clp_string()
else:
self._expect_sequence(['ALL', 'DATA', 'SOURCES'])
elif self._match_one_of(['DATABASE', 'DB']):
self._expect_clp_string()
if self._match('AS'):
self._expect_clp_string()
if self._match('ON'):
self._expect_clp_string()
elif self._match_sequence(['AT', 'NODE']):
self._expect_clp_string()
if self._match('AUTHENTICATION'):
if self._match_sequence(['KERBEROS', 'TARGET', 'PRINCIPAL']):
self._expect_clp_string()
else:
self._expect_one_of([
'SERVER',
'CLIENT',
'SERVER_ENCRYPT',
'SERVER_ENCRYPT_AES',
'KERBEROS',
'DATA_ENCRYPT',
'DATA_ENCRYPT_CMP',
'GSSPLUGIN',
'DCS',
'DCS_ENCRYPT',
])
if self._match('WITH'):
self._expect_clp_string()
elif self._match('DCS'):
self._expect_one_of(['DATABASE', 'DB'])
self._expect_clp_string()
if self._match('AS'):
self._expect_clp_string()
if self._match('AR'):
self._expect_clp_string()
if self._match('PARMS'):
self._expect_clp_string()
if self._match('WITH'):
self._expect_clp_string()
elif self._match('LDAP'):
if self._match_one_of(['DATABASE', 'DB']):
self._expect_clp_string()
if self._match('AS'):
self._expect_clp_string()
if self._match_sequence(['AT', 'NODE']):
self._expect_clp_string()
if self._match('GWNODE'):
self._expect_clp_string()
if self._match('PARMS'):
self._expect_clp_string()
if self._match('AR'):
self._expect_clp_string()
if self._match_sequence(['KERBEROS', 'TARGET', 'PRINCIPAL']):
self._expect_clp_string()
else:
self._expect_one_of([
'SERVER',
'CLIENT',
'SERVER_ENCRYPT',
'SERVER_ENCRYPT_AES',
'KERBEROS',
'DCS',
'DCS_ENCRYPT',
'DATA_ENCRYPT',
'GSSPLUGIN',
])
if self._match('WITH'):
self._expect_clp_string()
elif self._match('NODE'):
self._expect_clp_string()
self._expect('AS')
self._expect_clp_string()
else:
self._expected_one_of(['DATABASE', 'DB', 'NODE'])
self._parse_login(optional=True, allowchange=False)
else:
self._match('ADMIN')
if self._match_sequence(['LOCAL', 'NODE']):
self._expect_clp_string()
if self._match('INSTANCE'):
self._expect_clp_string()
if self._match('SYSTEM'):
self._expect_clp_string()
if self._match('OSTYPE'):
self._expect(TT.IDENTIFIER)
if self._match('WITH'):
self._expect_clp_string()
elif self._match_sequence(['NPIPE', 'NODE']):
self._expect_clp_string()
self._expect('REMOTE')
self._expect_clp_string()
self._expect('INSTANCE')
self._expect_clp_string()
if self._match('SYSTEM'):
self._expect_clp_string()
if self._match('OSTYPE'):
self._expect(TT.IDENTIFIER)
if self._match('WITH'):
self._expect_clp_string()
elif self._match_sequence(['NETBIOS', 'NODE']):
self._expect_clp_string()
self._expect('REMOTE')
self._expect_clp_string()
self._expect('ADAPTER')
self._expect(TT.NUMBER)
if self._match('REMOTE_INSTANCE'):
self._expect_clp_string()
if self._match('SYSTEM'):
self._expect_clp_string()
if self._match('OSTYPE'):
self._expect(TT.IDENTIFIER)
if self._match('WITH'):
self._expect_clp_string()
elif self._match_one_of(['TCPIP', 'TCPIP4', 'TCPIP6']):
self._expect('NODE')
self._expect_clp_string()
self._expect('REMOTE')
self._expect_clp_string()
self._expect('SERVER')
self._expect_clp_string()
if self._match('SECURITY'):
self._match_one_of(['SOCKS', 'SSL'])
if self._match('REMOTE_INSTANCE'):
self._expect_clp_string()
if self._match('SYSTEM'):
self._expect_clp_string()
if self._match('OSTYPE'):
self._expect(TT.IDENTIFIER)
if self._match('WITH'):
self._expect_clp_string()
else:
self._expected_one_of([
'LOCAL',
'NPIPE',
'NETBIOS',
'TCPIP',
'TCPIP4',
'TCPIP6',
])
def _parse_connect_command(self):
"""Parses a CONNECT command"""
# CONNECT already matched
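# Illustrative examples (standard DB2 CLP; for reference only):
#   CONNECT TO SAMPLE USER db2inst1 USING secret
#   CONNECT RESET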
if self._expect_one_of(['TO', 'RESET']).value == 'RESET':
self.current_connection = None
else:
database = self._expect_clp_string().value
if self._match('IN'):
if self._expect_one_of(['SHARE', 'EXCLUSIVE']).value == 'EXCLUSIVE':
self._expect('MODE')
if self._match('ON'):
self._expect('SINGLE')
self._expect_one_of(['DBPARTITIONNUM', 'NODE'])
else:
self._expect('MODE')
(username, password) = self._parse_login(optional=True, allowchange=True)
self.current_connection = Connection(self.current_instance, database, username, password)
self.connections.append(self.current_connection)
def _parse_create_database_command(self):
"""Parses a CREATE DATABASE command"""
def parse_tablespace_definition():
self._expect('MANAGED')
self._expect('BY')
if self._match('SYSTEM'):
self._expect('USING')
self._parse_system_container_clause()
elif self._match('DATABASE'):
self._expect('USING')
self._parse_database_container_clause()
elif self._match('AUTOMATIC'):
self._expect('STORAGE')
if self._match('EXTENTSIZE'):
self._expect(TT.NUMBER)
self._match_one_of(['K', 'M'])
if self._match('PREFETCHSIZE'):
self._expect(TT.NUMBER)
self._match_one_of(['K', 'M', 'G'])
if self._match('OVERHEAD'):
self._expect(TT.NUMBER)
if self._match('TRANSFERRATE'):
self._expect(TT.NUMBER)
if self._match('NO'):
self._expect_sequence(['FILE', 'SYSTEM', 'CACHING'])
elif self._match('FILE'):
self._expect_sequence(['SYSTEM', 'CACHING'])
self._parse_tablespace_size_attributes()
# CREATE [DATABASE|DB] already matched
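# Illustrative example (standard DB2 CLP; for reference only):
#   CREATE DATABASE SAMPLE AUTOMATIC STORAGE YES ON /data DBPATH ON /db2
#       USING CODESET UTF-8 TERRITORY US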
self._expect_clp_string()
# XXX Implement AT DBPARTITIONNUM? (not for general use, etc.)
if self._match('AUTOMATIC'):
self._expect('STORAGE')
self._expect_one_of(['NO', 'YES'])
if self._match('ON'):
self._parse_clp_string_list()
if self._match('DBPATH'):
self._expect('ON')
self._expect_clp_string()
if self._match('ALIAS'):
self._expect_clp_string()
if self._match('USING'):
self._expect('CODESET')
self._expect_clp_string()
if self._match('TERRITORY'):
self._expect_clp_string()
if self._match('COLLATE'):
self._expect('USING')
self._expect(TT.IDENTIFIER)
if self._match('PAGESIZE'):
self._expect(TT.NUMBER)
self._match('K')
if self._match('NUMSEGS'):
self._expect(TT.NUMBER)
if self._match('DFT_EXTENT_SZ'):
self._expect(TT.NUMBER)
self._match('RESTRICTIVE')
if self._match('CATALOG'):
self._expect('TABLESPACE')
parse_tablespace_definition()
if self._match('USER'):
self._expect('TABLESPACE')
parse_tablespace_definition()
if self._match('TEMPORARY'):
self._expect('TABLESPACE')
parse_tablespace_definition()
if self._match('WITH'):
self._expect_clp_string()
if self._match('AUTOCONFIGURE'):
self._parse_autoconfigure_command()
def _parse_create_tools_catalog_command(self):
"""Parses a CREATE TOOLS CATALOG command"""
# CREATE TOOLS CATALOG already matched
self._expect_clp_string()
if self._match('CREATE'):
self._expect('NEW')
self._expect('DATABASE')
self._expect_clp_string()
elif self._match('USE'):
self._expect('EXISTING')
if self._match('TABLESPACE'):
self._expect(TT.IDENTIFIER)
self._expect('DATABASE')
self._expect_clp_string()
self._match('FORCE')
if self._match('KEEP'):
self._expect('INACTIVE')
def _parse_deactivate_database_command(self):
"""Parses a DEACTIVATE DATABASE command"""
# DEACTIVATE [DATABASE|DB] already matched
self._expect_clp_string()
self._parse_login(optional=True, allowchange=False)
def _parse_decompose_xml_document(self):
"""Parses a DECOMPOSE XML DOCUMENT command"""
# DECOMPOSE XML DOCUMENT already matched
self._expect_clp_string()
self._expect('XMLSCHEMA')
self._parse_subschema_name()
self._match('VALIDATE')
def _parse_decompose_xml_documents(self):
"""Parses a DECOMPOSE XML DOCUMENTS command"""
# DECOMPOSE XML DOCUMENTS already matched
self._expect('IN')
self._parse_select_statement()
self._expect('XMLSCHEMA')
self._parse_subschema_name()
self._match('VALIDATE')
if self._match('ALLOW'):
self._match('NO')
self._expect('ACCESS')
if self._match('COMMITCOUNT'):
self._expect(TT.NUMBER)
self._match_sequence(['CONTINUE', 'ON', 'ERROR'])
if self._match('MESSAGES'):
self._expect_clp_string()
def _parse_deregister_command(self):
"""Parses a DEREGISTER command"""
# DEREGISTER already matched
self._match_sequence(['DB2', 'SERVER'])
self._match('IN')
self._expect_sequence(['LDAP', 'NODE', TT.IDENTIFIER])
self._parse_login(optional=True, allowchange=False)
def _parse_describe_command(self):
"""Parses a DESCRIBE command"""
# DESCRIBE already matched
table = True
if self._match('TABLE'):
pass
elif self._match_sequence(['INDEXES', 'FOR', 'TABLE']):
pass
elif self._match_sequence(['RELATIONAL', 'DATA']) or self._match_sequence(['XML', 'DATA']) or self._match_sequence(['TEXT', 'SEARCH']):
self._expect_sequence(['INDEXES', 'FOR', 'TABLE'])
elif self._match_sequence(['DATA', 'PARTITIONS', 'FOR', 'TABLE']):
pass
else:
table = False
if table:
self._parse_table_name()
self._match_sequence(['SHOW', 'DETAIL'])
else:
self._match('OUTPUT')
self._save_state()
try:
self._parse_select_statement()
except ParseError:
self._restore_state()
self._parse_call_statement()
else:
self._forget_state()
# XXX Add support for XQUERY?
def _parse_detach_command(self):
"""Parses a DETACH command"""
# DETACH already matched
pass
def _parse_disconnect_command(self):
"""Parses a DISCONNECT command"""
# DISCONNECT already matched
if self._match('ALL'):
self._match('SQL')
self.current_connection = None
elif self._match('CURRENT'):
self.current_connection = None
else:
t = self._expect_clp_string()
if self.current_connection is not None and isinstance(self.current_connection.database, basestring) and self.current_connection.database.lower() == t.value.lower():
self.current_connection = None
def _parse_drop_contact_command(self):
"""Parses a DROP CONTACT command"""
# DROP CONTACT already matched
self._expect_clp_string()
def _parse_drop_contactgroup_command(self):
"""Parses a DROP CONTACTGROUP command"""
# DROP CONTACTGROUP already matched
self._expect_clp_string()
def _parse_drop_database_command(self):
"""Parses a DROP DATABASE command"""
# DROP [DATABASE|DB] already matched
self._expect_clp_string()
if self._match('AT'):
self._expect_one_of(['DBPARTITIONNUM', 'NODE'])
def _parse_drop_dbpartitionnum_verify_command(self):
"""Parses a DROP DBPARTITIONNUM VERIFY command"""
# DROP DBPARTITIONNUM VERIFY already matched
pass
def _parse_drop_tools_catalog_command(self):
"""Parses a DROP TOOLS CATALOG command"""
# DROP TOOLS CATALOG already matched
self._expect_clp_string()
self._expect('IN')
self._expect('DATABASE')
self._expect_clp_string()
self._match('FORCE')
def _parse_echo_command(self):
"""Parses an ECHO command"""
# ECHO already matched
self._match_clp_string()
def _parse_export_command(self):
"""Parses a EXPORT command"""
# EXPORT already matched
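# Illustrative example (standard DB2 CLP; for reference only):
#   EXPORT TO staff.del OF DEL MESSAGES export.msg SELECT * FROM STAFF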
self._expect('TO')
self.produces.append((self._expect_clp_string().value, self.current_connection))
self._expect('OF')
self._expect_one_of(['DEL', 'IXF', 'WSF'])
if self._match('LOBS'):
self._expect('TO')
self._parse_clp_string_list()
if self._match('LOBFILE'):
self._parse_clp_string_list()
if self._match_sequence(['XML', 'TO']):
self._parse_clp_string_list()
if self._match('XMLFILE'):
self._parse_clp_string_list()
if self._match('MODIFIED'):
self._expect('BY')
# The syntax of MODIFIED BY is so incongruous with the parser that
# we don't even try to parse it; we just skip tokens until we find
# some "normal" syntax again. Unfortunately, this means the error
# handling becomes rather dumb
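# For illustration, a typical clause we skip over looks like:
#   MODIFIED BY COLDEL; DECPT, NOCHARDEL
# (the file-type modifiers run together with no delimiters this
# tokenizer can usefully split on)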
i = self._index
while True:
if self._token(i).value in [
'XMLSAVESCHEMA',
'METHOD',
'MESSAGES',
'HIERARCHY',
'WITH',
'SELECT',
'VALUES',
]:
while self._index < i:
self._output.append(self._token())
self._index += 1
break
if self._token(i).type == TT.EOF:
raise ParseError("Unable to find end of file-modifiers in EXPORT statement")
i += 1
self._match('XMLSAVESCHEMA')
if self._match('METHOD'):
self._expect('N')
self._expect('(')
self._parse_ident_list()
self._expect(')')
if self._match('MESSAGES'):
self._expect_clp_string()
if self._match('HIERARCHY'):
if self._match('STARTING'):
self._expect(TT.IDENTIFIER)
else:
self._expect('(')
self._parse_ident_list()
self._expect(')')
if self._match('WHERE'):
self._parse_search_condition()
else:
self._parse_select_statement()
# XXX Add support for XQUERY?
def _parse_force_application_command(self):
"""Parses a FORCE APPLICATION command"""
# FORCE APPLICATION already matched
if self._match('('):
self._parse_number_list()
self._expect(')')
else:
self._expect('ALL')
if self._match('MODE'):
self._expect('ASYNC')
def _parse_get_admin_cfg_command(self):
"""Parses a GET ADMIN CFG command"""
# GET ADMIN [CONFIGURATION|CONFIG|CFG] already matched
if self._match('FOR'):
self._expect_sequence(['NODE', TT.IDENTIFIER])
self._parse_login(optional=True, allowchange=False)
def _parse_get_alert_cfg_command(self):
"""Parses a GET ALERT CFG command"""
# GET ALERT [CONFIGURATION|CONFIG|CFG] already matched
self._expect('FOR')
if (
self._match_sequence(['DATABASE', 'MANAGER'])
or self._match_sequence(['DB', 'MANAGER'])
or self._match_one_of(['DBM', 'DATABASES', 'CONTAINERS', 'TABLESPACES'])
):
self._match('DEFAULT')
elif (
self._match('DATABASE')
or self._match_sequence(['TABLESPACE', TT.IDENTIFIER])
or self._match_sequence(['CONTAINER', TT.IDENTIFIER, 'FOR', TT.IDENTIFIER])
):
self._expect('ON')
self._expect_clp_string()
else:
self._expected_one_of([
'DB',
'DBM',
'DATABASE',
'DATABASES',
'TABLESPACE',
'TABLESPACES',
'CONTAINER',
'CONTAINERS',
])
if self._match('USING'):
self._parse_clp_string_list()
def _parse_get_cli_cfg_command(self):
"""Parses a GET CLI CFG command"""
# GET CLI [CONFIGURATION|CONFIG|CFG] already matched
self._match_sequence(['AT', 'GLOBAL', 'LEVEL'])
if self._match_sequence(['FOR', 'SECTION']):
self._expect_clp_string()
def _parse_get_connection_state_command(self):
"""Parses a GET CONNECTION STATE command"""
# GET CONNECTION STATE already matched
pass
def _parse_get_contactgroup_command(self):
"""Parses a GET CONTACTGROUP command"""
# GET CONTACTGROUP already matched
self._expect_clp_string()
def _parse_get_contactgroups_command(self):
"""Parses a GET CONTACTGROUPS command"""
# GET CONTACTGROUPS already matched
pass
def _parse_get_contacts_command(self):
"""Parses a GET CONTACTS command"""
# GET CONTACTS already matched
pass
def _parse_get_db_cfg_command(self):
"""Parses a GET DB CFG command"""
# GET [DATABASE|DB] [CONFIGURATION|CONFIG|CFG] already matched
if self._match('FOR'):
self._expect_clp_string()
self._match_sequence(['SHOW', 'DETAIL'])
def _parse_get_dbm_cfg_command(self):
"""Parses a GET DBM CFG command"""
# GET [DATABASE MANAGER|DB MANAGER|DBM] [CONFIGURATION|CONFIG|CFG] already matched
self._match_sequence(['SHOW', 'DETAIL'])
def _parse_get_dbm_monitor_switches_command(self):
"""Parses a GET DBM MONITOR SWITCHES command"""
# GET [DATABASE MANAGER|DB MANAGER|DBM] MONITOR SWITCHES already matched
self._parse_db_partition_clause()
def _parse_get_description_for_health_indicator_command(self):
"""Parses a GET DESCRIPTION FOR HEALTH INDICATOR command"""
# GET DESCRIPTION FOR HEALTH INDICATOR already matched
self._expect_clp_string()
def _parse_get_notification_list_command(self):
"""Parses a GET NOTIFICATION LIST command"""
# GET [HEALTH] NOTIFICATION [CONTACT] LIST already matched
pass
def _parse_get_health_snapshot_command(self):
"""Parses a GET HEALTH SNAPSHOT command"""
# GET HEALTH SNAPSHOT already matched
self._expect('FOR')
if (
self._match_sequence(['DATABASE', 'MANAGER'])
or self._match_sequence(['DB', 'MANAGER'])
or self._match('DBM')
or self._match_sequence(['ALL', 'DATABASES'])
):
pass
elif self._match_one_of(['ALL', 'DATABASE', 'DB', 'TABLESPACES']):
self._expect('ON')
self._expect_clp_string()
else:
self._expected_one_of([
'DB',
'DATABASE',
'DBM',
'ALL',
'TABLESPACES',
])
self._parse_db_partition_clause()
self._match_sequence(['SHOW', 'DETAIL'])
self._match_sequence(['WITH', 'FULL', 'COLLECTION'])
def _parse_get_instance_command(self):
"""Parses a GET INSTANCE command"""
# GET INSTANCE already matched
pass
def _parse_get_monitor_switches_command(self):
"""Parses a GET MONITOR SWITCHES command"""
# GET MONITOR SWITCHES already matched
self._parse_db_partition_clause()
def _parse_get_recommendations_for_health_indicator_command(self):
"""Parses a GET RECOMMENDATIONS FOR HEALTH INDICATOR command"""
# GET RECOMMENDATIONS FOR HEALTH INDICATOR already matched
self._expect_clp_string()
if self._match('FOR'):
if not self._match('DBM'):
if self._match('TABLESPACE'):
self._expect(TT.IDENTIFIER)
elif self._match('CONTAINER'):
self._expect_clp_string()
self._expect_sequence(['FOR', 'TABLESPACE', TT.IDENTIFIER])
elif self._match('DATABASE'):
pass
else:
self._expected_one_of(['TABLESPACE', 'CONTAINER', 'DATABASE', 'DBM'])
self._expect('ON')
self._expect_clp_string()
self._parse_db_partition_clause()
def _parse_get_routine_command(self):
"""Parses a GET ROUTINE command"""
# GET ROUTINE already matched
self._expect('INTO')
self._expect_clp_string()
self._expect('FROM')
self._match('SPECIFIC')
self._expect('PROCEDURE')
self._parse_routine_name()
self._match_sequence(['HIDE', 'BODY'])
def _parse_get_snapshot_command(self):
"""Parses a GET SNAPSHOT command"""
# GET SNAPSHOT already matched
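# Illustrative example (standard DB2 CLP; for reference only):
#   GET SNAPSHOT FOR DYNAMIC SQL ON SAMPLE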
self._expect('FOR')
if (
self._match_sequence(['DATABASE', 'MANAGER'])
or self._match_sequence(['DB', 'MANAGER'])
or self._match('DBM')
or self._match_sequence(['ALL', 'DCS', 'DATABASES'])
or self._match_sequence(['ALL', 'DATABASES'])
or self._match_sequence(['ALL', 'DCS', 'APPLICATIONS'])
or self._match_sequence(['ALL', 'APPLICATIONS'])
or self._match_sequence(['ALL', 'BUFFERPOOLS'])
or self._match_sequence(['FCM', 'FOR', 'ALL', 'DBPARTITIONNUMS'])
or self._match_sequence(['FCM', 'FOR', 'ALL', 'NODES'])
or (self._match_sequence(['DCS', 'APPLICATION', 'APPLID']) and self._match_clp_string())
or self._match_sequence(['DCS', 'APPLICATION', 'AGENTID', TT.NUMBER])
or (self._match_sequence(['APPLICATION', 'APPLID']) and self._match_clp_string())
or self._match_sequence(['APPLICATION', 'AGENTID', TT.NUMBER])
or (self._match_sequence(['LOCKS', 'FOR', 'APPLICATION', 'APPLID']) and self._match_clp_string())
or self._match_sequence(['LOCKS', 'FOR', 'APPLICATION', 'AGENTID', TT.NUMBER])
or self._match_sequence(['ALL', 'REMOTE_DATABASES'])
or self._match_sequence(['ALL', 'REMOTE_APPLICATIONS'])
):
pass
elif self._match_sequence(['DYNAMIC', 'SQL', 'ON']):
self._expect_clp_string()
self._match_sequence(['WRITE', 'TO', 'FILE'])
elif (
self._match('ALL')
or self._match_sequence(['DCS', 'DATABASE'])
or self._match_sequence(['DCS', 'DB'])
or self._match_sequence(['DCS', 'APPLICATIONS'])
or self._match_one_of([
'DATABASE',
'APPLICATIONS',
'TABLES',
'TABLESPACES',
'LOCKS',
'BUFFERPOOLS',
'REMOTE_DATABASES',
'REMOTE_APPLICATIONS'
])
):
self._expect('ON')
self._expect_clp_string()
else:
self._expected_one_of([
'ALL',
'DCS',
'DB',
'DBM',
'DATABASE',
'FCM',
'DYNAMIC',
'APPLICATION',
'APPLICATIONS',
'TABLES',
'TABLESPACES',
'LOCKS',
'BUFFERPOOLS',
'REMOTE_DATABASES',
'REMOTE_APPLICATIONS',
])
self._parse_db_partition_clause()
def _parse_import_method(self):
"""Parses the METHOD clause of an IMPORT/LOAD command"""
# METHOD already matched
if self._match('L'):
self._expect('(')
while True:
self._expect(TT.NUMBER) # col start
self._expect(TT.NUMBER) # col end
if not self._match(','):
break
self._expect(')')
if self._match('NULL'):
self._expect('INDICATORS')
self._expect('(')
self._parse_number_list()
self._expect(')')
elif self._match('N'):
self._expect('(')
self._parse_ident_list()
self._expect(')')
elif self._match('P'):
self._expect('(')
self._parse_number_list()
self._expect(')')
else:
self._expected_one_of(['L', 'N', 'P'])
def _parse_import_command(self):
"""Parses a IMPORT command"""
# IMPORT already matched
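# Illustrative example (standard DB2 CLP; for reference only):
#   IMPORT FROM staff.del OF DEL COMMITCOUNT 1000 MESSAGES import.msg
#       INSERT INTO STAFF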
self._expect('FROM')
self.consumes.append((self._expect_clp_string().value, self.current_connection))
self._expect('OF')
self._expect_one_of(['ASC', 'DEL', 'IXF', 'WSF'])
if self._match('LOBS'):
self._expect('FROM')
self._parse_clp_string_list()
if self._match('XML'):
self._expect('FROM')
self._parse_clp_string_list()
if self._match('MODIFIED'):
self._expect('BY')
# See _parse_export_command() above for an explanation...
i = self._index
while True:
if self._token(i).value in [
'METHOD',
'COMMITCOUNT',
'RESTARTCOUNT',
'SKIPCOUNT',
'ROWCOUNT',
'WARNINGCOUNT',
'NOTIMEOUT',
'INSERT_UPDATE',
'REPLACE',
'REPLACE_CREATE',
'MESSAGES',
'INSERT',
'CREATE',
'ALLOW',
'XMLPARSE',
'XMLVALIDATE',
]:
while self._index < i:
self._output.append(self._token())
self._index += 1
break
if self._token(i).type == TT.EOF:
raise ParseError("Unable to find end of file-modifiers in IMPORT statement")
i += 1
if self._match('METHOD'):
self._parse_import_method()
if self._match('XMLPARSE'):
self._expect_one_of(['STRIP', 'PRESERVE'])
self._expect('WHITESPACE')
if self._match('XMLVALIDATE'):
self._expect('USING')
if self._match('XDS'):
if self._match('DEFAULT'):
self._parse_subschema_name()
if self._match('IGNORE'):
self._expect('(')
while True:
self._parse_subschema_name()
if not self._match(','):
break
self._expect(')')
if self._match('MAP'):
self._expect('(')
while True:
self._expect('(')
self._parse_subschema_name()
self._expect(',')
self._parse_subschema_name()
self._expect(')')
if not self._match(','):
break
self._expect(')')
elif self._match('SCHEMA'):
self._parse_subschema_name()
elif self._match('SCHEMALOCATION'):
self._expect('HINTS')
if self._match('ALLOW'):
self._expect_one_of(['NO', 'WRITE'])
self._expect('ACCESS')
if self._match('COMMITCOUNT'):
self._expect_one_of([TT.NUMBER, 'AUTOMATIC'])
if self._match_one_of(['RESTARTCOUNT', 'SKIPCOUNT']):
self._expect(TT.NUMBER)
if self._match('ROWCOUNT'):
self._expect(TT.NUMBER)
if self._match('WARNINGCOUNT'):
self._expect(TT.NUMBER)
if self._match('NOTIMEOUT'):
pass
if self._match('MESSAGES'):
self._expect_clp_string()
# Parse the action (CREATE/INSERT/etc.)
t = self._expect_one_of([
'CREATE',
'INSERT',
'INSERT_UPDATE',
'REPLACE',
'REPLACE_CREATE',
])
self._expect('INTO')
self._parse_table_name()
if self._match('('):
self._parse_ident_list()
self._expect(')')
if (t.value == 'CREATE') and self._match('IN'):
self._expect(TT.IDENTIFIER)
if self._match('INDEX'):
self._expect('IN')
self._expect(TT.IDENTIFIER)
if self._match('LONG'):
self._expect('IN')
self._expect(TT.IDENTIFIER)
def _parse_initialize_tape_command(self):
"""Parses an INTIALIZE TAPE command"""
# INITIALIZE TAPE already matched
if self._match('ON'):
self._expect_clp_string()
if self._match('USING'):
self._expect(TT.NUMBER)
def _parse_inspect_command(self):
"""Parses an INSPECT command"""
# INSPECT already matched
if self._match('ROWCOMPESTIMATE'):
self._expect('TABLE')
if self._match('NAME'):
self._expect(TT.IDENTIFIER)
if self._match('SCHEMA'):
self._expect(TT.IDENTIFIER)
elif self._match('TBSPACEID'):
self._expect_sequence([TT.NUMBER, 'OBJECTID', TT.NUMBER])
elif self._match('CHECK'):
if self._match('DATABASE'):
if self._match('BEGIN'):
self._expect_sequence(['TBSPACEID', TT.NUMBER])
self._match_sequence(['OBJECTID', TT.NUMBER])
elif self._match('TABLESPACE'):
if self._match('NAME'):
self._expect(TT.IDENTIFIER)
elif self._match('TBSPACEID'):
self._expect(TT.NUMBER)
if self._match('BEGIN'):
self._expect_sequence(['OBJECTID', TT.NUMBER])
if self._match('TABLE'):
if self._match('NAME'):
self._expect(TT.IDENTIFIER)
if self._match('SCHEMA'):
self._expect(TT.IDENTIFIER)
elif self._match('TBSPACEID'):
self._expect_sequence([TT.NUMBER, 'OBJECTID', TT.NUMBER])
else:
self._expected_one_of(['ROWCOMPESTIMATE', 'CHECK'])
self._match_sequence(['FOR', 'ERROR', 'STATE', 'ALL'])
if self._match_sequence(['LIMIT', 'ERROR', 'TO']):
self._expect_one_of(['DEFAULT', 'ALL', TT.NUMBER])
if self._match('EXTENTMAP'):
self._expect_one_of(['NORMAL', 'NONE', 'LOW'])
if self._match('DATA'):
self._expect_one_of(['NORMAL', 'NONE', 'LOW'])
if self._match('BLOCKMAP'):
self._expect_one_of(['NORMAL', 'NONE', 'LOW'])
if self._match('INDEX'):
self._expect_one_of(['NORMAL', 'NONE', 'LOW'])
if self._match('LONG'):
self._expect_one_of(['NORMAL', 'NONE', 'LOW'])
if self._match('LOB'):
self._expect_one_of(['NORMAL', 'NONE', 'LOW'])
if self._match('XML'):
self._expect_one_of(['NORMAL', 'NONE', 'LOW'])
self._match('INDEXDATA')
self._expect('RESULTS')
self._match('KEEP')
self._expect_clp_string()
self._parse_db_partitions_clause()
def _parse_instance_command(self):
"""Parses the custom (non-CLP) INSTANCE command"""
# INSTANCE already matched
self.current_instance = self._expect_clp_string().value
self.current_connection = None
def _parse_list_active_databases_command(self):
"""Parses a LIST ACTIVE DATABASES command"""
# LIST ACTIVE DATABASES already matched
self._parse_db_partition_clause()
def _parse_list_applications_command(self):
"""Parses a LIST APPLICATIONS command"""
# LIST APPLICATIONS already matched
if self._match('FOR'):
self._expect_one_of(['DATABASE', 'DB'])
self._expect_clp_string()
self._parse_db_partition_clause()
self._match_sequence(['SHOW', 'DETAIL'])
def _parse_list_command_options_command(self):
"""Parses a LIST COMMAND OPTIONS command"""
# LIST COMMAND OPTIONS already matched
pass
def _parse_list_db_directory_command(self):
"""Parses a LIST DB DIRECTORY command"""
# LIST [DATABASE|DB] DIRECTORY already matched
if self._match('ON'):
self._expect_clp_string()
def _parse_list_database_partition_groups_command(self):
"""Parses a LIST DATABASE PARTITION GROUPS command"""
# LIST DATABASE PARTITION GROUPS already matched
self._match_sequence(['SHOW', 'DETAIL'])
def _parse_list_nodes_command(self):
"""Parses a LIST NODES command"""
# LIST DBPARTITIONNUMS|NODES already matched
pass
def _parse_list_dcs_applications_command(self):
"""Parses a LIST DCS APPLICATIONS command"""
# LIST DCS APPLICATIONS already matched
if not self._match('EXTENDED'):
self._match_sequence(['SHOW', 'DETAIL'])
def _parse_list_dcs_directory_command(self):
"""Parses a LIST DCS DIRECTORY command"""
# LIST DCS DIRECTORY already matched
pass
def _parse_list_drda_indoubt_transactions_command(self):
"""Parses a LIST DRDA INDOUBT TRANSACTIONS command"""
# LIST DRDA INDOUBT TRANSACTIONS already matched
self._match_sequence(['WITH', 'PROMPTING'])
def _parse_list_history_command(self):
"""Parses a LIST HISTORY command"""
# LIST HISTORY already matched
if self._match_one_of(['CREATE', 'ALTER', 'RENAME']):
self._expect('TABLESPACE')
elif self._match('ARCHIVE'):
self._expect('LOG')
elif self._match('DROPPED'):
self._expect('TABLE')
else:
self._match_one_of(['BACKUP', 'ROLLFORWARD', 'LOAD', 'REORG'])
if self._match('SINCE'):
self._expect(TT.NUMBER)
elif self._match('CONTAINING'):
self._parse_subschema_name()
elif not self._match('ALL'):
self._expected_one_of(['ALL', 'SINCE', 'CONTAINING'])
self._expect('FOR')
self._match_one_of(['DATABASE', 'DB'])
self._expect_clp_string()
def _parse_list_indoubt_transactions_command(self):
"""Parses a LIST INDOUBT TRANSACTIONS command"""
# LIST INDOUBT TRANSACTIONS already matched
self._match_sequence(['WITH', 'PROMPTING'])
def _parse_list_node_directory_command(self):
"""Parses a LIST NODE DIRECTORY command"""
# LIST [ADMIN] NODE DIRECTORY already matched
self._match_sequence(['SHOW', 'DETAIL'])
def _parse_list_odbc_data_sources_command(self):
"""Parses a LIST ODBC DATA SOURCES command"""
# LIST [USER|SYSTEM] ODBC DATA SOURCES already matched
pass
def _parse_list_tables_command(self):
"""Parses a LIST TABLES command"""
# LIST PACKAGES|TABLES already matched
if self._match('FOR'):
if self._match('SCHEMA'):
self._expect(TT.IDENTIFIER)
elif not self._match_one_of(['USER', 'SYSTEM', 'ALL']):
self._expected_one_of(['USER', 'SYSTEM', 'ALL', 'SCHEMA'])
self._match_sequence(['SHOW', 'DETAIL'])
def _parse_list_tablespace_containers_command(self):
"""Parses a LIST TABLESPACE CONTAINERS command"""
# LIST TABLESPACE CONTAINERS already matched
self._expect_sequence(['FOR', TT.NUMBER])
self._match_sequence(['SHOW', 'DETAIL'])
def _parse_list_tablespaces_command(self):
"""Parses a LIST TABLESPACES command"""
# LIST TABLESPACES already matched
self._match_sequence(['SHOW', 'DETAIL'])
def _parse_list_utilities_command(self):
"""Parses a LIST UTILITIES command"""
# LIST UTILITIES already matched
self._match_sequence(['SHOW', 'DETAIL'])
def _parse_load_command(self):
"""Parses a LOAD command"""
# LOAD already matched
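# Illustrative example (standard DB2 CLP; for reference only):
#   LOAD FROM staff.del OF DEL SAVECOUNT 10000 MESSAGES load.msg
#       REPLACE INTO STAFF NONRECOVERABLE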
self._match('CLIENT')
self._expect('FROM')
filename = self._expect_clp_string().value
self._expect('OF')
if self._expect_one_of(['ASC', 'DEL', 'IXF', 'CURSOR']).value != 'CURSOR':
self.consumes.append((filename, self.current_connection))
if self._match('LOBS'):
self._expect('FROM')
self._parse_clp_string_list()
if self._match('XML'):
self._expect('FROM')
self._parse_clp_string_list()
if self._match('MODIFIED'):
self._expect('BY')
# See _parse_export_command() above for an explanation...
i = self._index
while True:
if self._token(i).value in [
'INSERT',
'MESSAGES',
'METHOD',
'REPLACE',
'RESTART',
'ROWCOUNT',
'SAVECOUNT',
'TEMPFILES',
'TERMINATE',
'WARNINGCOUNT',
'XMLPARSE',
'XMLVALIDATE',
]:
while self._index < i:
self._output.append(self._token())
self._index += 1
break
if self._token(i).type == TT.EOF:
raise ParseError("Unable to find end of file-modifiers in LOAD statement")
i += 1
if self._match('METHOD'):
self._parse_import_method()
if self._match('XMLPARSE'):
self._expect_one_of(['STRIP', 'PRESERVE'])
self._expect('WHITESPACE')
if self._match('XMLVALIDATE'):
self._expect('USING')
if self._match('XDS'):
if self._match('DEFAULT'):
self._parse_subschema_name()
if self._match('IGNORE'):
self._expect('(')
while True:
self._parse_subschema_name()
if not self._match(','):
break
self._expect(')')
if self._match('MAP'):
self._expect('(')
while True:
self._expect('(')
self._parse_subschema_name()
self._expect(',')
self._parse_subschema_name()
self._expect(')')
if not self._match(','):
break
self._expect(')')
elif self._match('SCHEMA'):
self._parse_subschema_name()
elif self._match('SCHEMALOCATION'):
self._expect('HINTS')
if self._match('SAVECOUNT'):
self._expect(TT.NUMBER)
if self._match('ROWCOUNT'):
self._expect(TT.NUMBER)
if self._match('WARNINGCOUNT'):
self._expect(TT.NUMBER)
if self._match('MESSAGES'):
self._expect_clp_string()
if self._match('TEMPFILES'):
self._expect('PATH')
self._expect_clp_string()
if self._expect_one_of(['INSERT', 'RESTART', 'REPLACE', 'TERMINATE']).value == 'REPLACE':
self._match_one_of(['KEEPDICTIONARY', 'RESETDICTIONARY'])
self._expect('INTO')
self._parse_table_name()
if self._match('('):
self._parse_ident_list()
self._expect(')')
if self._match('FOR'):
self._expect('EXCEPTION')
self._parse_table_name()
if self._match_one_of(['NORANGEEXC', 'NOUNIQUEEXC']):
if self._match(','):
self._expect_one_of(['NORANGEEXC', 'NOUNIQUEEXC'])
if self._match('STATISTICS'):
if self._expect_one_of(['NO', 'USE']).value == 'USE':
self._expect('PROFILE')
if self._match('COPY'):
if self._expect_one_of(['NO', 'YES']).value == 'YES':
if self._match('USE'):
self._expect('TSM')
if self._match('OPEN'):
self._expect_sequence([TT.NUMBER, 'SESSIONS'])
elif self._match('TO'):
self._parse_clp_string_list()
elif self._match('LOAD'):
self._expect_clp_string()
if self._match('OPEN'):
self._expect_sequence([TT.NUMBER, 'SESSIONS'])
else:
self._expected_one_of(['USE', 'TO', 'LOAD'])
elif self._match('NONRECOVERABLE'):
pass
if self._match('WITHOUT'):
self._expect('PROMPTING')
if self._match('DATA'):
self._expect('BUFFER')
self._expect(TT.NUMBER)
if self._match('SORT'):
self._expect('BUFFER')
self._expect(TT.NUMBER)
if self._match('CPU_PARALLELISM'):
self._expect(TT.NUMBER)
if self._match('DISK_PARALLELISM'):
self._expect(TT.NUMBER)
if self._match('FETCH_PARALLELISM'):
self._expect_one_of(['YES', 'NO'])
if self._match('INDEXING'):
self._expect('MODE')
self._expect_one_of(['AUTOSELECT', 'REBUILD', 'INCREMENTAL', 'DEFERRED'])
if self._match('ALLOW'):
if self._match_sequence(['READ', 'ACCESS']):
self._match_sequence(['USE', TT.IDENTIFIER])
elif self._match_sequence(['NO', 'ACCESS']):
pass
else:
self._expected_one_of(['READ', 'NO'])
if self._match_sequence(['SET', 'INTEGRITY']):
self._expect_sequence(['PENDING', 'CASCADE'])
self._expect_one_of(['DEFERRED', 'IMMEDIATE'])
if self._match('LOCK'):
self._expect_sequence(['WITH', 'FORCE'])
if self._match('SOURCEUSEREXIT'):
self._expect_clp_string()
if self._match('REDIRECT'):
if self._match('INPUT'):
self._expect('FROM')
self._expect_one_of(['BUFFER', 'FILE'])
self._expect_clp_string()
if self._match('OUTPUT'):
self._expect_sequence(['TO', 'FILE'])
self._expect_clp_string()
self._match_sequence(['PARTITIONED', 'DB', 'CONFIG'])
while True:
if self._match('MODE'):
self._expect_one_of([
'PARTITION_AND_LOAD',
'PARTITION_ONLY',
'LOAD_ONLY',
'LOAD_ONLY_VERIFY_PART',
'ANALYZE',
])
elif self._match('ISOLATE_PART_ERRS'):
self._expect_one_of([
'SETUP_ERRS_ONLY',
'LOAD_ERRS_ONLY',
'SETUP_AND_LOAD_ERRS',
'NO_ISOLATION',
])
elif self._match_one_of(['PART_FILE_LOCATION', 'MAP_FILE_INPUT', 'MAP_FILE_OUTPUT', 'DISTFILE']):
self._expect_clp_string()
elif self._match_one_of(['OUTPUT_DBPARTNUMS', 'PARTITIONING_DBPARTNUMS']):
self._expect('(')
while True:
self._expect(TT.NUMBER)
if self._match('TO'):
self._expect(TT.NUMBER)
if not self._match(','):
break
self._expect(')')
elif self._match_one_of(['MAXIMUM_PART_AGENTS', 'STATUS_INTERVAL', 'TRACE', 'RUN_STAT_DBPARTNUM']):
self._expect(TT.NUMBER)
elif self._match('PORT_RANGE'):
self._expect_sequence(['(', TT.NUMBER, ',', TT.NUMBER, ')'])
elif self._match_one_of(['CHECK_TRUNCATION', 'NEWLINE', 'OMIT_HEADER']):
pass
else:
break
def _parse_load_query_command(self):
"""Parses a LOAD QUERY command"""
# LOAD QUERY already matched
self._expect('TABLE')
self._parse_table_name()
if self._match('TO'):
self._expect_clp_string()
self._match_one_of(['NOSUMMARY', 'SUMMARYONLY'])
self._match('SHOWDELTA')
def _parse_migrate_db_command(self):
"""Parses a MIGRATE DB command"""
# MIGRATE [DATABASE|DB] already matched
self._expect_clp_string()
self._parse_login(optional=True, allowchange=False)
def _parse_on_command(self):
"""Parses the custom (non-CLP) ON SQLCODE|SQLSTATE|ERROR|REGEX command"""
# ON already matched
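# Illustrative example of a statement accepted by the grammar below
# (this is a custom command of this tool, not DB2 CLP):
#   ON SQLCODE -803 WAIT 5 SECONDS AND RETRY STATEMENT 3 TIMES THEN CONTINUE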
if self._match('SQLCODE'):
if self._match((TT.OPERATOR, '-')):
self._expect(TT.NUMBER)
else:
self._expect_one_of([TT.STRING, TT.NUMBER])
elif self._match('SQLSTATE'):
self._expect(TT.STRING)
elif self._match('ERROR'):
pass
elif self._match('REGEX'):
self._expect(TT.STRING)
else:
self._expected_one_of(['SQLCODE', 'SQLSTATE', 'ERROR', 'REGEX'])
wait = False
if self._match('WAIT'):
wait = True
self._expect(TT.NUMBER)
self._expect_one_of(['SECOND', 'SECONDS', 'MINUTE', 'MINUTES', 'HOUR', 'HOURS'])
self._match('AND')
retry = False
if self._match('RETRY'):
retry = True
self._expect_one_of(['STATEMENT', 'SCRIPT'])
if self._match(TT.NUMBER):
self._expect_one_of(['TIME', 'TIMES'])
self._match('THEN')
if wait and not retry:
self._expected('RETRY')
self._expect_one_of(['FAIL', 'STOP', 'CONTINUE', 'IGNORE'])
def _parse_ping_command(self):
"""Parses a PING command"""
# PING already matched
self._expect_clp_string()
if self._match('REQUEST'):
self._expect(TT.NUMBER)
if self._match('RESPONSE'):
self._expect(TT.NUMBER)
if self._match(TT.NUMBER):
self._match_one_of(['TIME', 'TIMES'])
def _parse_precompile_command(self):
"""Parses a PRECOMPILE command"""
# [PREP|PRECOMPILE] already matched
# XXX Can these parameters be specified in any order?
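# Illustrative example (standard DB2 CLP; for reference only -- note the
# XXX above: the option order here mirrors this parser, which may be
# stricter than the CLP itself):
#   PREP myprog.sqc BINDFILE USING myprog.bnd ISOLATION CS MESSAGES prep.msg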
self._expect_clp_string()
if self._match('ACTION'):
if self._expect_one_of(['ADD', 'REPLACE']).value == 'ADD':
pass
else:
if self._match('RETAIN'):
self._expect_one_of(['YES', 'NO'])
if self._match('REPLVER'):
self._expect_clp_string()
if self._match('APREUSE'):
self._expect_one_of(['YES', 'NO'])
if self._match('BINDFILE'):
if self._match('USING'):
self._expect_clp_string()
if self._match('BLOCKING'):
self._expect_one_of(['UNAMBIG', 'ALL', 'NO'])
if self._match('COLLECTION'):
self._expect(TT.IDENTIFIER)
if self._match('CALL_RESOLUTION'):
self._expect_one_of(['IMMEDIATE', 'DEFERRED'])
if self._match('CCSIDG'):
self._expect(TT.NUMBER)
if self._match('CCSIDM'):
self._expect(TT.NUMBER)
if self._match('CCSIDS'):
self._expect(TT.NUMBER)
if self._match('CHARSUB'):
self._expect_one_of(['DEFAULT', 'BIT', 'MIXED', 'SBCS'])
if self._match('CNULREQD'):
self._expect_one_of(['YES', 'NO'])
if self._match('COLLECTION'):
self._expect(TT.IDENTIFIER)
self._match_one_of(['COMPILE', 'PRECOMPILE'])
if self._match('CONCURRENTACCESSRESOLUTION'):
if self._expect_one_of(['USE', 'WAIT']).value == 'USE':
self._expect_sequence(['CURRENTLY', 'COMMITTED'])
else:
self._expect_sequence(['FOR', 'OUTCOME'])
if self._match('CONNECT'):
self._expect(TT.NUMBER)
if self._match('DATETIME'):
self._expect_one_of(['DEF', 'EUR', 'ISO', 'JIS', 'LOC', 'USA'])
if self._match('DBPROTOCOL'):
self._expect_one_of(['DRDA', 'PRIVATE'])
if self._match('DEC'):
self._expect(TT.NUMBER)
if self._match('DECDEL'):
self._expect_one_of(['PERIOD', 'COMMA'])
if self._match('DEFERRED_PREPARE'):
self._expect_one_of(['NO', 'ALL', 'YES'])
if self._match('DEGREE'):
self._expect_one_of([TT.NUMBER, 'ANY'])
if self._match('DISCONNECT'):
self._expect_one_of(['EXPLICIT', 'AUTOMATIC', 'CONDITIONAL'])
if self._match('DYNAMICRULES'):
self._expect_one_of(['RUN', 'BIND', 'INVOKERUN', 'INVOKEBIND', 'DEFINERUN', 'DEFINEBIND'])
if self._match('ENCODING'):
self._expect_one_of(['ASCII', 'EBCDIC', 'UNICODE', 'CCSID'])
if self._match('EXPLAIN'):
self._expect_one_of(['NO', 'ALL', 'ONLY', 'REOPT', 'YES'])
if self._match('EXPLSNAP'):
self._expect_one_of(['NO', 'ALL', 'REOPT', 'YES'])
if self._match('EXTENDEDINDICATOR'):
self._expect_one_of(['YES', 'NO'])
if self._match('FEDERATED'):
self._expect_one_of(['YES', 'NO'])
if self._match('FEDERATED_ASYNCHRONY'):
self._expect_one_of([TT.NUMBER, 'ANY'])
if self._match('FUNCPATH'):
self._parse_ident_list()
if self._match('GENERIC'):
self._expect_clp_string()
if self._match('IMMEDWRITE'):
self._expect_one_of(['NO', 'YES', 'PH1'])
if self._match('INSERT'):
self._expect_one_of(['DEF', 'BUF'])
if self._match('ISOLATION'):
self._expect_one_of(['CS', 'NC', 'RR', 'RS', 'UR'])
if self._match('KEEPDYNAMIC'):
self._expect_one_of(['YES', 'NO'])
if self._match('LANGLEVEL'):
self._expect_one_of(['SAA1', 'MIA', 'SQL92E'])
if self._match('LEVEL'):
self._expect(TT.IDENTIFIER)
if self._match('LONGERROR'):
self._expect_one_of(['YES', 'NO'])
if self._match('MESSAGES'):
self._expect_clp_string()
if self._match('NOLINEMACRO'):
pass
if self._match('OPTHINT'):
self._expect_clp_string()
if self._match('OPTLEVEL'):
self._expect(TT.NUMBER)
if self._match('OPTPROFILE'):
self._expect_clp_string()
if self._match('OS400NAMING'):
self._expect_one_of(['SYSTEM', 'SQL'])
if self._match('OUTPUT'):
self._expect_clp_string()
if self._match('OWNER'):
self._expect(TT.IDENTIFIER)
if self._match('PACKAGE'):
if self._match('USING'):
self._expect(TT.IDENTIFIER)
if self._match('PREPROCESSOR'):
self._expect_clp_string()
if self._match('QUALIFIER'):
self._expect(TT.IDENTIFIER)
if self._match('QUERYOPT'):
self._expect(TT.NUMBER)
if self._match('RELEASE'):
self._expect_one_of(['COMMIT', 'DEALLOCATE'])
if self._match('REOPT'):
self._expect_one_of(['NONE', 'ONCE', 'ALWAYS', 'VARS'])
if self._match_one_of(['REOPT', 'NOREOPT']):
self._expect('VARS')
if self._match('SQLCA'):
self._expect_one_of(['NONE', 'SAA'])
if self._match('SQLERROR'):
self._expect_one_of(['NOPACKAGE', 'CHECK', 'CONTINUE'])
if self._match('SQLFLAG'):
self._expect_one_of(['SQL92E', 'MVSDB2V23', 'MVSDB2V31', 'MVSDB2V41'])
self._expect('SYNTAX')
if self._match('SORTSEQ'):
self._expect_one_of(['JOBRUN', 'HEX'])
if self._match('SQLRULES'):
self._expect_one_of(['DB2', 'STD'])
if self._match('SQLWARN'):
self._expect_one_of(['YES', 'NO'])
if self._match('STATICREADONLY'):
self._expect_one_of(['YES', 'NO', 'INSENSITIVE'])
if self._match('STRDEL'):
self._expect_one_of(['APOSTROPHE', 'QUOTE'])
if self._match('SYNCPOINT'):
self._expect_one_of(['ONEPHASE', 'NONE', 'TWOPHASE'])
if self._match('SYNTAX'):
pass
if self._match('TARGET'):
self._expect_one_of(['IBMCOB', 'MFCOB', 'ANSI_COBOL', 'C', 'CPLUSPLUS', 'FORTRAN', 'BORLAND_C', 'BORLAND_CPLUSPLUS'])
if self._match('TEXT'):
self._expect_clp_string()
if self._match('TRANSFORM'):
self._expect('GROUP')
self._expect(TT.IDENTIFIER)
if self._match('VALIDATE'):
self._expect_one_of(['BIND', 'RUN'])
if self._match('WCHARTYPE'):
self._expect_one_of(['NOCONVERT', 'CONVERT'])
if self._match('VERSION'):
self._expect_clp_string()
def _parse_prune_history_command(self):
"""Parses a PRUNE HISTORY command"""
# PRUNE HISTORY already matched
self._expect(TT.NUMBER)
self._match_sequence(['WITH', 'FORCE', 'OPTION'])
self._match_sequence(['AND', 'DELETE'])
def _parse_prune_logfile_command(self):
"""Parses a PRUNE LOGFILE command"""
# PRUNE LOGFILE already matched
self._expect_sequence(['PRIOR', 'TO'])
self._expect_clp_string()
def _parse_put_routine_command(self):
"""Parses a PUT ROUTINE command"""
# PUT ROUTINE already matched
self._expect('FROM')
self._expect_clp_string()
if self._match('OWNER'):
self._expect(TT.IDENTIFIER)
self._match_sequence(['USE', 'REGISTERS'])
def _parse_query_client_command(self):
"""Parses a QUERY CLIENT command"""
# QUERY CLIENT already matched
pass
def _parse_quiesce_command(self):
"""Parses a QUIESCE DB / INSTANCE command"""
# QUIESCE already matched
if self._match('INSTANCE'):
self._expect_clp_string()
if self._match_one_of(['USER', 'GROUP']):
self._expect(TT.IDENTIFIER)
self._match_sequence(['RESTRICTED', 'ACCESS'])
elif self._match_one_of(['DATABASE', 'DB']):
pass
else:
self._expected_one_of(['DATABASE', 'DB', 'INSTANCE'])
if self._expect_one_of(['IMMEDIATE', 'DEFER']).value == 'DEFER':
if self._match('WITH'):
self._expect_sequence(['TIMEOUT', TT.NUMBER])
self._match_sequence(['FORCE', 'CONNECTIONS'])
def _parse_quiesce_tablespaces_command(self):
"""Parses a QUIESCE TABLESPACES command"""
# QUIESCE TABLESPACES already matched
self._expect_sequence(['FOR', 'TABLE'])
self._parse_table_name()
if self._expect_one_of(['SHARE', 'INTENT', 'EXCLUSIVE', 'RESET']).value == 'INTENT':
self._expect_sequence(['TO', 'UPDATE'])
def _parse_quit_command(self):
"""Parses a QUIT command"""
# QUIT already matched
pass
def _parse_rebind_command(self):
"""Parses a REBIND command"""
# REBIND already matched
self._match('PACKAGE')
self._parse_subschema_name()
if self._match('VERSION'):
self._expect_clp_string()
if self._match('APREUSE'):
self._expect_one_of(['YES', 'NO'])
if self._match('RESOLVE'):
self._expect_one_of(['ANY', 'CONSERVATIVE'])
if self._match('REOPT'):
self._expect_one_of(['NONE', 'ONCE', 'ALWAYS'])
def _parse_recover_db_command(self):
"""Parses a RECOVER DB command"""
# RECOVER [DATABASE|DB] already matched
self._expect_clp_string()
if self._match('TO'):
if self._match('END'):
self._expect_sequence(['OF', 'LOGS'])
self._parse_db_partitions_clause()
else:
self._expect_clp_string()
if self._match('USING'):
self._expect_one_of(['LOCAL', 'UTC'])
self._expect('TIME')
if self._match('ON'):
self._expect('ALL')
self._expect_one_of(['DBPARTITIONNUMS', 'NODES'])
self._parse_login(optional=True, allowchange=False)
if self._match('USING'):
self._expect_sequence(['HISTORY', 'FILE'])
self._expect('(')
self._expect_clp_string()
if self._match(','):
while True:
self._expect_clp_string()
self._expect('ON')
self._expect_one_of(['DBPARTITIONNUM', 'NODE'])
self._expect(TT.NUMBER)
if not self._match(','):
break
self._expect(')')
if self._match('OVERFLOW'):
self._expect_sequence(['LOG', 'PATH'])
self._expect('(')
self._expect_clp_string()
if self._match(','):
while True:
self._expect_clp_string()
self._expect('ON')
self._expect_one_of(['DBPARTITIONNUM', 'NODE'])
self._expect(TT.NUMBER)
if not self._match(','):
break
self._expect(')')
if self._match('COMPRLIB'):
self._expect_clp_string()
if self._match('COMPROPTS'):
self._expect_clp_string()
self._match('RESTART')
def _parse_redistribute_database_partition_group_command(self):
"""Parses a REDISTRIBUTE DATABASE PARTITION GROUP command"""
# REDISTRIBUTE DATABASE PARTITION GROUP already matched
self._expect_clp_string()
self._match_sequence(['NOT', 'ROLLFORWARD', 'RECOVERABLE'])
t = self._expect_one_of(['UNIFORM', 'USING', 'CONTINUE', 'ABORT']).value
partitions = False
if t == 'USING':
if self._expect_one_of(['DISTFILE', 'TARGETMAP']).value == 'DISTFILE':
partitions = True
self._expect_clp_string()
elif t == 'UNIFORM':
partitions = True
if partitions:
if self._match('ADD'):
self._parse_db_partition_list_clause(size=False)
if self._match('DROP'):
self._parse_db_partition_list_clause(size=False)
if self._match('TABLE'):
self._expect('(')
while True:
self._parse_table_name()
if not self._match(','):
break
self._expect(')')
self._match_one_of(['ONCE', 'FIRST'])
if self._match('INDEXING'):
self._expect('MODE')
self._expect_one_of(['REBUILD', 'DEFERRED'])
elif self._match('DATA'):
self._expect('BUFFER')
self._expect(TT.NUMBER)
elif self._match('STATISTICS'):
if self._expect_one_of(['USE', 'NONE']).value == 'USE':
self._expect('PROFILE')
elif self._match('STOP'):
self._expect('AT')
self._expect_clp_string()
def _parse_refresh_ldap_command(self):
"""Parses a REFRESH LDAP command"""
# REFRESH LDAP already matched
if self._match('CLI'):
self._expect('CFG')
elif self._match_one_of(['DB', 'NODE']):
self._expect('DIR')
elif self._match('IMMEDIATE'):
self._match('ALL')
else:
self._expected_one_of(['CLI', 'DB', 'NODE', 'IMMEDIATE'])
def _parse_register_command(self):
"""Parses a REGISTER command"""
# REGISTER already matched
self._match_sequence(['DB2', 'SERVER'])
self._match('IN')
self._match('ADMIN')
self._expect('LDAP')
self._expect_one_of(['NODE', 'AS'])
self._expect_clp_string()
self._expect('PROTOCOL')
if self._expect_one_of(['TCPIP', 'TCPIP4', 'TCPIP6', 'NPIPE']).value != 'NPIPE':
if self._match('HOSTNAME'):
self._expect_clp_string()
if self._match('SVCENAME'):
self._expect_clp_string()
self._match_sequence(['SECURITY', 'SOCKS'])
if self._match('REMOTE'):
self._expect_clp_string()
if self._match('INSTANCE'):
self._expect_clp_string()
if self._match('NODETYPE'):
self._expect_one_of(['SERVER', 'MPP', 'DCS'])
if self._match('OSTYPE'):
self._expect_clp_string()
if self._match('WITH'):
self._expect_clp_string()
self._parse_login(optional=True, allowchange=False)
def _parse_register_xmlschema_command(self):
"""Parses a REGISTER XMLSCHEMA command"""
# REGISTER XMLSCHEMA already matched
self._expect_clp_string()
self._expect('FROM')
self._expect_clp_string()
if self._match('WITH'):
self._expect_clp_string()
if self._match('AS'):
self._parse_subschema_name()
if self._match('('):
while True:
self._expect('ADD')
self._expect_clp_string()
self._expect('FROM')
self._expect_clp_string()
if self._match('WITH'):
self._expect_clp_string()
if self._match(')'):
break
if self._match('COMPLETE'):
if self._match('WITH'):
self._expect_clp_string()
if self._match('ENABLE'):
self._expect('DECOMPOSITION')
def _parse_register_xsrobject_command(self):
"""Parses a REGISTER XSROBJECT command"""
# REGISTER XSROBJECT already matched
self._expect_clp_string()
if self._match('PUBLIC'):
self._expect_clp_string()
self._expect('FROM')
self._expect_clp_string()
if self._match('AS'):
self._parse_subschema_name()
if self._match('EXTERNAL'):
self._expect('ENTITY')
else:
self._expect_one_of(['DTD', 'EXTERNAL'])
def _parse_reorg_command(self):
"""Parses a REORG command"""
def parse_table_clause():
if self._match('INDEX'):
self._parse_index_name()
if self._match('INPLACE'):
if not self._match_one_of(['STOP', 'PAUSE']):
if self._match('ALLOW'):
self._expect_one_of(['READ', 'WRITE'])
self._expect('ACCESS')
if self._match('NOTRUNCATE'):
self._expect('TABLE')
self._match_one_of(['START', 'RESUME'])
else:
if self._match('ALLOW'):
self._expect_one_of(['READ', 'NO'])
self._expect('ACCESS')
if self._match('USE'):
self._expect(TT.IDENTIFIER)
self._match('INDEXSCAN')
if self._match('LONGLOBDATA'):
if self._match('USE'):
self._expect(TT.IDENTIFIER)
self._match_one_of(['KEEPDICTIONARY', 'RESETDICTIONARY'])
def parse_index_clause():
if self._match('ALLOW'):
self._expect_one_of(['NO', 'WRITE', 'READ'])
self._expect('ACCESS')
t = self._match_one_of(['CONVERT', 'CLEANUP'])
if t is not None and t.value == 'CLEANUP':
self._expect('ONLY')
self._match_one_of(['ALL', 'PAGES'])
# REORG already matched
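# Illustrative examples (standard DB2 CLP; for reference only):
#   REORG TABLE DB2INST1.STAFF INDEX DB2INST1.STAFF_IDX
#       INPLACE ALLOW WRITE ACCESS START
#   REORG INDEXES ALL FOR TABLE DB2INST1.STAFF ALLOW WRITE ACCESS
#       CLEANUP ONLY PAGES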
if self._match('TABLE'):
self._parse_table_name()
if self._match('RECLAIM'):
self._expect_sequence(['EXTENTS', 'ONLY'])
if self._match('ALLOW'):
self._expect_one_of(['READ', 'WRITE', 'NO'])
self._expect('ACCESS')
else:
parse_table_clause()
elif self._match('INDEX'):
self._parse_index_name()
if self._match('FOR'):
self._expect('TABLE')
self._parse_table_name()
parse_index_clause()
elif self._match('INDEXES'):
self._expect_sequence(['ALL', 'FOR', 'TABLE'])
self._parse_table_name()
parse_index_clause()
else:
self._expected_one_of(['TABLE', 'INDEX', 'INDEXES'])
if self._match_sequence(['ON', 'DATA', 'PARTITION']):
self._expect(TT.IDENTIFIER)
self._parse_db_partitions_clause()
def _parse_reorgchk_command(self):
"""Parses a REORGCHK command"""
# REORGCHK already matched
if self._match_one_of(['UPDATE', 'CURRENT']):
self._expect('STATISTICS')
if self._match('ON'):
if self._match('SCHEMA'):
self._expect(TT.IDENTIFIER)
elif self._match('TABLE'):
if not self._match_one_of(['SYSTEM', 'USER', 'ALL']):
self._parse_table_name()
else:
self._expected_one_of(['SCHEMA', 'TABLE'])
def _parse_reset_admin_cfg_command(self):
"""Parses a RESET ADMIN CFG command"""
# RESET ADMIN [CONFIGURATION|CONFIG|CFG] already matched
if self._match('FOR'):
self._expect('NODE')
self._expect_clp_string()
self._parse_login(optional=True, allowchange=False)
def _parse_reset_alert_cfg_command(self):
"""Parses a RESET ALERT CFG command"""
# RESET ALERT [CONFIGURATION|CONFIG|CFG] already matched
self._expect('FOR')
if (
self._match_sequence(['DATABASE', 'MANAGER'])
or self._match_sequence(['DB', 'MANAGER'])
or self._match_one_of(['DBM', 'CONTAINERS', 'DATABASES', 'TABLESPACES'])
):
pass
elif (
self._match_sequence(['CONTAINER', TT.IDENTIFIER, 'FOR', TT.IDENTIFIER])
or self._match_sequence(['TABLESPACE', TT.IDENTIFIER])
or self._match('DATABASE')
):
self._expect('ON')
self._expect_clp_string()
if self._match('USING'):
self._expect_clp_string()
def _parse_reset_db_cfg_command(self):
"""Parses a RESET DB CFG command"""
# RESET [DATABASE|DB] [CONFIGURATION|CONFIG|CFG] already matched
self._expect('FOR')
self._expect_clp_string()
if self._match_one_of(['DBPARTITIONNUM', 'NODE']):
self._expect(TT.NUMBER)
def _parse_reset_dbm_cfg_command(self):
"""Parses a RESET DBM CFG command"""
# RESET [DATABASE MANAGER|DB MANAGER|DBM] [CONFIGURATION|CONFIG|CFG] already matched
pass
def _parse_reset_monitor_command(self):
"""Parses a RESET MONITOR command"""
# RESET MONITOR already matched
if self._match('ALL'):
self._match('DCS')
elif self._match('FOR'):
self._match('DCS')
self._expect_one_of(['DATABASE', 'DB'])
self._expect_clp_string()
else:
self._expected_one_of(['ALL', 'FOR'])
self._parse_db_partition_clause()
def _parse_restart_db_command(self):
"""Parses a RESTART DB command"""
# RESTART [DATABASE|DB] already matched
self._expect_clp_string()
self._parse_login(optional=True, allowchange=False)
if self._match('DROP'):
self._expect_sequence(['PENDING', 'TABLESPACES'])
self._expect('(')
while True:
self._expect(TT.IDENTIFIER)
if not self._match(','):
break
self._expect(')')
self._match_sequence(['WRITE', 'RESUME'])
def _parse_restore_db_command(self):
"""Parses a RESTORE DB command"""
# RESTORE [DATABASE|DB] already matched
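# Illustrative example (standard DB2 CLP; for reference only):
#   RESTORE DB SAMPLE FROM /backups TAKEN AT 20230101120000
#       INTO NEWDB WITHOUT PROMPTING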
self._expect_clp_string()
if self._match_one_of(['CONTINUE', 'ABORT']):
pass
else:
self._parse_login(optional=True, allowchange=False)
if self._match('TABLESPACE'):
self._expect('(')
self._parse_ident_list()
self._expect(')')
if self._match('SCHEMA'):
if self._match('('):
self._parse_ident_list()
self._expect(')')
self._match('ONLINE')
elif (
self._match_sequence(['HISTORY', 'FILE'])
or self._match_sequence(['COMPRESSION', 'LIBRARY'])
or self._match('LOGS')
):
self._match('ONLINE')
elif self._match('REBUILD'):
self._expect('WITH')
if self._match('ALL'):
self._expect_sequence(['TABLESPACES', 'IN'])
self._expect_one_of(['DATABASE', 'IMAGE'])
if self._match('EXCEPT'):
self._expect('TABLESPACE')
self._expect('(')
self._parse_ident_list()
self._expect(')')
else:
self._expect('TABLESPACE')
self._expect('(')
self._parse_ident_list()
self._expect(')')
if self._match('INCREMENTAL'):
self._match_one_of(['AUTO', 'AUTOMATIC', 'ABORT'])
if self._match('USE'):
self._match_one_of(['TSM', 'XBSA'])
if self._match('OPEN'):
self._expect(TT.NUMBER)
self._expect('SESSIONS')
if self._match('OPTIONS'):
self._expect_clp_string()
# XXX Add support for @filename response file
elif self._match('FROM'):
self._parse_clp_string_list()
elif self._match('LOAD'):
self._expect_clp_string()
if self._match('OPEN'):
self._expect(TT.NUMBER)
self._expect('SESSIONS')
if self._match('OPTIONS'):
self._expect_clp_string()
# XXX Add support for @filename response file
if self._match('TAKEN'):
self._expect('AT')
self._expect(TT.NUMBER)
if self._match('TO'):
self._expect_clp_string()
elif self._match('DBPATH'):
self._expect('ON')
self._expect_clp_string()
elif self._match('ON'):
self._parse_clp_string_list()
if self._match('DBPATH'):
self._expect('ON')
self._expect_clp_string()
if self._match('INTO'):
self._expect_clp_string()
if self._match('LOGTARGET'):
if self._match_one_of(['INCLUDE', 'EXCLUDE']):
self._match('FORCE')
else:
self._expect_clp_string()
if self._match('NEWLOGPATH'):
self._expect_clp_string()
if self._match('WITH'):
self._expect(TT.NUMBER)
self._expect('BUFFERS')
if self._match('BUFFER'):
self._expect(TT.NUMBER)
self._match_sequence(['REPLACE', 'HISTORY', 'FILE'])
self._match_sequence(['REPLACE', 'EXISTING'])
if self._match('REDIRECT'):
if self._match('GENERATE'):
self._expect('SCRIPT')
self._expect_clp_string()
if self._match('PARALLELISM'):
self._expect(TT.NUMBER)
if self._match('COMPRLIB'):
self._expect_clp_string()
if self._match('COMPROPTS'):
self._expect_clp_string()
self._match_sequence(['WITHOUT', 'ROLLING', 'FORWARD'])
self._match_sequence(['WITHOUT', 'PROMPTING'])
def _parse_rewind_tape_command(self):
"""Parses a REWIND TAPE command"""
# REWIND TAPE already matched
if self._match('ON'):
self._expect_clp_string()
def _parse_rollforward_db_command(self):
"""Parses a ROLLFORWARD DB command"""
# ROLLFORWARD [DATABASE|DB] already matched
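# Illustrative example (standard DB2 CLP; for reference only):
#   ROLLFORWARD DB SAMPLE TO END OF LOGS AND COMPLETE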
self._expect_clp_string()
self._parse_login(optional=True, allowchange=False)
if self._match('TO'):
if self._match('END'):
self._expect('OF')
if self._expect_one_of(['LOGS', 'BACKUP']).value == 'BACKUP':
if self._match('ON'):
self._expect('ALL')
self._expect_one_of(['DBPARTITIONNUMS', 'NODES'])
else:
self._parse_db_partitions_clause()
else:
self._expect(TT.NUMBER)
if self._match('ON'):
self._expect('ALL')
self._expect_one_of(['DBPARTITIONNUMS', 'NODES'])
if self._match('USING'):
self._expect_one_of(['UTC', 'LOCAL'])
self._expect('TIME')
if self._match('AND'):
self._expect_one_of(['COMPLETE', 'STOP'])
elif self._match_one_of(['COMPLETE', 'STOP', 'CANCEL']):
self._parse_db_partitions_clause()
elif self._match('QUERY'):
self._expect('STATUS')
if self._match('USING'):
self._expect_one_of(['UTC', 'LOCAL'])
self._expect('TIME')
self._parse_db_partitions_clause()
if self._match('TABLESPACE'):
if not self._match('ONLINE'):
self._expect('(')
self._parse_ident_list()
self._expect(')')
self._match('ONLINE')
if self._match('OVERFLOW'):
self._expect_sequence(['LOG', 'PATH'])
self._expect('(')
self._expect_clp_string()
if self._match(','):
while True:
self._expect_clp_string()
self._expect('ON')
self._expect_one_of(['DBPARTITIONNUM', 'NODE'])
self._expect(TT.NUMBER)
if not self._match(','):
break
self._expect(')')
self._match('NORETRIEVE')
if self._match('RECOVER'):
self._expect_sequence(['DROPPED', 'TABLE'])
self._expect_clp_string()
self._expect('TO')
self._expect_clp_string()
def _parse_runstats_command(self):
"""Parses a RUNSTATS command"""
def parse_index_options():
"""Parses the indexing clauses of a RUNSTATS command"""
# FOR/AND already matched
if self._match('SAMPLED'):
self._expect('DETAILED')
else:
self._match('DETAILED')
self._expect_one_of(['INDEX', 'INDEXES'])
if not self._match('ALL'):
while True:
self._parse_index_name()
if not self._match(','):
break
def parse_column_options(dist):
"""Parses column options clauses of a RUNSTATS command"""
# ON already matched
if (
self._match_sequence(['ALL', 'COLUMNS', 'AND', 'COLUMNS'])
or self._match_sequence(['KEY', 'COLUMNS', 'AND', 'COLUMNS'])
or self._match('COLUMNS')
):
self._expect('(')
while True:
if self._match('('):
self._parse_ident_list()
self._expect(')')
else:
self._expect(TT.IDENTIFIER)
if self._match('LIKE'):
self._expect('STATISTICS')
if dist:
while self._match_one_of(['NUM_FREQVALUES', 'NUM_QUANTILES']):
self._expect(TT.NUMBER)
if not self._match(','):
break
self._expect(')')
else:
self._expect_one_of(['ALL', 'KEY', 'COLUMNS'])
self._expect('COLUMNS')
# RUNSTATS already matched
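# Illustrative example (standard DB2 CLP; for reference only):
#   RUNSTATS ON TABLE DB2INST1.STAFF WITH DISTRIBUTION
#       AND DETAILED INDEXES ALL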
self._expect_sequence(['ON', 'TABLE'])
self._parse_table_name()
if self._match_one_of(['USE', 'UNSET']):
self._expect('PROFILE')
else:
if self._match('FOR'):
parse_index_options()
self._match_sequence(['EXCLUDING', 'XML', 'COLUMNS'])
else:
if self._match('ON'):
parse_column_options(dist=False)
if self._match('WITH'):
self._expect('DISTRIBUTION')
if self._match('ON'):
parse_column_options(dist=True)
if self._match('DEFAULT'):
while self._match_one_of(['NUM_FREQVALUES', 'NUM_QUANTILES']):
self._expect(TT.NUMBER)
self._match_sequence(['EXCLUDING', 'XML', 'COLUMNS'])
if self._match('AND'):
parse_index_options()
if self._match('ALLOW'):
self._expect_one_of(['READ', 'WRITE'])
self._expect('ACCESS')
if self._match('TABLESAMPLE'):
self._expect_one_of(['SYSTEM', 'BERNOULLI'])
self._expect('(')
self._expect(TT.NUMBER)
self._expect(')')
if self._match('REPEATABLE'):
self._expect('(')
self._expect(TT.NUMBER)
self._expect(')')
if self._match('SET'):
self._expect('PROFILE')
self._match_one_of(['NONE', 'ONLY'])
elif self._match('UPDATE'):
self._expect('PROFILE')
self._match('ONLY')
if self._match('UTIL_IMPACT_PRIORITY'):
self._match(TT.NUMBER)
def _parse_set_client_command(self):
"""Parses a SET CLIENT command"""
# SET CLIENT already matched
if self._match('CONNECT'):
self._expect(TT.NUMBER)
if self._match('DISCONNECT'):
self._expect_one_of(['EXPLICIT', 'CONDITIONAL', 'AUTOMATIC'])
if self._match('SQLRULES'):
self._expect_one_of(['DB2', 'STD'])
if self._match('SYNCPOINT'):
self._expect_one_of(['ONEPHASE', 'TWOPHASE', 'NONE'])
if self._match('CONNECT_DBPARTITIONNUM'):
self._expect_one_of(['CATALOG_DBPARTITIONNUM', TT.NUMBER])
if self._match('ATTACH_DBPARTITIONNUM'):
self._expect(TT.NUMBER)
def _parse_set_runtime_degree_command(self):
"""Parses a SET RUNTIME DEGREE command"""
# SET RUNTIME DEGREE already matched
self._expect('FOR')
if not self._match('ALL'):
self._expect('(')
while True:
self._expect(TT.NUMBER)
if not self._match(','):
break
self._expect(')')
self._expect('TO')
self._expect(TT.NUMBER)
def _parse_set_serveroutput_command(self):
"""Parses a SET SERVEROUTPUT command"""
# SET SERVEROUTPUT already matched
self._expect_one_of(['OFF', 'ON'])
def _parse_set_tablespace_containers_command(self):
"""Parses a SET TABLESPACE CONTAINERS command"""
# SET TABLESPACE CONTAINERS already matched
self._expect('FOR')
self._expect(TT.NUMBER)
if self._match_one_of(['REPLAY', 'IGNORE']):
self._expect_sequence(['ROLLFORWARD', 'CONTAINER', 'OPERATIONS'])
self._expect('USING')
if not self._match_sequence(['AUTOMATIC', 'STORAGE']):
self._expect('(')
while True:
if self._expect_one_of(['FILE', 'DEVICE', 'PATH']).value == 'PATH':
self._expect_clp_string()
else:
self._expect_clp_string()
self._expect(TT.NUMBER)
if not self._match(','):
break
self._expect(')')
def _parse_set_tape_position_command(self):
"""Parses a SET TAPE POSITION command"""
# SET TAPE POSITION already matched
if self._match('ON'):
self._expect_clp_string()
self._expect('TO')
self._expect(TT.NUMBER)
def _parse_set_util_impact_priority_command(self):
"""Parses a SET UTIL_IMPACT_PRIORITY command"""
# SET UTIL_IMPACT_PRIORITY already matched
self._expect('FOR')
self._expect(TT.NUMBER)
self._expect('TO')
self._expect(TT.NUMBER)
def _parse_set_workload_command(self):
"""Parses a SET WORKLOAD command"""
# SET WORKLOAD already matched
self._expect('TO')
self._expect_one_of(['AUTOMATIC', 'SYSDEFAULTADMWORKLOAD'])
def _parse_set_write_command(self):
"""Parses a SET WRITE command"""
# SET WRITE already matched
self._expect_one_of(['SUSPEND', 'RESUME'])
self._expect('FOR')
self._expect_one_of(['DATABASE', 'DB'])
def _parse_start_dbm_command(self):
"""Parses a START DBM command"""
# START [DATABASE MANAGER|DB MANAGER|DBM] already matched
if self._match('REMOTE'):
self._match('INSTANCE')
self._expect_clp_string()
self._expect_one_of(['ADMINNODE', 'HOSTNAME'])
self._expect_clp_string()
self._parse_login(optional=False, allowchange=False)
if self._match('ADMIN'):
self._expect('MODE')
if self._match_one_of(['USER', 'GROUP']):
self._expect(TT.IDENTIFIER)
self._match_sequence(['RESTRICTED', 'ACCESS'])
if self._match('PROFILE'):
self._expect_clp_string()
if self._match_one_of(['DBPARTITIONNUM', 'NODE']):
self._expect(TT.NUMBER)
if self._match('ADD'):
self._expect_one_of(['DBPARTITIONNUM', 'NODE'])
self._expect('HOSTNAME')
self._expect_clp_string()
self._expect('PORT')
self._expect(TT.NUMBER)
if self._match('COMPUTER'):
self._expect_clp_string()
self._parse_login(optional=True, allowchange=False)
if self._match('NETNAME'):
self._expect_clp_string()
if self._match('LIKE'):
self._expect_one_of(['DBPARTITIONNUM', 'NODE'])
self._expect(TT.NUMBER)
elif self._match('WITHOUT'):
self._expect('TABLESPACES')
elif self._match('RESTART'):
if self._match('HOSTNAME'):
self._expect_clp_string()
if self._match('PORT'):
self._expect(TT.NUMBER)
if self._match('COMPUTER'):
self._expect_clp_string()
self._parse_login(optional=True, allowchange=False)
if self._match('NETNAME'):
self._expect_clp_string()
elif self._match('STANDALONE'):
pass
def _parse_start_hadr_command(self):
"""Parses a START HADR command"""
# START HADR already matched
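        # A minimal illustrative command (the database name is hypothetical):
        #   START HADR ON DB sample AS PRIMARY BY FORCE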
self._expect('ON')
self._expect_one_of(['DATABASE', 'DB'])
self._expect_clp_string()
self._parse_login(optional=True, allowchange=False)
self._expect('AS')
if self._expect_one_of(['PRIMARY', 'STANDBY']).value == 'PRIMARY':
self._match_sequence(['BY', 'FORCE'])
def _parse_stop_dbm_command(self):
"""Parses a STOP DBM command"""
# STOP [DATABASE MANAGER|DB MANAGER|DBM] already matched
if self._match('PROFILE'):
self._expect_clp_string()
if self._match('DROP'):
self._expect_one_of(['DBPARTITIONNUM', 'NODE'])
self._expect(TT.NUMBER)
else:
self._match('FORCE')
if self._match_one_of(['DBPARTITIONNUM', 'NODE']):
self._expect(TT.NUMBER)
def _parse_stop_hadr_command(self):
"""Parses a STOP HADR command"""
# STOP HADR already matched
self._expect('ON')
self._expect_one_of(['DATABASE', 'DB'])
self._expect_clp_string()
self._parse_login(optional=True, allowchange=False)
def _parse_takeover_hadr_command(self):
"""Parses a TAKEOVER HADR command"""
# TAKEOVER HADR already matched
self._expect('ON')
self._expect_one_of(['DATABASE', 'DB'])
self._expect_clp_string()
self._parse_login(optional=True, allowchange=False)
if self._match_sequence(['BY', 'FORCE']):
self._match_sequence(['PEER', 'WINDOW', 'ONLY'])
def _parse_terminate_command(self):
"""Parses a TERMINATE command"""
# TERMINATE already matched
pass
def _parse_uncatalog_command(self):
"""Parses an UNCATALOG command"""
if self._match_one_of(['DATABASE', 'DB', 'NODE']):
self._expect_clp_string()
elif self._match('DCS'):
self._expect_one_of(['DATABASE', 'DB'])
self._expect_clp_string()
elif self._match('LDAP'):
self._expect_one_of(['DATABASE', 'DB', 'NODE'])
self._expect_clp_string()
self._parse_login(optional=True, allowchange=False)
elif self._match_one_of(['USER', 'SYSTEM']):
self._expect_sequence(['ODBC', 'DATA', 'SOURCE'])
self._expect_clp_string()
elif self._match('ODBC'):
self._expect_sequence(['DATA', 'SOURCE'])
self._expect_clp_string()
else:
self._expected_one_of([
'DATABASE',
'DB',
'NODE',
'DCS',
'LDAP',
'USER',
'SYSTEM',
'ODBC',
])
def _parse_unquiesce_command(self):
"""Parses an UNQUIESCE command"""
# UNQUIESCE already matched
if self._match('INSTANCE'):
self._expect_clp_string()
elif self._match_one_of(['DATABASE', 'DB']):
pass
else:
self._expected_one_of(['DATABASE', 'DB', 'INSTANCE'])
def _parse_update_admin_cfg_command(self):
"""Parses an UPDATE ADMIN CFG command"""
# UPDATE ADMIN CONFIGURATION|CONFIG|CFG already matched
self._expect('USING')
while True:
self._expect(TT.IDENTIFIER)
self._expect_one_of([TT.NUMBER, TT.STRING, TT.IDENTIFIER])
if self._peek_one_of(['FOR', TT.TERMINATOR, TT.EOF]):
break
if self._match_sequence(['FOR', 'NODE']):
self._expect_clp_string()
self._parse_login(optional=True, allowchange=False)
def _parse_update_alert_cfg_command(self):
"""Parses an UPDATE ALERT CFG command"""
# UPDATE ALERT CONFIGURATION|CONFIG|CFG already matched
self._expect('FOR')
if (
self._match_sequence(['DATABASE', 'MANAGER'])
or self._match_sequence(['DB', 'MANAGER'])
or self._match_one_of(['DBM', 'CONTAINERS', 'DATABASES', 'TABLESPACES'])
):
pass
elif (
self._match_sequence(['CONTAINER', TT.IDENTIFIER, 'FOR', TT.IDENTIFIER])
            or self._match_sequence(['TABLESPACE', TT.IDENTIFIER])
or self._match('DATABASE')
):
self._expect('ON')
self._expect_clp_string()
if self._match('USING'):
self._expect_clp_string()
if self._match('SET'):
while True:
self._expect_one_of(['ALARM', 'WARNING', 'SENSITIVITY', 'ACTIONSENABLED', 'THRESHOLDSCHECKED'])
self._expect_one_of([TT.NUMBER, 'YES', 'NO'])
if not self._match(','):
break
elif self._match('ADD'):
while True:
self._expect_one_of(['SCRIPT', 'TASK'])
self._expect_clp_string()
self._expect('TYPE')
if self._match('DB2'):
if (
self._match_sequence(['STATEMENT', 'TERMINATION', 'CHARACTER'])
or self._match_sequence(['STMT', 'TERM', 'CHAR'])
or self._match_sequence(['TERM', 'CHAR'])
):
self._expect_clp_string()
elif self._match_sequence(['OPERATING', 'SYSTEM']) or self._match('OS'):
if (
self._match_sequence(['COMMAND', 'LINE', 'PARAMETERS'])
or self._match('PARMS')
):
self._expect_clp_string()
else:
self._expected_one_of(['DB2', 'OS', 'OPERATING'])
self._expect_sequence(['WORKING', 'DIRECTORY'])
self._expect_clp_string()
self._expect('ON')
if self._expect_one_of(['WARNING', 'ALARM', 'ALLALERT', 'ATTENTION']).value == 'ATTENTION':
self._expect(TT.NUMBER)
if self._match('ON'):
self._expect_clp_string()
self._parse_login(optional=False, allowchange=False)
if not self._match(','):
break
else:
            # Assign unconditionally so the "if update:" test below cannot
            # hit an unbound local when the keyword is SET, ADD or DELETE.
            update = self._expect_one_of(['SET', 'ADD', 'UPDATE', 'DELETE']).value == 'UPDATE'
self._expect('ACTION')
while True:
self._expect_one_of(['SCRIPT', 'TASK'])
self._expect_clp_string()
self._expect('ON')
if self._expect_one_of(['WARNING', 'ALARM', 'ALLALERT', 'ATTENTION']).value == 'ATTENTION':
self._expect(TT.NUMBER)
if update:
while True:
self._expect('SET')
self._expect_one_of(['ALARM', 'WARNING', 'SENSITIVITY', 'ACTIONSENABLED', 'THRESHOLDSCHECKED'])
self._expect_one_of([TT.NUMBER, 'YES', 'NO'])
if not self._match(','):
break
if not self._match(','):
break
def _parse_update_alternate_server_command(self):
"""Parses an UPDATE ALTERNATE SERVER command"""
# UPDATE ALTERNATE SERVER already matched
self._expect('FOR')
if self._expect_one_of(['LDAP', 'DATABASE', 'DB']).value == 'LDAP':
self._expect_one_of(['DATABASE', 'DB'])
self._expect_clp_string()
self._expect('USING')
self._expect_one_of(['NODE', 'GWNODE'])
self._expect_clp_string()
self._parse_login(optional=True, allowchange=False)
else:
self._expect_clp_string()
self._expect_sequence(['USING', 'HOSTNAME'])
self._expect_clp_string()
self._expect('PORT')
self._expect_clp_string()
def _parse_update_cli_cfg_command(self):
"""Parses an UPDATE CLI CFG command"""
# UPDATE CLI CONFIGURATION|CONFIG|CFG already matched
if self._match('AT'):
self._expect_one_of(['GLOBAL', 'USER'])
self._expect('LEVEL')
self._expect_sequence(['FOR', 'SECTION'])
self._expect_clp_string()
self._expect('USING')
while True:
self._expect(TT.IDENTIFIER)
self._expect_one_of([TT.NUMBER, TT.STRING, TT.IDENTIFIER])
if self._peek_one_of([TT.TERMINATOR, TT.EOF]):
break
def _parse_update_command_options_command(self):
"""Parses an UPDATE COMMAND OPTIONS command"""
# UPDATE COMMAND OPTIONS already matched
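        # A minimal illustrative command (the log file name is hypothetical):
        #   UPDATE COMMAND OPTIONS USING C OFF Z ON output.log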
self._expect('USING')
while True:
option = self._expect_one_of([
'A', 'C', 'D', 'E', 'I', 'L', 'M', 'N',
'O', 'P', 'Q', 'R', 'S', 'V', 'W', 'Z',
]).value
value = self._expect_one_of(['ON', 'OFF']).value
if option in ('E', 'L', 'R', 'Z') and value == 'ON':
self._expect_clp_string()
if self._peek_one_of([TT.TERMINATOR, TT.EOF]):
break
def _parse_update_contact_command(self):
"""Parses an UPDATE CONTACT command"""
# UPDATE CONTACT already matched
self._expect_clp_string()
self._expect('USING')
while True:
if self._match('ADDRESS'):
self._expect_clp_string()
elif self._match('TYPE'):
self._expect_one_of(['EMAIL', 'PAGE'])
elif self._match('MAXPAGELEN'):
self._expect(TT.NUMBER)
elif self._match('DESCRIPTION'):
self._expect_clp_string()
else:
self._expected_one_of(['ADDRESS', 'TYPE', 'MAXPAGELEN', 'DESCRIPTION'])
if not self._match(','):
break
def _parse_update_contactgroup_command(self):
"""Parses an UPDATE CONTACTGROUP command"""
# UPDATE CONTACTGROUP already matched
self._expect_clp_string()
self._expect('(')
while True:
self._expect_one_of(['ADD', 'DROP'])
self._expect_one_of(['CONTACT', 'GROUP'])
self._expect_clp_string()
if not self._match(','):
break
self._expect(')')
if self._match('DESCRIPTION'):
self._expect_clp_string()
def _parse_update_db_cfg_command(self):
"""Parses an UPDATE DB CFG command"""
# UPDATE DATABASE|DB CONFIGURATION|CONFIG|CFG already matched
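        # A minimal illustrative command (the database name is hypothetical;
        # LOCKLIST is one real configuration parameter):
        #   UPDATE DB CFG FOR sample USING LOCKLIST 200 IMMEDIATE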
if self._match('FOR'):
self._expect_clp_string()
if self._match_one_of(['DBPARTITIONNUM', 'NODE']):
self._expect(TT.NUMBER)
self._expect('USING')
while True:
self._expect(TT.IDENTIFIER)
if self._match_one_of(['AUTOMATIC', 'MANUAL']):
pass
else:
self._expect_one_of([TT.NUMBER, TT.STRING, TT.IDENTIFIER])
self._match('AUTOMATIC')
if self._peek_one_of(['IMMEDIATE', 'DEFERRED', TT.TERMINATOR, TT.EOF]):
break
self._match_one_of(['IMMEDIATE', 'DEFERRED'])
def _parse_update_dbm_cfg_command(self):
"""Parses an UPDATE DBM CFG command"""
# UPDATE DATABASE MANAGER|DB MANAGER|DBM CONFIGURATION|CONFIG|CFG already matched
self._expect('USING')
while True:
self._expect(TT.IDENTIFIER)
if self._match_one_of(['AUTOMATIC', 'MANUAL']):
pass
else:
self._expect_one_of([TT.NUMBER, TT.STRING, TT.IDENTIFIER])
self._match('AUTOMATIC')
if self._peek_one_of(['IMMEDIATE', 'DEFERRED', TT.TERMINATOR, TT.EOF]):
break
self._match_one_of(['IMMEDIATE', 'DEFERRED'])
def _parse_update_notification_list_command(self):
"""Parses an UPDATE NOTIFICATION LIST command"""
# UPDATE [HEALTH] NOTIFICATION [CONTACT] LIST already matched
first = True
while True:
if not self._match_one_of(['ADD', 'DROP']):
if not first:
break
else:
self._expected_one_of(['ADD', 'DROP'])
first = False
self._expect_one_of(['CONTACT', 'GROUP'])
self._expect_clp_string()
def _parse_update_history_command(self):
"""Parses an UPDATE HISTORY command"""
# UPDATE HISTORY already matched
self._expect_one_of(['FOR', 'EID'])
self._expect(TT.NUMBER)
self._expect('WITH')
if self._match('LOCATION'):
self._expect_clp_string()
self._expect_sequence(['DEVICE', 'TYPE'])
self._expect_one_of(['D', 'K', 'T', 'A', 'F', 'U', 'P', 'N', 'X', 'Q', 'O'])
elif self._match('COMMENT'):
self._expect_clp_string()
elif self._match('STATUS'):
self._expect_one_of(['A', 'I', 'E', 'D', 'X'])
else:
self._expected_one_of(['LOCATION', 'COMMENT', 'STATUS'])
def _parse_update_ldap_node_command(self):
"""Parses an UPDATE LDAP NODE command"""
# UPDATE LDAP NODE already matched
self._expect_clp_string()
if self._match('HOSTNAME'):
self._expect_clp_string()
if self._match('SVCENAME'):
self._expect_clp_string()
if self._match('WITH'):
self._expect_clp_string()
self._parse_login(optional=True, allowchange=False)
def _parse_update_monitor_switches_command(self):
"""Parses an UPDATE MONITOR SWITCHES command"""
# UPDATE MONITOR SWITCHES already matched
self._expect('USING')
first = True
while True:
if not self._match_one_of(['BUFFERPOOL', 'LOCK', 'SORT', 'STATEMENT', 'TABLE', 'TIMESTAMP', 'UOW']):
if not first:
break
else:
self._expected_one_of(['BUFFERPOOL', 'LOCK', 'SORT', 'STATEMENT', 'TABLE', 'TIMESTAMP', 'UOW'])
first = False
self._expect_one_of(['OFF', 'ON'])
self._parse_db_partition_clause()
def _parse_update_xmlschema_command(self):
"""Parses an UPDATE XMLSCHEMA command"""
# UPDATE XMLSCHEMA already matched
self._parse_subschema_name()
self._expect('WITH')
self._parse_subschema_name()
self._match_sequence(['DROP', 'NEW', 'SCHEMA'])
def _parse_upgrade_db_command(self):
"""Parses an UPGRADE DB command"""
# UPGRADE DATABASE|DB already matched
self._expect_clp_string()
self._parse_login(optional=True, allowchange=False)
# COMPOUND COMMANDS ######################################################
def _parse_command(self):
"""Parses a top-level CLP command in a DB2 script"""
        # Ambiguity: some CLP commands start with the same keywords as SQL
        # statements (e.g. CREATE DATABASE versus CREATE DATABASE PARTITION
        # GROUP). Attempt to parse the input as a CLP command first, then
        # rewind and re-parse it as an SQL statement if that fails. This
        # mirrors DB2's own behaviour and explains its message "The command
        # was processed as an SQL statement because it was not a valid
        # Command Line Processor command": DB2 has two separate parsers,
        # and the CLP parser tries each input first, deferring to the SQL
        # parser on failure.
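        # For example, "CREATE DATABASE foo" parses here as a CLP command,
        # while "CREATE DATABASE PARTITION GROUP foo" raises ParseBacktrack
        # below, rewinding the token stream so the same input is re-parsed
        # by _parse_statement() as SQL.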
self._save_state()
try:
if self._match('ACTIVATE'):
self._expect_one_of(['DATABASE', 'DB'])
self._parse_activate_database_command()
elif self._match('ATTACH'):
self._parse_attach_command()
elif self._match('AUTOCONFIGURE'):
self._parse_autoconfigure_command()
elif self._match('BACKUP'):
self._expect_one_of(['DATABASE', 'DB'])
self._parse_backup_command()
elif self._match('CATALOG'):
self._parse_catalog_command()
elif self._match('CONNECT'):
self._parse_connect_command()
elif self._match('CREATE'):
if self._match_one_of(['DATABASE', 'DB']):
if self._match('PARTITION'):
raise ParseBacktrack()
self._parse_create_database_command()
elif self._match('TOOLS'):
self._expect('CATALOG')
self._parse_create_tools_catalog_command()
else:
raise ParseBacktrack()
elif self._match('DEACTIVATE'):
self._expect_one_of(['DATABASE', 'DB'])
self._parse_deactivate_database_command()
elif self._match('DETACH'):
self._parse_detach_command()
elif self._match('DISCONNECT'):
self._parse_disconnect_command()
elif self._match('DROP'):
if self._match_one_of(['DATABASE', 'DB']):
self._parse_drop_database_command()
elif self._match('TOOLS'):
self._expect('CATALOG')
self._parse_drop_tools_catalog_command()
else:
raise ParseBacktrack()
elif self._match('ECHO'):
self._parse_echo_command()
elif self._match('EXPORT'):
self._parse_export_command()
elif self._match('FORCE'):
self._expect('APPLICATION')
self._parse_force_application_command()
elif self._match('GET'):
if self._match('ADMIN'):
self._expect_one_of(['CONFIGURATION', 'CONFIG', 'CFG'])
self._parse_get_admin_cfg_command()
elif self._match('ALERT'):
self._expect_one_of(['CONFIGURATION', 'CONFIG', 'CFG'])
self._parse_get_alert_cfg_command()
elif self._match('CLI'):
self._expect_one_of(['CONFIGURATION', 'CONFIG', 'CFG'])
self._parse_get_cli_cfg_command()
elif self._match('CONNECTION'):
self._expect('STATE')
self._parse_get_connection_state_command()
elif self._match('CONTACTGROUP'):
self._parse_get_contactgroup_command()
elif self._match('CONTACTGROUPS'):
self._parse_get_contactgroups_command()
elif self._match('CONTACTS'):
self._parse_get_contacts_command()
elif self._match_one_of(['DATABASE', 'DB']):
if self._match_one_of(['CONFIGURATION', 'CONFIG', 'CFG']):
self._parse_get_db_cfg_command()
elif self._match('MANAGER'):
if self._match_one_of(['CONFIGURATION', 'CONFIG', 'CFG']):
self._parse_get_dbm_cfg_command()
elif self._match_sequence(['MONITOR', 'SWITCHES']):
self._parse_get_dbm_monitor_switches_command()
else:
self._expected_one_of(['CONFIGURATION', 'CONFIG', 'CFG', 'MONITOR'])
elif self._match('DBM'):
if self._match_one_of(['CONFIGURATION', 'CONFIG', 'CFG']):
self._parse_get_dbm_cfg_command()
elif self._match_sequence(['MONITOR', 'SWITCHES']):
self._parse_get_dbm_monitor_switches_command()
else:
self._expected_one_of(['CONFIGURATION', 'CONFIG', 'CFG', 'MONITOR'])
elif self._match('DESCRIPTION'):
self._expect_sequence(['FOR', 'HEALTH', 'INDICATOR'])
self._parse_get_description_for_health_indicator_command()
elif self._match('HEALTH'):
if self._match('NOTIFICATION'):
self._expect_sequence(['CONTACT', 'LIST'])
self._parse_get_notification_list_command()
elif self._match('SNAPSHOT'):
self._parse_get_health_snapshot_command()
else:
self._expected_one_of(['NOTIFICATION', 'SNAPSHOT'])
elif self._match('INSTANCE'):
self._parse_get_instance_command()
elif self._match('MONITOR'):
self._expect('SWITCHES')
self._parse_get_monitor_switches_command()
elif self._match('NOTIFICATION'):
self._expect('LIST')
self._parse_get_notification_list_command()
elif self._match('RECOMMENDATIONS'):
self._expect_sequence(['FOR', 'HEALTH', 'INDICATOR'])
self._parse_get_recommendations_for_health_indicator_command()
elif self._match('ROUTINE'):
self._parse_get_routine_command()
elif self._match('SNAPSHOT'):
self._parse_get_snapshot_command()
else:
raise ParseBacktrack()
elif self._match('IMPORT'):
self._parse_import_command()
elif self._match('INITIALIZE'):
self._expect('TAPE')
self._parse_initialize_tape_command()
elif self._match('INSPECT'):
self._parse_inspect_command()
elif self._match('INSTANCE'):
self._parse_instance_command()
elif self._match('LIST'):
if self._match('ACTIVE'):
self._expect('DATABASES')
self._parse_list_active_databases_command()
elif self._match('ADMIN'):
self._expect_sequence(['NODE', 'DIRECTORY'])
self._parse_list_node_directory_command()
elif self._match('APPLICATIONS'):
self._parse_list_applications_command()
elif self._match('COMMAND'):
self._expect('OPTIONS')
self._parse_list_command_options_command()
elif self._match_one_of(['DATABASE', 'DB']):
if self._match('DIRECTORY'):
self._parse_list_db_directory_command()
elif self._match('PARTITION'):
self._expect('GROUPS')
self._parse_list_database_partition_groups_command()
else:
self._expected_one_of(['DIRECTORY', 'PARTITION'])
elif self._match_one_of(['DBPARTITIONNUMS', 'NODES']):
self._parse_list_nodes_command()
elif self._match('DCS'):
if self._match('APPLICATIONS'):
self._parse_list_dcs_applications_command()
elif self._match('DIRECTORY'):
self._parse_list_dcs_directory_command()
else:
self._expected_one_of(['APPLICATIONS', 'DIRECTORY'])
elif self._match('DRDA'):
self._expect_sequence(['INDOUBT', 'TRANSACTIONS'])
self._parse_list_drda_indoubt_transactions_command()
elif self._match('HISTORY'):
self._parse_list_history_command()
elif self._match('INDOUBT'):
self._expect('TRANSACTIONS')
self._parse_list_indoubt_transactions_command()
elif self._match('NODE'):
self._expect('DIRECTORY')
self._parse_list_node_directory_command()
elif self._match_one_of(['USER', 'SYSTEM']):
self._expect_sequence(['ODBC', 'DATA', 'SOURCES'])
self._parse_list_odbc_data_sources_command()
elif self._match('ODBC'):
self._expect_sequence(['DATA', 'SOURCES'])
self._parse_list_odbc_data_sources_command()
elif self._match_one_of(['PACKAGES', 'TABLES']):
                    self._parse_list_tables_command()
elif self._match('TABLESPACES'):
if self._match('CONTAINERS'):
self._parse_list_tablespace_containers_command()
else:
self._parse_list_tablespaces_command()
elif self._match('UTILITIES'):
self._parse_list_utilities_command()
else:
self._expected_one_of([
'ACTIVE',
'ADMIN',
'APPLICATIONS',
'COMMAND',
'DATABASE',
'DB',
'DBPARTITIONNUMS',
'DCS',
'DRDA',
'HISTORY',
'INDOUBT',
'NODE',
'NODES',
'ODBC',
'PACKAGES',
'SYSTEM',
'TABLES',
'TABLESPACES',
'USER',
'UTILITIES',
])
elif self._match('LOAD'):
if self._match('QUERY'):
self._parse_load_query_command()
else:
self._parse_load_command()
elif self._match('MIGRATE'):
self._expect_one_of(['DATABASE', 'DB'])
self._parse_migrate_db_command()
elif self._match('ON'):
self._parse_on_command()
elif self._match('PING'):
self._parse_ping_command()
elif self._match_one_of(['PRECOMPILE', 'PREP']):
self._parse_precompile_command()
elif self._match('PRUNE'):
if self._match('HISTORY'):
self._parse_prune_history_command()
elif self._match('LOGFILE'):
self._parse_prune_logfile_command()
else:
self._expected_one_of(['HISTORY', 'LOGFILE'])
elif self._match('PUT'):
self._expect('ROUTINE')
self._parse_put_routine_command()
elif self._match('QUERY'):
self._expect('CLIENT')
self._parse_query_client_command()
elif self._match('QUIESCE'):
if self._match('TABLESPACES'):
self._parse_quiesce_tablespaces_command()
else:
self._parse_quiesce_command()
elif self._match('QUIT'):
self._parse_quit_command()
elif self._match('REBIND'):
self._parse_rebind_command()
elif self._match('RECOVER'):
self._expect_one_of(['DATABASE', 'DB'])
self._parse_recover_db_command()
elif self._match('REDISTRIBUTE'):
self._expect_sequence(['DATABASE', 'PARTITION', 'GROUP'])
self._parse_redistribute_database_partition_group_command()
elif self._match('REFRESH'):
if self._match('LDAP'):
self._parse_refresh_ldap_command()
else:
raise ParseBacktrack()
elif self._match('REGISTER'):
if self._match('XMLSCHEMA'):
self._parse_register_xmlschema_command()
elif self._match('XSROBJECT'):
self._parse_register_xsrobject_command()
else:
self._parse_register_command()
elif self._match('REORG'):
self._parse_reorg_command()
elif self._match('REORGCHK'):
self._parse_reorgchk_command()
elif self._match('RESET'):
if self._match('ADMIN'):
self._expect_one_of(['CONFIGURATION', 'CONFIG', 'CFG'])
self._parse_reset_admin_cfg_command()
elif self._match('ALERT'):
self._expect_one_of(['CONFIGURATION', 'CONFIG', 'CFG'])
self._parse_reset_alert_cfg_command()
elif self._match_one_of(['DATABASE', 'DB']):
if self._match('MANAGER'):
self._expect_one_of(['CONFIGURATION', 'CONFIG', 'CFG'])
self._parse_reset_dbm_cfg_command()
else:
self._expect_one_of(['CONFIGURATION', 'CONFIG', 'CFG'])
self._parse_reset_db_cfg_command()
elif self._match('DBM'):
self._expect_one_of(['CONFIGURATION', 'CONFIG', 'CFG'])
self._parse_reset_dbm_cfg_command()
elif self._match('MONITOR'):
self._parse_reset_monitor_command()
else:
self._expected_one_of([
'ADMIN',
'ALERT',
'DATABASE',
'DB',
'DBM',
'MONITOR',
])
elif self._match('RESTART'):
self._expect_one_of(['DATABASE', 'DB'])
self._parse_restart_db_command()
elif self._match('RESTORE'):
self._expect_one_of(['DATABASE', 'DB'])
self._parse_restore_db_command()
elif self._match('REWIND'):
self._expect('TAPE')
self._parse_rewind_tape_command()
elif self._match('ROLLFORWARD'):
self._expect_one_of(['DATABASE', 'DB'])
self._parse_rollforward_db_command()
elif self._match('RUNSTATS'):
self._parse_runstats_command()
elif self._match('SET'):
if self._match('CLIENT'):
self._parse_set_client_command()
elif self._match('RUNTIME'):
self._expect('DEGREE')
self._parse_set_runtime_degree_command()
elif self._match('SERVEROUTPUT'):
self._parse_set_serveroutput_command()
elif self._match('TABLESPACE'):
self._expect('CONTAINERS')
self._parse_set_tablespace_containers_command()
elif self._match('TAPE'):
self._expect('POSITION')
self._parse_set_tape_position_command()
elif self._match('UTIL_IMPACT_PRIORITY'):
self._parse_set_util_impact_priority_command()
elif self._match('WORKLOAD'):
self._parse_set_workload_command()
elif self._match('WRITE'):
self._parse_set_write_command()
else:
raise ParseBacktrack()
elif self._match('START'):
if self._match('HADR'):
self._parse_start_hadr_command()
elif self._match_one_of(['DATABASE', 'DB']):
self._expect('MANAGER')
self._parse_start_dbm_command()
elif self._match('DBM'):
self._parse_start_dbm_command()
else:
self._expected_one_of(['HADR', 'DATABASE', 'DB', 'DBM'])
elif self._match('STOP'):
if self._match('HADR'):
self._parse_stop_hadr_command()
elif self._match_one_of(['DATABASE', 'DB']):
self._expect('MANAGER')
self._parse_stop_dbm_command()
elif self._match('DBM'):
self._parse_stop_dbm_command()
else:
self._expected_one_of(['HADR', 'DATABASE', 'DB', 'DBM'])
elif self._match('TAKEOVER'):
self._parse_takeover_hadr_command()
elif self._match('TERMINATE'):
self._parse_terminate_command()
elif self._match('UNCATALOG'):
self._parse_uncatalog_command()
elif self._match('UNQUIESCE'):
self._parse_unquiesce_command()
elif self._match('UPDATE'):
if self._match('ADMIN'):
self._expect_one_of(['CONFIGURATION', 'CONFIG', 'CFG'])
self._parse_update_admin_cfg_command()
elif self._match('ALERT'):
self._expect_one_of(['CONFIGURATION', 'CONFIG', 'CFG'])
self._parse_update_alert_cfg_command()
elif self._match_sequence(['ALTERNATE', 'SERVER']):
self._parse_update_alternate_server_command()
elif self._match('CLI'):
self._expect_one_of(['CONFIGURATION', 'CONFIG', 'CFG'])
self._parse_update_cli_cfg_command()
elif self._match_sequence(['COMMAND', 'OPTIONS']):
self._parse_update_command_options_command()
elif self._match('CONTACT'):
self._parse_update_contact_command()
elif self._match('CONTACTGROUP'):
self._parse_update_contactgroup_command()
elif self._match_one_of(['DATABASE', 'DB']):
if self._match('MANAGER'):
self._expect_one_of(['CONFIGURATION', 'CONFIG', 'CFG'])
self._parse_update_dbm_cfg_command()
else:
self._expect_one_of(['CONFIGURATION', 'CONFIG', 'CFG'])
self._parse_update_db_cfg_command()
elif self._match('DBM'):
self._expect_one_of(['CONFIGURATION', 'CONFIG', 'CFG'])
self._parse_update_dbm_cfg_command()
elif (
self._match_sequence(['HEALTH', 'NOTIFICATION', 'CONTACT', 'LIST'])
or self._match_sequence(['NOTIFICATION', 'LIST'])
):
self._parse_update_notification_list_command()
elif self._match('HISTORY'):
self._parse_update_history_command()
elif self._match_sequence(['LDAP', 'NODE']):
self._parse_update_ldap_node_command()
elif self._match_sequence(['MONITOR', 'SWITCHES']):
self._parse_update_monitor_switches_command()
elif self._match('XMLSCHEMA'):
self._parse_update_xmlschema_command()
else:
raise ParseBacktrack()
elif self._match('UPGRADE'):
self._expect_one_of(['DATABASE', 'DB'])
self._parse_upgrade_db_command()
else:
raise ParseBacktrack()
except ParseBacktrack:
self._restore_state()
self._parse_statement()
else:
self._forget_state()
def _parse_top(self):
# Override _parse_top to make a CLP command the top of the parse tree
self._parse_command()
def _parse_init(self, tokens):
# Override _parse_init to set up the output lists (produces, consumes,
# etc.)
super(DB2ZOSScriptParser, self)._parse_init(tokens)
self.produces = []
self.consumes = []
self.connections = []
self.current_connection = None
self.current_user = None
def _save_state(self):
# Override _save_state to save the state of the output lists (produces,
# consumes, etc.)
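        # NB: the tuple layout here must stay in sync with _restore_state
        # below, which pops and unpacks the fields in the same order.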
self._states.append((
self._index,
self._level,
len(self._output),
self.current_schema,
self.current_user,
self.current_connection,
len(self.produces),
len(self.consumes),
len(self.connections),
))
def _restore_state(self):
# Override _restore_state to restore the state of the output lists
# (produces, consumes, etc.)
(
self._index,
self._level,
output_len,
self.current_schema,
self.current_user,
self.current_connection,
produces_len,
consumes_len,
logins_len,
) = self._states.pop()
del self.produces[produces_len:]
del self.consumes[consumes_len:]
del self.connections[logins_len:]
del self._output[output_len:]
|
gpl-3.0
| 2,388,590,240,867,676,700
| 39.435933
| 143
| 0.47767
| false
| 4.567151
| false
| false
| false
|
jp-security/LeagueStats
|
app/auth/forms.py
|
1
|
1957
|
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField, BooleanField, SubmitField, ValidationError, SelectField, IntegerField, DecimalField
from wtforms.validators import Required, Email, Length, Regexp, EqualTo, NumberRange
from ..models import User
class LoginForm(FlaskForm):
email = StringField('Email', validators=[Required(), Length(1, 64), Email()])
password = PasswordField('Password', validators=[Required()])
remember_me = BooleanField('Keep me logged in')
submit = SubmitField('Log In')
class RegistrationForm(FlaskForm):
email = StringField('Email', validators=[Required(), Length(1, 64), Email()])
username = StringField('Username', validators=[Required(), Length(1, 64), Regexp('^[A-Za-z][A-Za-z0-9_.]*$', 0,
'Usernames must have only letters, '
'numbers, dots or underscores')])
    password = PasswordField('Password', validators=[Required(), EqualTo('password2', message='Passwords must match.')])
password2 = PasswordField('Confirm password', validators=[Required()])
submit = SubmitField('Register')
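    # WTForms automatically invokes methods named validate_<fieldname> as
    # extra inline validators for the matching field.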
def validate_email(self, field):
if User.query.filter_by(email=field.data).first():
raise ValidationError('Email already registered.')
def validate_username(self, field):
if User.query.filter_by(username=field.data).first():
            raise ValidationError('Username already in use.')
class ChangePasswordForm(FlaskForm):
old_password = PasswordField('Old Password', validators=[Required()])
password = PasswordField('New Password', validators=[Required(), EqualTo('password2', message='Passwords must match')])
password2 = PasswordField('Confirm new password', validators=[Required()])
submit = SubmitField('Updated Password')
|
gpl-3.0
| -6,136,674,587,046,894,000
| 56.558824
| 131
| 0.662749
| false
| 4.681818
| false
| false
| false
|
hilarry/cmdb
|
cmdb/settings.py
|
1
|
2308
|
"""
Django settings for cmdb project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '*f)1h7v-ed7bajus^ykj0fe5n*#ld57m@4ca=a3!%v%3@o_7p#'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
#'bootstrap_admin',
#'grappelli',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'device_manage',
'idcroom_manage',
'operation',
#'bootstrap3',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'cmdb.urls'
WSGI_APPLICATION = 'cmdb.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'cmdb',
'USER': 'cmdb',
'PASSWORD': 'cmdb',
'HOST': 'localhost',
'PORT': '3306',
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'zh-cn'
TIME_ZONE = 'Asia/Shanghai'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
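# Optional machine-specific overrides: if a local_settings module is
# importable, any names it defines replace the defaults above.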
try:
from local_settings import *
except ImportError:
pass
|
apache-2.0
| 5,920,079,684,958,420,000
| 22.313131
| 71
| 0.69974
| false
| 3.283073
| false
| false
| false
|
recipy/recipy
|
integration_test/test_packages.py
|
1
|
18672
|
"""
recipy test case runner.
Run tests to check that recipy logs information on input and output
functions invoked by scripts which use packages that recipy has been
configured to log.
Tests are specified using a [YAML](http://yaml.org/) (YAML Ain't
Markup Language) configuration file. YAML syntax is:
* `---` indicates the start of a document.
* `:` denotes a dictionary. `:` must be followed by a space.
* `-` denotes a list.
The test configuration file has format:
---
script: SCRIPT
[standalone: True|False]
libraries: [LIBRARY, LIBRARY, ... ]
test_cases:
- libraries: [LIBRARY, LIBRARY, ... ]
arguments: [..., ..., ...]
inputs: [INPUT, INPUT, ...]
outputs: [OUTPUT, OUTPUT, ...]
- libraries: [LIBRARY, LIBRARY, ... ]
arguments: [..., ..., ...]
inputs: [INPUT, INPUT, ...]
outputs: [OUTPUT, OUTPUT, ...]
[ skip: "Known issue with recipy" ]
[ skip_py_version: [3.4, ...] ]
- ...
---
script: SCRIPT
...
where each script to be tested is defined by:
* 'SCRIPT': script, with a relative or absolute path. For recipy
sample scripts, the script is assumed in a sub-directory
"integration_test/packages".
* 'standalone': is the script a standalone script? If "False", or if
omitted, then the script is assumed to be a recipy sample script,
runnable via the command 'python -m
integration_test.packages.<script>'.
* 'libraries': A list of zero or more libraries used by the script,
which are expected to be logged by recipy when the script is
run regardless of arguments (i.e. any libraries common to all test
cases). If none, then this can be omitted.
* One or more test cases, each of which defines:
- 'libraries': A list of zero or more libraries used by the script,
which are expected to be logged by recipy when the script is
run with the given arguments. If none, then this can be
omitted.
- 'arguments': A list of arguments to be passed to the script. If
none, then this can be omitted.
- 'inputs': A list of zero or more input files which the script will
read, and which are expected to be logged by recipy when running
the script with the arguments. If none, then this can be omitted.
- 'outputs': A list of zero or more output files which the script
will write, and which are expected to be logged by recipy when
running the script with the arguments. If none, then this can be
omitted.
- 'skip': An optional value. If present this test case is marked as
skipped. The value is the reason for skipping the test case.
- 'skip_py_version': An optional value. If present this test case is marked
as skipped if the current Python version is in the list of values. Should
be used when a patched library does not support a Python version that is
supported by recipy.
For example:
---
script: run_numpy.py
libraries: [numpy]
test_cases:
- arguments: [loadtxt]
inputs: [input.csv]
- arguments: [savetxt]
outputs: [output.csv]
- arguments: [load_and_save_txt]
inputs: [input.csv]
outputs: [output.csv]
---
script: "/home/users/user/run_my_script.py"
standalone: True
test_cases:
- arguments: [ ]
libraries: [ numpy ]
outputs: [ data.csv ]
It is up to the developer to ensure the 'libraries', 'input' and
'output' lists correctly record the libraries, input and output files
that it is expected recipy will log when the script is run with the
given arguments.
The test configuration file is provided via an environment variable,
'RECIPY_TEST_CONFIG'. If undefined, then a default of
'integration_test/config/test_packages.yml' is assumed.
"""
import os
import os.path
import sys
import pytest
from integration_test.database import DatabaseError
from integration_test import environment
from integration_test.file_utils import load_yaml
from integration_test import helpers
from integration_test import recipy_environment as recipyenv
SCRIPT = "script"
""" Test case configuration key. """
STANDALONE = "standalone"
""" Test case configuration key. """
TEST_CASES = "test_cases"
""" Test case configuration key. """
LIBRARIES = "libraries"
""" Test case configuration key. """
ARGUMENTS = "arguments"
""" Test case configuration key. """
INPUTS = "inputs"
""" Test case configuration key. """
OUTPUTS = "outputs"
""" Test case configuration key. """
SKIP = "skip"
""" Test case configuration key. """
SKIP_PY_VERSION = "skip_py_version"
""" Test case configuration key. """
TEST_CONFIG_ENV = "RECIPY_TEST_CONFIG"
""" Environment variable for recipy test configuration file name """
DEFAULT_CONFIG = "integration_test/config/test_packages.yml"
""" Default recipy test configuration file name """
DEFAULT_SAMPLES = "integration_test/packages"
""" Default recipy sample scripts directory """
class ConfigError(Exception):
"""Test configuration error."""
def __init__(self, message, exception=None):
"""Create error.
:param message: Message
:type message: str or unicode
:param exception: Exception
        :type exception: Exception
"""
super(ConfigError, self).__init__()
self._message = message
self._exception = exception
def __str__(self):
"""Get error as a formatted string.
:return: formatted string
:rtype: str or unicode
"""
message = self._message
if self._exception is not None:
message += " : " + str(self._exception)
return repr(message)
@property
def exception(self):
"""Get exception.
        :return: Exception
        :rtype: Exception
"""
return self._exception
def get_test_cases():
"""
py.test callback to associate each test script with its test
cases. This function:
* Gets the test configuration file name from the environment
variable 'RECIPY_TEST_CONFIG'. If undefined, then a default of
'integration_test/config/test_packages.yml' is assumed.
* Loads the test configuration file.
* Creates a list of standalone tuples, each representing one
test case, using get_script_test_cases.
    py.test parameterized testing generates one test function per
tuple.
:return: test cases
:rtype: list of (str or unicode, str or unicode, dict)
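    For example, to run against a custom configuration (the file name is
    illustrative):
        RECIPY_TEST_CONFIG=my_tests.yml py.test integration_test/test_packages.py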
"""
config_file = helpers.get_environment_value(TEST_CONFIG_ENV,
DEFAULT_CONFIG)
configuration = load_yaml(config_file)
return get_script_test_cases(configuration, DEFAULT_SAMPLES)
def get_test_case_function_name(script_test_case):
"""
py.test callback to generate test case function names.
Function names are of form 'script_arguments' where 'script'
and 'arguments' are the 'script_path' conjoined to the test case's
'arguments' with with all forward slashes, backslashes, colons,
semi-colons and spaces replaced by '_'.
:param script_test_case: Script path, command, test case
specification (a tuple from get_script_test_cases).
:type script_test_case: (str or unicode, str or unicode, dict)
:return: Test case function name
:rtype: str or unicode
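    For example, script path "integration_test/packages/run_numpy.py" with
    arguments ['loadtxt'] yields "run_numpy_py_loadtxt".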
"""
[script_path, _, test_case] = script_test_case
arguments = [str(argument) for argument in test_case[ARGUMENTS]]
function_name = "_".join(arguments)
function_name = os.path.split(script_path)[1] + "_" + function_name
for char in [" ", "\\", "/", ":", ";", "."]:
function_name = function_name.replace(char, "_")
return function_name
def get_script_test_cases(configurations, recipy_samples_directory):
"""
Creates a list of standalone tuples, each representing one test
case.
This function takes test configurations, a list of dictionaries,
each of which has a 'script', optional 'standalone' flag, optional
    'libraries' list and 'test_cases', a list of one or more test cases
(each of which is a dictionary of 'libraries', 'arguments',
'inputs', 'outputs', optional 'skip').
It returns a list of tuples (script path, command, test case) where:
* script_path is the path to the script:
- If the test configuration has a 'standalone' value of "False",
or no such value, then the script is assumed to be a recipy
sample script in "integration_test/packages/".
- Otherwise, the 'script' configuration value is used as-is.
    * command is the command-line invocation that will be used to run
the script (not including "python" or any arguments, which are
test-case specific):
- If the test configuration has a 'standalone' value of "False",
or no such value, then the command to run the script is
assumed to be "-m integration_test.packages.SCRIPT"
- Otherwise, the 'script' configuration value is used as-is.
* test_case is a single test case configuration, with any common
libraries appended to its 'libraries'.
If any test case contains a 'skip' entry then that test case is marked
up via pytest.mark.skip.
:param configurations: Test configurations
    :type configurations: list of dict
:param recipy_samples_directory: directory with recipy samples
:type recipy_samples_directory: str or unicode
:return: test cases
:rtype: list of (str or unicode, str or unicode, dict)
"""
test_cases = []
for configuration in configurations:
script = configuration[SCRIPT]
if STANDALONE not in configuration:
# recipy sample test
script_path = os.path.join(recipy_samples_directory, script)
# e.g. integration_test/packages/run_numpy.py
script_module = os.path.splitext(script_path)[0]
# e.g. integration_test/packages/run_numpy
script_module = script_module.replace("/", ".")
script_module = script_module.replace("\\", ".")
# e.g. integration_test.packages.run_numpy
command = ["-m", script_module]
# e.g. -m integration_test.packages.run_numpy
else:
script_path = script
command = [script]
if LIBRARIES in configuration:
common_libraries = configuration[LIBRARIES]
else:
common_libraries = []
for test_case in configuration[TEST_CASES]:
if LIBRARIES in test_case:
test_case[LIBRARIES].extend(common_libraries)
else:
test_case[LIBRARIES] = common_libraries
single_test_case = (script_path, command, test_case)
if SKIP in test_case:
reason = get_test_case_function_name(single_test_case)
reason = reason + ": " + test_case[SKIP]
single_test_case = pytest.mark.skip(
                    reason=reason)(single_test_case)
if SKIP_PY_VERSION in test_case:
py_version = '{}.{}'.format(sys.version_info.major,
sys.version_info.minor)
to_skip = [str(num) for num in test_case[SKIP_PY_VERSION]]
reason = get_test_case_function_name(single_test_case)
reason = reason + ": unsupported Python version " + py_version
single_test_case = pytest.mark.skipif(
py_version in to_skip,
                    reason=reason)(single_test_case)
test_cases.append(single_test_case)
return test_cases
def run_test_case(script_path, command, test_case):
"""
Run a single test case. This runs a script using arguments in
test_case and validates that recipy has logged information
about the script, also using data in test_case.
test_case is assumed to have the following
entries:
* 'libraries': a list of one or more libraries e.g. ['numpy'].
* 'arguments': a list of script arguments e.g. ['loadtxt'],
['savetxt']. If none, then this can be omitted.
* 'inputs': a list of zero or more input files which running
the script with the argument will read e.g. ['data.csv']. If
none, then this can be omitted.
* 'outputs': a list of zero or more output files which running
the script with the argument will write
e.g. ['data.csv']. If none, then this can be omitted.
:param script_path: Path to the script.
:type script_path: str or unicode
    :param command: Command-line invocation used to run the script
(not including "python" or any arguments, which are test-case
specific).
:type command: str or unicode
:param test_case: Test case configuration.
:type test_case: dict
"""
number_of_logs = 0
try:
number_of_logs =\
helpers.get_number_of_logs(recipyenv.get_recipydb())
except DatabaseError:
# Database may not exist if running tests for first time so
# give benefit of doubt at this stage and assume running script
# will bring it into life.
pass
libraries = test_case[LIBRARIES]
if ARGUMENTS in test_case:
arguments = test_case[ARGUMENTS]
else:
arguments = []
# Execute script
_, _ = helpers.execute_python(command + arguments, 0)
# Validate recipy database
log, _ = helpers.get_log(recipyenv.get_recipydb())
# Number of logs
new_number_of_logs =\
helpers.get_number_of_logs(recipyenv.get_recipydb())
assert new_number_of_logs == (number_of_logs + 1),\
("Unexpected number of logs " + new_number_of_logs)
# Script that was invoked
check_script(script_path, log["script"],
arguments, log["command_args"])
# Libraries
check_libraries(libraries, log["libraries"])
# Inputs and outputs (local filenames only)
check_input_outputs(test_case, INPUTS, log["inputs"])
check_input_outputs(test_case, OUTPUTS, log["outputs"])
# Dates
check_dates(log["date"], log["exit_date"])
# Execution environment
check_environment(log["command"], log["environment"])
# Miscellaneous
assert environment.get_user() == log["author"], "Unexpected author"
assert log["description"] == "", "Unexpected description"
def check_script(script, logged_script, arguments, logged_arguments):
"""
Check script and arguments logged by recipy.
:param script: Script specified in test configuration
:type script: str or unicode
:param logged_script: Script logged by recipy
:type logged_script: str or unicode
:param arguments: Arguments specified in test configuration
:type arguments: list
:param logged_arguments: Arguments logged by recipy
:type logged_arguments: list
"""
# Use os.path.abspath as os.path.samefile is not supported in
# Python 2 on Windows.
assert os.path.abspath(script) == os.path.abspath(logged_script),\
"Unexpected script"
assert " ".join(arguments) == logged_arguments, "Unexpected command_args"
def check_libraries(libraries, logged_libraries):
"""
Check libraries logged by recipy.
:param libraries: Libraries specified in test configuration
:type libraries: list of str or unicode
:param logged_libraries: Libraries logged by recipy
:type logged_libraries: list of str or unicode
:raises ConfigError: if any library is not installed
"""
packages = environment.get_packages()
for library in libraries:
if environment.is_package_installed(packages, library):
version = environment.get_package_version(packages, library)
library_version = library + " v" + version
assert library_version in logged_libraries,\
("Could not find library " + library_version)
else:
raise ConfigError(("Library {} is not installed".format(library)))
def check_dates(logged_start_date, logged_end_date):
"""
Check dates logged by recipy.
:param logged_start_date: Start date logged by recipy
:type logged_start_date: str or unicode
:param logged_end_date: End date logged by recipy
:type logged_end_date: str or unicode
"""
try:
start_date = environment.get_tinydatestr_as_date(logged_start_date)
except ValueError as _:
assert False, "date is not a valid date string"
try:
exit_date = environment.get_tinydatestr_as_date(logged_end_date)
except ValueError as _:
assert False, "end_date is not a valid date string"
assert start_date <= exit_date, "date is not before exit_date"
def check_environment(logged_command, logged_environment):
"""
Check environment logged by recipy.
:param logged_command: Python executable logged by recipy
:type logged_command: str or unicode
:param logged_environment: Operating system and Python
version logged by recipy
    :type logged_environment: list of str or unicode
"""
assert environment.get_python_exe() == logged_command,\
"Unexpected command"
assert environment.get_os() in logged_environment,\
"Cannot find operating system in environment"
python_version = "python " + environment.get_python_version()
assert python_version in logged_environment,\
"Cannot find Python in environment"
def check_input_outputs(test_case, io_key, logged_io):
"""
Check inputs/outputs logged by recipy.
:param test_case: Test case configuration
:type test_case: dict
:param io_key: "inputs" or "outputs", key into test_case
:type io_key: str or unicode
:param logged_io: Inputs/outputs logged by recipy
:type logged_io: list
"""
if io_key in test_case:
io_files = test_case[io_key]
else:
io_files = []
assert len(io_files) == len(logged_io),\
("Unexpected number of " + io_key)
# Convert logged files to local file names.
logged_files = [os.path.basename(file_name)
for [file_name, _] in logged_io]
for io_file in io_files:
assert io_file in logged_files,\
("Could not find " + io_key + " " + io_file)
@pytest.mark.parametrize("script_test_case",
get_test_cases(),
ids=get_test_case_function_name)
def test_scripts(script_test_case):
"""
Run a test defined in the recipy test configuration.
    :param script_test_case: Script path, command, test case
specification - consistent with a tuple from
get_script_test_cases.
:type script_test_case: (str or unicode, str or unicode, dict)
"""
(script_path, command, test_case) = script_test_case
run_test_case(script_path, command, test_case)
|
apache-2.0
| -3,213,694,212,848,131,600
| 36.121272
| 78
| 0.655848
| false
| 4.031959
| true
| false
| false
|