Dataset schema (one record per source file; ranges are observed min–max):

| column | dtype | range / classes |
|---|---|---|
| repo_name | string | lengths 5–92 |
| path | string | lengths 4–221 |
| copies | string | 19 classes |
| size | string | lengths 4–6 |
| content | string | lengths 766–896k |
| license | string | 15 classes |
| hash | int64 | -9,223,277,421,539,062,000 to 9,223,102,107B |
| line_mean | float64 | 6.51 to 99.9 |
| line_max | int64 | 32 to 997 |
| alpha_frac | float64 | 0.25 to 0.96 |
| autogenerated | bool | 1 class |
| ratio | float64 | 1.5 to 13.6 |
| config_test | bool | 2 classes |
| has_no_keywords | bool | 2 classes |
| few_assignments | bool | 1 class |

Each record below is rendered as a header line (repo_name, path, copies, size), the file content, and a trailing metadata line.
repo_name: toomanycats/IndeedScraper | path: test.py | copies: 1 | size: 1136 | content:
from flask import Flask, render_template
from bokeh.charts import Bar
from bokeh.embed import components
from bokeh.util.string import encode_utf8
from bokeh.plotting import figure
import pandas as pd
app = Flask(__name__)
@app.route('/')
def test():
kws = ["one", "two", "cat", "dog"]
count = [23, 45, 11, 87]
df = pd.DataFrame({"kw": kws,
"count": count
})
#p = Bar(df, 'kw')
    df.sort("count", inplace=True)  # pandas < 0.17 API; newer pandas uses df.sort_values("count", inplace=True)
df.set_index("kw", inplace=True)
series = df['count']
p = figure(width=1000, height=1000, y_range=series.index.tolist())
j = 1
for k, v in series.iteritems():
        w = v  # full bar width; the original "v / 2 * 2" truncates odd counts by 1 under integer division
p.rect(x=v/2,
y=j,
width=w,
height=0.4,
color=(76, 114, 176),
width_units="data",
height_units="data"
)
j += 1
script, div = components(p)
page = render_template('test.html', div=div, script=script)
return encode_utf8(page)
if __name__ == "__main__":
app.run(debug=True,
threaded=False
)
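# Usage sketch (assumes templates/test.html renders the {{ script }} and
# {{ div }} variables that bokeh.embed.components() produces):
#   $ python test.py
#   then open http://127.0.0.1:5000/ to see the horizontal bar chart.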
license: mit | hash: -600,367,166,198,424,400 | line_mean: 22.666667 | line_max: 70 | alpha_frac: 0.514085 | autogenerated: false | ratio: 3.55 | config_test: false | has_no_keywords: false | few_assignments: false

repo_name: stpeter/memberbot | path: sleekxmpp/plugins/xep_0009.py | copies: 1 | size: 7589 | content:
"""
XEP-0009 XMPP Remote Procedure Calls
"""
from __future__ import with_statement
import base
import logging
import threading  # needed for the self.lock used by register_call/acl_* below
from xml.etree import cElementTree as ET
import copy
import time
import base64
def py2xml(*args):
params = ET.Element("params")
for x in args:
param = ET.Element("param")
param.append(_py2xml(x))
params.append(param) #<params><param>...
return params
def _py2xml(*args):
for x in args:
val = ET.Element("value")
if type(x) is int:
i4 = ET.Element("i4")
i4.text = str(x)
val.append(i4)
if type(x) is bool:
boolean = ET.Element("boolean")
boolean.text = str(int(x))
val.append(boolean)
elif type(x) is str:
string = ET.Element("string")
string.text = x
val.append(string)
elif type(x) is float:
double = ET.Element("double")
double.text = str(x)
val.append(double)
elif type(x) is rpcbase64:
b64 = ET.Element("Base64")
b64.text = x.encoded()
val.append(b64)
elif type(x) is rpctime:
iso = ET.Element("dateTime.iso8601")
iso.text = str(x)
val.append(iso)
elif type(x) is list:
array = ET.Element("array")
data = ET.Element("data")
for y in x:
data.append(_py2xml(y))
array.append(data)
val.append(array)
elif type(x) is dict:
struct = ET.Element("struct")
for y in x.keys():
member = ET.Element("member")
name = ET.Element("name")
name.text = y
member.append(name)
member.append(_py2xml(x[y]))
struct.append(member)
val.append(struct)
return val
def xml2py(params):
vals = []
for param in params.findall('param'):
vals.append(_xml2py(param.find('value')))
return vals
def _xml2py(value):
if value.find('i4') is not None:
return int(value.find('i4').text)
if value.find('int') is not None:
return int(value.find('int').text)
if value.find('boolean') is not None:
        return bool(int(value.find('boolean').text))  # text is "0"/"1"; bool() on any non-empty string is True
if value.find('string') is not None:
return value.find('string').text
if value.find('double') is not None:
return float(value.find('double').text)
if value.find('Base64') is not None:
return rpcbase64(value.find('Base64').text)
if value.find('dateTime.iso8601') is not None:
return rpctime(value.find('dateTime.iso8601'))
if value.find('struct') is not None:
struct = {}
for member in value.find('struct').findall('member'):
struct[member.find('name').text] = _xml2py(member.find('value'))
return struct
if value.find('array') is not None:
array = []
for val in value.find('array').find('data').findall('value'):
array.append(_xml2py(val))
return array
raise ValueError()
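# Round-trip example (sketch) for the two converters above:
#   params = py2xml(42, 'hello', [1.5, 2.5], {'a': 1})
#   xml2py(params)  ->  [42, 'hello', [1.5, 2.5], {'a': 1}]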
class rpcbase64(object):
def __init__(self, data):
#base 64 encoded string
self.data = data
def decode(self):
        return base64.decodestring(self.data)  # original referenced the bare name "data", a NameError
def __str__(self):
return self.decode()
def encoded(self):
return self.data
class rpctime(object):
def __init__(self,data=None):
#assume string data is in iso format YYYYMMDDTHH:MM:SS
if type(data) is str:
self.timestamp = time.strptime(data,"%Y%m%dT%H:%M:%S")
elif type(data) is time.struct_time:
self.timestamp = data
elif data is None:
self.timestamp = time.gmtime()
else:
raise ValueError()
def iso8601(self):
#return a iso8601 string
return time.strftime("%Y%m%dT%H:%M:%S",self.timestamp)
def __str__(self):
return self.iso8601()
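# Example (sketch) of the rpctime wrapper:
#   rpctime('20090314T15:09:26').iso8601()  ->  '20090314T15:09:26'
#   str(rpctime())  ->  the current UTC time in the same YYYYMMDDTHH:MM:SS form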
class JabberRPCEntry(object):
def __init__(self,call):
self.call = call
self.result = None
self.error = None
self.allow = {} #{'<jid>':['<resource1>',...],...}
self.deny = {}
def check_acl(self, jid, resource):
#Check for deny
if jid in self.deny.keys():
if self.deny[jid] == None or resource in self.deny[jid]:
return False
#Check for allow
        if self.allow == None:
return True
if jid in self.allow.keys():
if self.allow[jid] == None or resource in self.allow[jid]:
return True
return False
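    # ACL semantics (sketch): deny is checked first and wins over allow; a
    # None allow/deny dict means "match any jid", and a None resource list
    # means "match any resource of that jid". For example:
    #   entry.acl_allow('user@host', None)    # any resource of user@host
    #   entry.acl_deny('user@host', 'evil')   # ...except the resource 'evil'
    #   entry.check_acl('user@host', 'home')  ->  True
    #   entry.check_acl('user@host', 'evil')  ->  False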
def acl_allow(self, jid, resource):
if jid == None:
self.allow = None
elif resource == None:
self.allow[jid] = None
elif jid in self.allow.keys():
self.allow[jid].append(resource)
else:
self.allow[jid] = [resource]
def acl_deny(self, jid, resource):
if jid == None:
self.deny = None
elif resource == None:
self.deny[jid] = None
elif jid in self.deny.keys():
self.deny[jid].append(resource)
else:
self.deny[jid] = [resource]
    def call_method(self, args):
        return self.call(*args)  # original stored the result in an unused local and returned nothing
class xep_0009(base.base_plugin):
def plugin_init(self):
self.xep = '0009'
self.description = 'Jabber-RPC'
self.xmpp.add_handler("<iq type='set'><query xmlns='jabber:iq:rpc' /></iq>", self._callMethod)
self.xmpp.add_handler("<iq type='result'><query xmlns='jabber:iq:rpc' /></iq>", self._callResult)
self.xmpp.add_handler("<iq type='error'><query xmlns='jabber:iq:rpc' /></iq>", self._callError)
self.entries = {}
        self.activeCalls = []
        self.lock = threading.Lock()  # never created in the original, though every "with self.lock:" block needs it
def post_init(self):
self.xmpp['xep_0030'].add_feature('jabber:iq:rpc')
        self.xmpp['xep_0030'].add_identity('automation','rpc')  # XEP-0009 disco category is "automation" (was misspelled "automatition")
def register_call(self, method, name=None):
#@returns an string that can be used in acl commands.
with self.lock:
if name is None:
self.entries[method.__name__] = JabberRPCEntry(method)
return method.__name__
else:
self.entries[name] = JabberRPCEntry(method)
return name
def acl_allow(self, entry, jid=None, resource=None):
#allow the method entry to be called by the given jid and resource.
#if jid is None it will allow any jid/resource.
#if resource is None it will allow any resource belonging to the jid.
with self.lock:
if self.entries[entry]:
self.entries[entry].acl_allow(jid,resource)
else:
raise ValueError()
def acl_deny(self, entry, jid=None, resource=None):
#Note: by default all requests are denied unless allowed with acl_allow.
#If you deny an entry it will not be allowed regardless of acl_allow
with self.lock:
if self.entries[entry]:
self.entries[entry].acl_deny(jid,resource)
else:
raise ValueError()
def unregister_call(self, entry):
#removes the registered call
with self.lock:
if self.entries[entry]:
del self.entries[entry]
else:
raise ValueError()
    def makeMethodCallQuery(self,pmethod,params):
        # build the <query xmlns='jabber:iq:rpc'/> payload directly; the
        # original passed an undefined local "iq" to self.xmpp.makeIqQuery here
        query = ET.Element('{jabber:iq:rpc}query')
methodCall = ET.Element('methodCall')
methodName = ET.Element('methodName')
methodName.text = pmethod
methodCall.append(methodName)
methodCall.append(params)
query.append(methodCall)
return query
def makeIqMethodCall(self,pto,pmethod,params):
iq = self.xmpp.makeIqSet()
iq.set('to',pto)
iq.append(self.makeMethodCallQuery(pmethod,params))
return iq
def makeIqMethodResponse(self,pto,pid,params):
iq = self.xmpp.makeIqResult(pid)
iq.set('to',pto)
query = self.xmpp.makeIqQuery(iq,"jabber:iq:rpc")
methodResponse = ET.Element('methodResponse')
methodResponse.append(params)
query.append(methodResponse)
return iq
def makeIqMethodError(self,pto,id,pmethod,params,condition):
iq = self.xmpp.makeIqError(id)
iq.set('to',pto)
iq.append(self.makeMethodCallQuery(pmethod,params))
iq.append(self.xmpp['xep_0086'].makeError(condition))
return iq
    def call_remote(self, pto, pmethod, *args):
        #calls a remote method. Returns the id of the Iq.
        pass
def _callMethod(self,xml):
pass
def _callResult(self,xml):
pass
def _callError(self,xml):
pass
license: gpl-2.0 | hash: 3,609,349,652,676,770,300 | line_mean: 25.798535 | line_max: 99 | alpha_frac: 0.652919 | autogenerated: false | ratio: 2.988972 | config_test: false | has_no_keywords: false | few_assignments: false

repo_name: pearu/f2py | path: extgen/scalar_rules.py | copies: 1 | size: 17398 | content:
"""
TODO: npy_void
"""
from __future__ import absolute_import
import numpy
from .capi import sctypebits
c_char = dict(\
ctype = 'signed char',
init = ' = 0',
argument_format = 'b',
return_format = 'b',
argument_title = 'a python integer (converting to C signed char)',
return_title = 'a python integer (converting from C signed char)',
init_container = 'CDeclaration',
)
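# Convention for the scalar rule dictionaries in this module (a sketch
# inferred from the keys; extgen consumes these when generating wrapper code):
#   ctype            -- the C type used in generated declarations
#   init             -- initializer text appended to the declaration
#   argument_format  -- PyArg_ParseTuple format code for the input side
#   return_format    -- Py_BuildValue format code for the output side
#   *_title          -- human-readable descriptions for generated docs
#   init_container   -- name of the container the declaration is emitted into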
c_short = dict(\
ctype = 'short int',
init = ' = 0',
argument_format = 'h',
return_format = 'h',
argument_title = 'a python integer (converting to C short int)',
return_title = 'a python integer (converting from C short int)',
init_container = 'CDeclaration',
)
c_int = dict(\
ctype = 'int',
init = ' = 0',
argument_format = 'i',
return_format = 'i',
argument_title = 'a python integer (converting to C int)',
return_title = 'a python integer (converting from C int)',
init_container = 'CDeclaration',
)
c_long = dict(\
ctype = 'long',
init = ' = 0',
argument_format = 'l',
return_format = 'l',
argument_title = 'a python integer (converting to C long int)',
return_title = 'a python integer (converting from C long int)',
init_container = 'CDeclaration',
)
c_long_long = dict(\
ctype = 'PY_LONG_LONG',
init = ' = 0',
argument_format = 'L',
return_format = 'L',
argument_title = 'a python integer (converting to C PY_LONG_LONG)',
return_title = 'a python integer (converting from C PY_LONG_LONG)',
init_container = 'CDeclaration',
)
c_unsigned_char = dict(\
ctype = 'unsigned char',
init = ' = 0',
argument_format = 'B',
return_format = 'B',
argument_title = 'a python integer (converting to C unsigned char)',
return_title = 'a python integer (converting from C unsigned char)',
init_container = 'CDeclaration',
)
c_unsigned_short = dict(\
ctype = 'unsigned short int',
init = ' = 0',
argument_format = 'H',
return_format = 'H',
argument_title = 'a python integer (converting to C unsigned short int)',
return_title = 'a python integer (converting from C unsigned short int)',
init_container = 'CDeclaration',
)
c_unsigned_int = dict(\
ctype = 'unsigned int',
init = ' = 0',
argument_format = 'I',
return_format = 'I',
argument_title = 'a python integer (converting to C unsigned int)',
return_title = 'a python integer (converting from C unsigned int)',
init_container = 'CDeclaration',
)
c_unsigned_long = dict(\
ctype = 'unsigned long',
init = ' = 0',
argument_format = 'k',
return_format = 'k',
argument_title = 'a python integer (converting to C unsigned long int)',
return_title = 'a python integer (converting from C unsigned long int)',
init_container = 'CDeclaration',
)
c_unsigned_long_long = dict(\
ctype = 'unsigned PY_LONG_LONG',
init = ' = 0',
argument_format = 'K',
return_format = 'K',
argument_title = 'a python integer (converting to C unsigned PY_LONG_LONG)',
return_title = 'a python integer (converting from C unsigned PY_LONG_LONG)',
init_container = 'CDeclaration',
)
c_float = dict(\
ctype = 'float',
init = ' = 0.0',
argument_format = 'f',
return_format = 'f',
argument_title = 'a python floating point number (converting to C float)',
return_title = 'a python floating point number (converting from C float)',
init_container = 'CDeclaration',
)
c_double = dict(\
ctype = 'double',
init = ' = 0.0',
argument_format = 'd',
return_format = 'd',
argument_title = 'a python floating point number (converting to C double)',
return_title = 'a python floating point number (converting from C double)',
init_container = 'CDeclaration',
)
c_Py_complex = dict(\
ctype = 'Py_complex',
argument_format = 'D',
return_format = 'D',
init = ' = {0.0, 0.0}',
argument_title = 'a python complex number (converting to C Py_complex structure)',
return_title = 'a python complex number (converting from C Py_complex structure)',
init_container = 'CDeclaration',
)
c_Py_ssize_t = dict(\
ctype = 'Py_ssize_t',
argument_format = 'n',
return_format = 'n',
init = ' = 0',
argument_title = 'a python integer (converting to C Py_ssize_t)',
return_title = 'a python integer (converting from C Py_ssize_t)',
init_container = 'CDeclaration',
)
c_char1 = dict(\
ctype = 'char',
argument_format = 'c',
return_format = 'c',
init = " = '\\0'",
argument_title = 'a python character (converting to C char)',
return_title = 'a python character (converting from C char)',
init_container = 'CDeclaration',
)
c_const_char_ptr = dict(\
ctype = 'const char *',
argument_format = 'z',
return_format = 'z',
init = ' = NULL',
argument_title = 'a python string or Unicode or None object (converting to C const char *)',
return_title = 'a python string or None (converting from C char *)',
)
c_char_ptr = dict(\
ctype = 'char *',
argument_format = 'O&',
argument_converter = 'pyobj_to_char_ptr',
clean_argument_converter = 'clean_pyobj_to_char_ptr',
return_format = 'z',
init = ' = NULL',
argument_title = 'a python string (converting to C char *)',
return_title = 'a python string or None (converting from C char *)',
)
c_Py_UNICODE_ptr = dict(\
ctype = 'Py_UNICODE*',
argument_format ='u',
return_format = 'u',
init = ' = NULL',
argument_title = 'a python Unicode object (converting to C Py_UNICODE*)',
return_title = 'a python Unicode object or None (converting from C Py_UNICODE*)'
)
py_bool = dict(\
ctype = 'PyBoolObject*',
init = ' = NULL',
pyctype = 'PyBool_Type',
argument_format = 'O!',
return_format = 'N',
title = 'a python bool',
)
py_int = dict(\
ctype = 'PyObject*',
ctype_exact = 'PyIntObject*',
init = ' = NULL',
pyctype = 'PyInt_Type',
argument_format = 'O!',
return_format = 'N',
title = 'a python integer',
init_container = 'FromPyObj',
refcounted = True,
)
py_long = dict(\
ctype = 'PyLongObject*',
init = ' = NULL',
pyctype = 'PyLong_Type',
argument_format = 'O!',
return_format = 'N',
title = 'a python long integer'
)
py_float = dict(\
ctype = 'PyObject*',
init = ' = NULL',
pyctype = 'PyFloat_Type',
argument_format = 'O!',
return_format = 'N',
title = 'a python floating point number',
init_container = 'FromPyObj',
refcounted = True,
)
py_complex = dict(\
ctype = 'PyComplexObject*',
init = ' = NULL',
pyctype = 'PyComplex_Type',
argument_format = 'O!',
return_format = 'N',
title = 'a python complex number'
)
py_str = dict(\
ctype = 'PyStringObject*',
init = ' = NULL',
argument_format = 'S',
return_format = 'N',
title = 'a python string'
)
py_unicode = dict(\
ctype = 'PyUnicodeObject*',
init = ' = NULL',
argument_format = 'U',
return_format = 'N',
title = 'a python Unicode object'
)
py_buffer = dict(\
pyctype = 'PyBuffer_Type',
ctype = 'PyBufferObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
title = 'a python buffer')
py_tuple = dict(\
pyctype = 'PyTuple_Type',
ctype = 'PyTupleObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
title = 'a python tuple')
py_list = dict(\
pyctype = 'PyList_Type',
ctype = 'PyListObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
title = 'a python list')
py_dict = dict(\
pyctype = 'PyDict_Type',
ctype = 'PyDictObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
title = 'a python dictionary')
py_file = dict(\
pyctype = 'PyFile_Type',
ctype = 'PyFileObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
title = 'a python file object')
py_instance = dict(\
pyctype = 'PyInstance_Type',
ctype = 'PyObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
title = 'a python instance object')
py_function = dict(\
pyctype = 'PyFunction_Type',
ctype = 'PyFunctionObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
title = 'a python function object')
py_method = dict(\
pyctype = 'PyMethod_Type',
ctype = 'PyObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
title = 'a python instance method object')
py_module = dict(\
pyctype = 'PyModule_Type',
ctype = 'PyObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
title = 'a python module object')
py_iter = dict(\
pyctype = 'PySeqIter_Type',
ctype = 'PyObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
title = 'a python iterator')
py_property = dict(\
pyctype = 'PyProperty_Type',
ctype = 'PyObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
title = 'a python property attribute')
py_slice = dict(\
pyctype = 'PySlice_Type',
ctype = 'PyObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
title = 'a python slice object')
py_cell = dict(\
pyctype = 'PyCell_Type',
ctype = 'PyCellObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL')
py_generator = dict(\
pyctype = 'PyGen_Type',
ctype = 'PyGenObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL')
py_set = dict(\
pyctype = 'PySet_Type',
ctype = 'PySetObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
title = 'a python set object')
py_frozenset = dict(\
pyctype = 'PyFrozenSet_Type',
ctype = 'PySetObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
title = 'a python frozenset object')
py_cobject = dict(\
ctype = 'PyCObject*',
argument_format = 'O',
return_format = 'N',
init = ' = NULL',
title = 'a PyCObject object')
py_type = dict(\
pyctype = 'PyType_Type',
ctype = 'PyTypeObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
title = 'a python type object')
py_object = dict(\
ctype = 'PyObject*',
argument_format = 'O',
return_format = 'N',
init = ' = NULL',
title = 'a python object')
numeric_array = dict(\
pyctype = 'PyArray_Type',
ctype = 'PyArrayObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
title = 'a Numeric array',
require_numeric = True,
)
numpy_ndarray = dict(\
pyctype = 'PyArray_Type',
ctype = 'PyArrayObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
title = 'a numpy array',
require_numpy = True,
)
numpy_descr = dict(\
pyctype = 'PyArrayDescr_Type',
ctype = 'PyArray_Descr*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
require_numpy = True,
)
numpy_ufunc = dict(\
pyctype = 'PyUFunc_Type',
ctype = 'PyUFuncObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
title = 'a numpy universal function',
require_numpy = True,
)
numpy_iter = dict(\
pyctype = 'PyArrayIter_Type',
ctype = 'PyArrayIterObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
require_numpy = True,
)
numpy_multiiter = dict(\
pyctype = 'PyArrayMultiIter_Type',
ctype = 'PyArrayMultiIterObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
require_numpy = True,
)
npy_bool = dict(\
ctype = 'npy_bool',
init = ' = 0',
argument_format = 'O&',
argument_converter = 'pyobj_to_npy_bool',
return_format = 'O&',
return_converter = 'pyobj_from_npy_bool',
argument_title = 'a python truth value (converting to C npy_bool)',
return_title = 'a numpy bool',
require_numpy = True,
init_container = 'CDeclaration',
)
numpy_bool = dict(\
ctype = 'PyBoolScalarObject*',
init = ' = NULL',
argument_format = 'O&',
argument_converter = 'pyobj_to_numpy_bool',
return_format = 'N',
require_numpy = True,
argument_title = 'a python bool (converting to C PyBoolScalarObject*)',
return_title = 'a numpy bool',
)
numpy_string = dict(\
ctype = 'PyStringScalarObject*',
init = ' = NULL',
argument_format = 'O&',
argument_converter = 'pyobj_to_numpy_string',
return_format = 'N',
require_numpy = True,
argument_title = 'a python string (converting to C PyStringScalarObject*)',
return_title = 'a numpy string',
)
numpy_unicode = dict(\
ctype = 'PyUnicodeScalarObject*',
init = ' = NULL',
argument_format = 'O&',
argument_converter = 'pyobj_to_numpy_unicode',
return_format = 'N',
require_numpy = True,
argument_title = 'a python string (converting to C PyUnicodeScalarObject*)',
return_title = 'a numpy unicode',
)
npy_string = dict(\
typedef = 'npy_string',
ctype = 'npy_string',
init = ' = {NULL, 0}',
argument_format = 'O&',
argument_converter = 'pyobj_to_npy_string',
clean_argument_converter = 'clean_pyobj_to_npy_string',
return_format = 'O&',
return_converter = 'pyobj_from_npy_string',
require_numpy = True,
argument_title = 'a python string (converting to C npy_string)',
return_title = 'a numpy string',
)
npy_unicode = dict(\
typedef = 'npy_unicode',
ctype = 'npy_unicode',
init = ' = {NULL, 0}',
argument_format = 'O&',
argument_converter = 'pyobj_to_npy_unicode',
clean_argument_converter = 'clean_pyobj_to_npy_unicode',
return_format = 'O&',
return_converter = 'pyobj_from_npy_unicode',
require_numpy = True,
argument_title = 'a python string (converting to C npy_unicode)',
return_title = 'a numpy unicode',
)
numpy_void = dict(\
ctype = 'PyVoidScalarObject*',
init = ' = NULL',
argument_format = 'O&',
argument_converter = 'pyobj_to_numpy_void',
return_format = 'N',
require_numpy = True,
argument_title = 'a python string (converting to C PyVoidScalarObject*)',
return_title = 'a numpy void',
)
c_PY_LONG_LONG = c_long_long
c_unsigned_PY_LONG_LONG = c_unsigned_long_long
numpy_bool_ = numpy_bool
numpy_str_ = numpy_str = numpy_string0 \
= numpy_string_ = numpy_string
numpy_unicode0 = numpy_unicode_ = numpy_unicode
npy_str = npy_string
numpy_void0 = numpy_void
def _generate():
scalars = {}
for Cls_name, bits_list in list(sctypebits.items()):
if Cls_name=='Complex':
init = ' = {0.0, 0.0}'
t = 'complex'
elif Cls_name=='Float':
init = ' = 0.0'
t = 'floating point number'
else:
init = ' = 0'
t = 'integer'
for bits in bits_list:
n = Cls_name.lower() + str(bits)
Cls = Cls_name + str(bits)
ctype = 'npy_' + n
scalars[ctype] = dict(
ctype = ctype,
                pyctype = None,  # was misspelled "pycype" in the original
init = init,
argument_format = 'O&',
argument_converter = 'pyobj_to_'+ctype,
return_format = 'O&',
return_converter = 'pyobj_from_'+ctype,
require_numpy = True,
argument_title = 'a python %s (converting to C %s)' % (t,ctype),
return_title = 'a numpy %s-bit %s' % (bits, t),
init_container = 'CDeclaration',
)
ctype = 'Py%sScalarObject*' % (Cls)
ctype_name = 'numpy_' + n
scalars[ctype_name] = dict(
ctype = ctype,
pyctype = None,
init = ' = NULL',
argument_format = 'O&',
argument_converter = 'pyobj_to_'+ctype_name,
return_format = 'N',
require_numpy = True,
argument_title = 'a python %s (converting to C %s)' % (t,ctype),
return_title = 'a numpy %s-bit %s' % (bits, t)
)
return scalars
for _k, _d in list(_generate().items()):
exec(_k + ' = _d')
npy_intp = eval('npy_'+numpy.intp.__name__)
npy_int_ = eval('npy_'+numpy.int_.__name__)
npy_float = eval('npy_'+numpy.float_.__name__)
npy_complex = eval('npy_'+numpy.complex_.__name__)
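# Note (sketch): the eval() calls above bind platform-dependent aliases, e.g.
# npy_intp typically resolves to npy_int64 on a 64-bit build, while npy_float
# and npy_complex pick up numpy's default float64 / complex128 rule dicts.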
if 0:
array = dict(
c_int = dict(\
ctype='int*',
init=' = NULL',
title='a C int array',
input_title = 'a python integer sequence (converting to C int*)',
input_format = 'O',
input_object = '&%(varname)s_py',
input_frompyobj = dict(\
required = '%(varname)s_arr = PyArray_FROMANY(%(varname)s_py, NPY_INT, %(rank)s, %(rank)s, %(requirements)s);\n'
'if (%(varname)s_arr != NULL) {\n'
' %(varname)s = PyArray_DATA(%(varname)s_arr);',
),
input_cleanfrompyobj = dict(\
required = '} /*if (%(varname)s_arr != NULL)*/'
),
output_title = 'a python integer sequence (converting from C int*)',
output_format = 'N',
output_object = '%(varname)s_arr'
),
numpy_int8 = dict(\
ctype='npy_int8*',
init=' = NULL',
title='a C npy_int8 array'
)
)
license: bsd-3-clause | hash: 2,982,873,471,366,255,000 | line_mean: 26.659777 | line_max: 118 | alpha_frac: 0.575583 | autogenerated: false | ratio: 3.410704 | config_test: false | has_no_keywords: false | few_assignments: false

repo_name: iamRusty/RustyPE | path: 18/try.py | copies: 1 | size: 1347 | content:
"""
===============================================================
Trial Program for PE 18
Goal: Find the greatest path-sum.
https://projecteuler.net/problem=18
Note: The program uses FILE IO
===============================================================
"""
_FILE_NAME = "data.pe"
def extract(fileName):
f = open(fileName, "r")
data = f.readlines()
number = []
for line in data:
row = line.split()
number.append(row)
count = 0
while (count < len(number)):
number[count] = [int(i) for i in number[count]]
count += 1
f.close()
return number
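# data.pe is assumed to hold the problem-18 triangle verbatim: one row of
# whitespace-separated integers per line (15 rows on the Project Euler page).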
#Credits to Uziel Agub for introducing this method. I'm amazed
def maxPathSum(number):
numOfRows = len(number) - 1
count = numOfRows - 1
while (count > -1):
curCol = 0
while (curCol < len(number[count])):
if (number[count + 1][curCol] > number[count + 1][curCol + 1]):
number[count][curCol] += number[count + 1][curCol]
else:
number[count][curCol] += number[count + 1][curCol + 1]
curCol += 1
count -= 1
return number[0][0]
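# Worked example (sketch) of the bottom-up collapse on a 3-row triangle:
#   [1]           row 1 absorbs the better child from row 2:
#   [2, 3]    ->  [2+max(4,5), 3+max(5,6)] = [7, 9]
#   [4, 5, 6]     row 0 then becomes [1+max(7,9)] = [10], so the answer is 10.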
def main():
number = extract(_FILE_NAME)
answer = maxPathSum(number)
print(answer)
main()
license: mit | hash: -6,180,291,786,295,823,000 | line_mean: 26.659574 | line_max: 75 | alpha_frac: 0.472903 | autogenerated: false | ratio: 3.826705 | config_test: false | has_no_keywords: false | few_assignments: false

repo_name: frutik/formunculous | path: formunculous/models.py | copies: 1 | size: 18803 | content:
# This file is part of formunculous.
#
# formunculous is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# formunculous is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with formunculous. If not, see <http://www.gnu.org/licenses/>.
# Copyright 2009-2011 Carson Gee
from django.db import models
from django.contrib.auth.models import User, Group
from django.contrib.sites.models import Site
from django.contrib.contenttypes.models import ContentType
from django.contrib.localflavor.us.forms import USStateSelect
from django.contrib.localflavor.us.models import PhoneNumberField
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import smart_unicode
from django import forms
from django.conf import settings
from formunculous.widgets import *
from formunculous.storage import *
from formunculous import fields
import datetime
# Application Definition Models
class CurrentManager(models.Manager):
def current(self, **kwargs):
return self.get_query_set().filter(
start_date__lte=datetime.datetime.now(),
stop_date__gte=datetime.datetime.now(),
parent = None, sites=Site.objects.get_current(), **kwargs
)
def reviewer(self, user, **kwargs):
return self.get_query_set().filter( reviewers=user, email_only=False,
parent = None,
sites=Site.objects.get_current(),
**kwargs)
class ApplicationDefinition(models.Model):
# Site differentiation
try:
sid = settings.SITE_ID
except AttributeError:
from django.core.exceptions import ImproperlyConfigured
raise ImproperlyConfigured("You're using the Django \"sites framework\"\
without having set the SITE_ID setting. Create a site\
in your database and set the SITE_ID setting to fix\
this error.")
sites = models.ManyToManyField(Site, default=(sid,))
# Add an optional recursive relation to enable creating sub forms
# to create one-to-many relations for applications
parent = models.ForeignKey('self', null=True, blank=True)
name = models.CharField(max_length=150)
owner = models.EmailField(_('Owner E-mail'))
notify_owner = models.BooleanField(help_text="Email the owner each time \
an application is submitted")
slug = models.SlugField(_('slug'),unique=True)
description = models.TextField(blank=True)
    # pass the callable, not its result: datetime.datetime.now() here would
    # freeze the default at import time for every subsequent row
    start_date = models.DateTimeField(default=datetime.datetime.now,
                                      help_text=_("The date the application \
                                      will first be visible to user."))
    stop_date = models.DateTimeField(default=datetime.datetime.now,
                                     help_text=_("The date the application \
                                     will no longer be available to be \
                                     filled out"))
authentication = models.BooleanField(help_text=_("Require the applicant \
to authenticate before using the application?"))
# New in 1.2 (requires sql patch for existing installs)
authentication_multi_submit = models.BooleanField(
"Multiple Submissions (Authenticated)",
help_text="Allow an authenticated user to submit multiple forms\
(applicable only if the form requires authentication")
reviewers = models.ManyToManyField(User, null=True, blank=True)
notify_reviewers = models.BooleanField(help_text="Email every reviewer each\
time an application is submitted")
email_only = models.BooleanField(help_text=_("If checked, completed \
applications will not be stored in the database but \
emailed to the owner/reviewers (dependent on whether \
those notification flags are set"))
objects = CurrentManager()
class Meta:
ordering = ["stop_date"]
def __unicode__(self):
return( u'%s' % self.name )
def is_active(self):
now = datetime.datetime.now()
if now >= self.stop_date or now <= self.start_date:
return False
else:
return True
class SubApplicationDefinition(models.Model):
# Get subform name and slug from the ApplicationDefinition
app_definition = models.ForeignKey(ApplicationDefinition)
min_entries = models.IntegerField(default = 0,
help_text = _("The minimum number of\
instances of this sub-form the\
user must fill out. 0 if none."))
max_entries = models.IntegerField(default = 0,
help_text = _("The maximum number of\
instances of this sub-form the\
user must fill out. 0 if\
there is no limit."))
extras = models.IntegerField(default = 3,
help_text = _("The number of extra forms to\
show by default on an\
application"))
# Application data types/fields
class FieldDefinition(models.Model):
"""
The base model for data type field definitions.
"""
field_types = ()
type = models.CharField(_('Type'),max_length=250,)
application = models.ForeignKey(ApplicationDefinition)
pre_text = models.TextField(blank = True,
help_text=_("The html here is prepended \
to the form field."))
post_text = models.TextField(blank = True,
help_text=_("The html here is appended \
to the form field."))
page = models.IntegerField(default=1)
order = models.IntegerField()
group = models.BooleanField(default=False,
help_text=_("Group this with nearby\
fields using an indented and \
colored background."))
label = models.CharField(max_length=250)
slug = models.SlugField()
help_text = models.TextField(blank = True,
help_text=_("The text here is added \
to the defined field to help the \
user understand its purpose."))
require = models.BooleanField(default=True,
help_text=_("This requires that value be \
entered for this field on \
the application form."))
reviewer_only = models.BooleanField(help_text=_("Make this field viewable\
only to the reviewer of an application, not the applicant."))
header = models.BooleanField(default=True,
help_text=_("If this is set to true the field\
will be used as a header in the\
reviewer view."))
# New in 1.2 (requires sql patch for existing installs)
multi_select = models.BooleanField(_("Allow Multiple Selections"),
help_text=_("If selected, the user\
will be allowed to check multiple\
options from dropdown selections. Be\
careful about which field type this is\
used for (generally only use large\
text area fields)."))
use_radio = models.BooleanField(_("Use Radio Buttons"),
help_text=_("Dropdown selections\
will be represented with radio buttons\
for single select dropdowns and\
check boxes for multi-select.\
"))
class Meta:
ordering = ['page', 'order']
def __unicode__(self):
return( u'%s.%s: %s' % (self.page, self.order, self.label) )
class DropDownChoices(models.Model):
field_definition = models.ForeignKey(FieldDefinition)
text = models.CharField(max_length = 255)
value = models.CharField(max_length = 255)
class Meta:
ordering = ['value']
# Instance Models (field and application)
class Application(models.Model):
# Add an optional recursive relation to enable creating sub forms
# to create one-to-many relations for applications
parent = models.ForeignKey('self', null=True, blank=True)
user = models.ForeignKey(User, blank=True, null=True)
submission_date = models.DateTimeField(null=True, blank=True)
app_definition = models.ForeignKey(ApplicationDefinition)
def get_field_values(self, reviewer_fields=False, all_fields=False):
"""
Returns a collection of dictionary objects with the field names
and their values.
By default this does not include the reviewer only fields that
are in the application definition. To get those, pass True
into the function.
"""
# Introspect model namespace
import formunculous.models as funcmodels
fields = []
if not all_fields:
field_set = self.app_definition.fielddefinition_set.filter(
reviewer_only=reviewer_fields)
else:
field_set = self.app_definition.fielddefinition_set.all()
for field_def in field_set:
field_model = getattr(funcmodels, field_def.type)
try:
field_val = field_model.objects.get( app = self,
field_def = field_def)
                field_dict = {'label': field_def.label,
                              'data': field_val.value,
                              'type': field_def.type,
                              'slug': field_def.slug,
                              'multiselect': field_def.multi_select },
            except field_model.DoesNotExist:  # was a bare except:
                field_dict = {'label': field_def.label, 'data': None,
                              'type': field_def.type },
            # the trailing commas above wrap each dict in a 1-tuple, so the
            # += below appends the dict instead of iterating over its keys
            fields += field_dict
return fields
def get_field_value(self, field_slug):
"""
Gets the value of the field defined by the slug given for this
application instance, or returns None if either the value
or the field definition is not found.
"""
# Introspect model namespace
import formunculous.models as funcmodels
try:
field_def = FieldDefinition.objects.get(
slug=field_slug,
application=self.app_definition)
except FieldDefinition.DoesNotExist:
return None
field_model = getattr(funcmodels, field_def.type)
try:
field_val = field_model.objects.get( app = self,
field_def=field_def )
except field_model.DoesNotExist:
return None
return field_val.value
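    # Usage sketch ('applicant-email' is a hypothetical slug):
    #   app.get_field_value('applicant-email')
    # returns the stored value, or None when either the field definition or
    # its value instance is missing.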
# Define empty permission model for using builder
class Form(models.Model):
class Meta:
permissions = (
("can_delete_applications", "Can delete applications"),
)
class BaseField(models.Model):
"""
This is the base model for all field types Each unique field type
must extend this model for the field to work properly.
"""
name = 'Base'
    icon = None  # was misspelled "icone" in the original
field_def = models.ForeignKey(FieldDefinition)
app = models.ForeignKey(Application)
allow_dropdown = False
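# Registering a new field type follows one pattern throughout this module
# (sketch; SSNField is a hypothetical example, not part of formunculous):
# subclass BaseField, append a ('ClassName', 'Menu Label') pair to
# FieldDefinition.field_types, and declare a `value` model field:
#
#   class SSNField(BaseField):
#       FieldDefinition.field_types += ('SSNField', 'Social Security Number',),
#       name = "Social Security Number"
#       icon = 'formunculous/img/field_icons/text_input.png'
#       value = models.CharField(max_length=11, blank=True, null=True)
#       widget = None
#       allow_dropdown = False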
class TextField(BaseField):
"""
This is max length (most DBs) generic text field that has no
input restrictions.
"""
FieldDefinition.field_types+=('TextField','Text Input',),
name = 'Text Input'
icon = 'formunculous/img/field_icons/text_input.png'
value = models.CharField(max_length=255, blank=True, null=True)
widget = None
allow_dropdown = True
class TextArea(BaseField):
"""
This is the large text area field.
"""
FieldDefinition.field_types+=('TextArea', 'Large Text Area',),
name= "Large Text Area"
icon = 'formunculous/img/field_icons/text_area.png'
value = models.TextField(blank=True, null=True)
widget = None
allow_dropdown = True
class BooleanField(BaseField):
"""
A simple yes/no field.
"""
FieldDefinition.field_types+=('BooleanField', 'Yes/No Question',),
name = "Yes/No Question/Checkbox"
icon = 'formunculous/img/field_icons/yes_no.png'
value = models.BooleanField(blank=True, default=False)
widget = None
allow_dropdown = False
class EmailField(BaseField):
"""
Builtin email field
"""
FieldDefinition.field_types+=('EmailField', 'Email Address',),
name = "Email Address"
icon = 'formunculous/img/field_icons/email.png'
value = models.EmailField(blank=True, null=True)
widget = None
allow_dropdown = True
class USPhoneNumber(BaseField):
FieldDefinition.field_types+=('USPhoneNumber', 'U.S. Phone Number',),
name = "U.S. Phone Number"
icon = 'formunculous/img/field_icons/phone.png'
value = PhoneNumberField(null=True, blank=True)
widget = None
allow_dropdown = True
class USStateField(BaseField):
"""
Provides a dropdown selection of U.S. States and
provinces.
"""
FieldDefinition.field_types+=('USStateField', 'U.S. States',),
name = "U.S. States"
icon = 'formunculous/img/field_icons/usstate.png'
    value = models.CharField(null=True, blank=True,
                             max_length=255)  # max_length must be an int, not the string "255"
widget = OptionalStateSelect
allow_dropdown = False
class USZipCodeField(BaseField):
FieldDefinition.field_types+=('USZipCodeField', 'U.S. Zipcode',),
name = "U.S. Zipcode"
icon = 'formunculous/img/field_icons/zipcode.png'
value = fields.USZipCodeModelField(null=True, blank=True)
widget = None
allow_dropdown = True
class DateField(BaseField):
"""
Uses a nice jquery widget for selecting a date.
"""
FieldDefinition.field_types+=('DateField', 'Date Input',),
name = "Date Input"
icon = 'formunculous/img/field_icons/date.png'
value = models.DateField(blank=True, null=True)
widget = DateWidget
allow_dropdown = True
class FloatField(BaseField):
"""
Float field. Accepts any decimal number basically
"""
FieldDefinition.field_types+=('FloatField', 'Decimal Number',),
name = "Decimal Number Field"
icon = 'formunculous/img/field_icons/decimal.png'
value = models.FloatField(blank=True, null=True)
widget = None
allow_dropdown = True
class IntegerField(BaseField):
"""
Integer field. Accepts any whole number + or -
"""
FieldDefinition.field_types+=('IntegerField', 'Whole Number',),
name = "Whole Number Field"
icon = 'formunculous/img/field_icons/wholenumber.png'
value = models.IntegerField(blank=True, null=True)
widget = None
allow_dropdown = True
class PositiveIntegerField(BaseField):
"""
Integer field. Accepts any whole number that is positive
"""
FieldDefinition.field_types+=('PositiveIntegerField',
'Positive Whole Number',),
name = "Positive Whole Number Field"
icon = 'formunculous/img/field_icons/positivewhole.png'
value = models.PositiveIntegerField(blank=True, null=True)
widget = None
allow_dropdown = True
class URLField(BaseField):
"""
URL field. Accepts any valid URL
"""
FieldDefinition.field_types+=('URLField', 'URL',),
name = "URL"
icon = 'formunculous/img/field_icons/url.png'
value = models.URLField(blank=True, null=True)
widget = None
allow_dropdown = True
class IPAddressField(BaseField):
"""
IP address field field. Accepts any valid IPv4 address.
"""
FieldDefinition.field_types+=('IPAddressField', 'IP Address',),
name = "IP Address"
icon = 'formunculous/img/field_icons/ipaddress.png'
value = models.IPAddressField(blank=True, null=True)
widget = None
allow_dropdown = True
# File Based Fields
class FileField(BaseField):
"""
This field accepts any file, regardless of type, and size
is limited by the Django settings
"""
FieldDefinition.field_types+=('FileField','File Upload',),
name = 'File Upload'
icon = 'formunculous/img/field_icons/file.png'
value = models.FileField(upload_to=upload_to_path,
storage=ApplicationStorage(),
blank=True, null=True,max_length=100)
widget = FileWidget
allow_dropdown = False
class ImageField(BaseField):
"""
This is a file field that only accepts common image formats.
"""
FieldDefinition.field_types+=('ImageField','Picture Upload',),
name = 'Picture Upload'
icon = 'formunculous/img/field_icons/picture.png'
value = models.ImageField(upload_to=upload_to_path,
storage=ApplicationStorage(),
blank=True, null=True, max_length=100)
widget = FileWidget
allow_dropdown = False
class DocumentField(BaseField):
"""
Validates common document mime-types/extensions
"""
FieldDefinition.field_types+=('DocumentField', 'Document Upload',),
name = "Document Upload"
icon = 'formunculous/img/field_icons/document.png'
value = fields.DocumentField(upload_to=upload_to_path,
storage=ApplicationStorage(),
blank=True, null=True, max_length=100)
widget = FileWidget
allow_dropdown = False
license: gpl-3.0 | hash: -5,708,087,654,961,886,000 | line_mean: 32.516934 | line_max: 81 | alpha_frac: 0.586236 | autogenerated: false | ratio: 4.614233 | config_test: false | has_no_keywords: false | few_assignments: false

repo_name: f0rki/cb-multios | path: original-challenges/CGC_File_System/support/genpolls.py | copies: 1 | size: 4100 | content:
import struct
import time
import sys
import random
import string
def randString( z ):
s = ''
for i in range(z):
s += random.choice( string.lowercase + string.uppercase + string.digits)
return s
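# Opcodes exercised by the generators below (as read from their payloads):
# SEND uploads a file, RECV reads it back, NDIR creates a directory, REPO
# removes a file, PRNT lists the tree, STOP terminates. Names and payload
# fields are length-prefixed using \xNN byte escapes in the replay XML.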
def delfile( dirs ):
### randomly select a directory then pull the file to remove
fl = ''
cnt = 0
while fl == '' and cnt < 30:
z = random.choice(dirs)
cnt += 1
if len(z[1]) == 0:
continue
f = random.choice(z[1])
i = z[1].index(f)
del z[1][i]
fl = f[0]
if fl == '':
return ''
data = '\t\t<write echo="ascii"><data>REPO\\x%.2x%s</data></write>\n' %(len(fl), fl)
i = fl.rfind('/')
fl = fl[i+1:]
data += '\t\t<read echo="ascii"><delim>\\n</delim><match><data>[INFO] %s removed\\n</data></match></read>\n' %(fl)
return data
def addfile( dirs ):
### Select a base dir to add the file
td = random.choice( dirs )
base_dir = td[0]
maxlen = 10
if (254 - len(base_dir)) < 10:
return ''
n = randString( random.randint(4, 10) )
newfile = base_dir + '/' + n
fdata = randString( random.randint(20,100) )
z = 'SEND\\x%.2x%s' %(len(newfile), newfile)
z += '\\x%.2x\\x%.2x%s' %( len(fdata)&0xff, (len(fdata)>>8)&0xff, fdata )
data = '\t\t<write echo="ascii"><data>%s</data></write>\n' %(z)
data += '\t\t<read echo="ascii"><delim>\\n</delim><match><data>[INFO] File received: %s\\n</data></match></read>\n' %(newfile)
td[1].append( [newfile, fdata] )
return data
def getFile( dirs ):
### Select a base dir with a file
t = []
cnt = 0
while t == [] and cnt < 30:
z = random.choice( dirs )
        if len(z[1]) != 0:  # was "!= 1", which both broke out with an empty list on empty dirs and skipped single-file dirs
t = z[1]
break
cnt += 1
if t == []:
return ''
fl = random.choice(t)
req = 'RECV\\x%.2x%s' %( len(fl[0]), fl[0] )
data = '\t\t<write echo="ascii"><data>%s</data></write>\n' %(req)
data += '\t\t<read echo="ascii"><delim>\\n</delim><match><data>[DATA] %s\\n</data></match></read>\n' %(fl[1])
return data
def addDir( dirs ):
### Select a base dir to add
td = random.choice( dirs )
base_dir = td[0]
maxlen = 10
if (254 - len(base_dir)) < 10:
return ''
n = randString( random.randint(4, 10) )
newdir = base_dir + '/' + n
z = 'NDIR\\x%.2x%s' %(len(newdir), newdir)
data = '\t\t<write echo="ascii"><data>' + z + '</data></write>\n'
data += '\t\t<read echo="ascii"><delim>\\n</delim><match><data>[INFO] Added new directory\\n</data></match></read>\n'
dirs.append( [ newdir, [] ] )
return data
def addPrint( dirs ):
c = []
for x in dirs:
for y in x[1]:
c.append( [ y[0], 'File', str(len(y[1]))] )
if x[0] == '':
continue
c.append( [x[0], 'Dir', '' ])
c.sort(key=lambda x: x[0])
data = '\t\t<write echo="ascii"><data>PRNT</data></write>\n'
data += '\t\t<read echo="ascii"><delim>\\n</delim><match><data>[INFO] Listing files\\n</data></match></read>\n'
for y in c:
if y[1] == 'File':
l = ' ' + y[2]
else:
l = ''
data += '\t\t<read echo="ascii"><delim>\\n</delim><match><data>%s %s%s\\n</data></match></read>\n' %(y[0], y[1], l)
return data
def addStop():
data = '\t\t<write echo="ascii"><data>STOP</data></write>\n'
data += '\t\t<read echo="ascii"><delim>\\n</delim><match><data>[INFO] Terminating\\n</data></match></read>\n'
return data
def newpoll( ind ):
dir_structure = [ ['', [] ] ]
data = '<?xml version="1.0" standalone="no" ?>\n'
data += '<!DOCTYPE pov SYSTEM "/usr/share/cgc-replay/replay.dtd">\n'
data += '<pov>\n'
data += '\t<cbid>CROMU_00033</cbid>\n'
data += '\t<replay>\n'
flist = [addDir, addfile, getFile]
repo = 0
for i in range(0, random.randint(105, 200)):
if i == 100:
flist.append(delfile)
b = random.choice( flist )
if b == delfile:
repo = 1
data += b( dir_structure )
### Only add this in if a file has been not been deleted
if repo == 0:
data += addPrint( dir_structure )
data += addStop()
data += '\t</replay>\n'
data += '</pov>\n'
f = open("./poller/for-testing/POLL%.5d.xml" %(ind), "wb")
f.write(data)
f.close()
if __name__ == '__main__':
seed = time.time()
if len(sys.argv) == 2:
seed = int(sys.argv[1])
random.seed(seed)
for i in range(1, 1000):
newpoll(i)
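# Usage sketch: `python genpolls.py [seed]` writes POLL00001.xml through
# POLL00999.xml into ./poller/for-testing/, each replay mixing NDIR/SEND/RECV
# requests (plus REPO deletions after the first 100 actions) against the
# CROMU_00033 challenge binary.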
license: mit | hash: 6,480,812,888,750,274,000 | line_mean: 19.707071 | line_max: 127 | alpha_frac: 0.565366 | autogenerated: false | ratio: 2.484848 | config_test: false | has_no_keywords: false | few_assignments: false

repo_name: alwayssayyes/incrediblesy | path: app/views.py | copies: 1 | size: 9016 | content:
from flask import render_template, Flask, request, redirect, url_for, current_app
from app import app
from urllib2 import urlopen
from bs4 import BeautifulSoup
from flaskext import wtf
from flaskext.wtf import Form, TextField, TextAreaField, SubmitField, validators, ValidationError
from google.appengine.ext import db
from database import Member
class Photo(db.Model):
photo = db.BlobProperty()
class Tweet(db.Model):
photo = db.BlobProperty()
text = db.StringProperty()
class ContactForm(Form):
name = TextField("Name", [validators.Required("Please enter your name.")])
email = TextField(
"Email", [validators.Required("Please enter your email address."), validators.Email("Please enter valid email address.")])
subject = TextField(
"Subject", [validators.Required("Please enter a subject.")])
message = TextAreaField(
"Message", [validators.Required("Please enter a message.")])
submit = SubmitField("Send")
@app.route('/', methods=["GET", "POST"])
def main():
form = None
return render_template('photo.html', form=form)
@app.route('/tweet', methods=["GET", "POST"])
def tweet():
if request.method == 'POST':
post_data = request.files.get('photo')
filestream = post_data.read()
post_dataa = request.form.get('text')
upload_data = Tweet()
upload_data.photo = db.Blob(filestream)
upload_data.text = post_dataa
upload_data.put()
datalist=Tweet.all()
url = url_for("shows", key=upload_data.key())
return render_template("photo.html", texta=post_dataa, url=url, Tweet=datalist)
else:
return render_template('photo.html')
@app.route('/upload', methods=["GET", "POST"])
def upload_db():
if request.method == 'POST':
post_data = request.files.get('photo')
        filestream = post_data.read()
        upload_data = Photo()
        upload_data.photo = db.Blob(filestream)
        upload_data.put()
url = url_for("shows", key=upload_data.key())
return render_template("photo.html", url=url)
else:
return render_template('photo.html')
@app.route('/show/<key>', methods=['GET'])
def shows(key):
uploaded_data = db.get(key)
return current_app.response_class(
uploaded_data.photo)
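# The upload flow (sketch): /tweet and /upload store the raw upload bytes as a
# db.Blob, url_for("shows", key=upload_data.key()) turns the datastore key
# into a /show/<key> URL, and shows() streams those bytes back as the
# response body.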
@app.route('/vali', methods=["GET", "POST"])
def vali():
form = ContactForm()
if request.method == 'POST':
if not form.validate():
return render_template('vali.html', form=form)
else:
return "Nice to meet you," + form.name.data + "!"
return render_template('vali.html', form=form)
# class news(Form):
# category = TextField("category", [validators.Required("Please enter another one")])
# submit = SubmitField("Send")
# @app.route('/crawlhw', methods=["GET", "POST"])
# def crawlhw():
# form = news()
# if request.method == 'POST':
# if not form.validate():
# return render_template('crawlhomework.html' )
# @app.route('/lotto', methods=['GET'])
# def lotto():
# if request.method == 'GET':
# if request.args.get('getlotto'):
# html = urlopen(
# 'http://www.nlotto.co.kr/lotto645Confirm.do?method=allWin').read()
# bs = BeautifulSoup(html)
# trs = bs.select('.tblType1 > tbody > tr')
# lottos = []
# for i in trs:
# tds = i.select('td')
# if len(tds) > 1:
# lotto = str(i.select('td')[1].get_text())
# lotto = lotto.split(', ')
# lottos.append(lotto)
# return render_template('haha.html', lottos=lotto)
# return render_template('haha.html')
# @app.route('/uploaddb', methods=['GET'])
# def uploaddb():
# return 0
# # @app.route('/')
# # def test():
# # return render_template('getpost.html')
# @app.route('/index')
# def index():
# return render_template('index.html')
# @app.route('/gugu', methods=['get'])
# def gugu():
# if request.method == 'GET':
# danstart = request.args.get('danstart')
# danend = request.args.get('danend')
# if danstart and danend:
# gugudan = []
# for dan in range(int(danstart), int(danend) + 1):
# for i in range(1, 10):
#                 gugudan.append(
#                     str(dan) + "x" + str(i) + "=" + str(dan * i))  # was "dan + i"; a times table needs dan * i
# return render_template('haha.html', gugudan=gugudan)
# return render_template('haha.html')
# @app.route('/cal', methods=['GET', 'POST'])
# def cal():
# if request.method == 'POST':
# x = request.form.get('x')
# op = request.form.get('op')
# y = request.form.get('y')
# if x and op and y:
# result = 0
# if op == '+':
# result = int(x) + int(y)
# return render_template('haha.html', calresult=result)
# elif op == '-':
# result = int(x) - int(y)
# return render_template('haha.html', calresult=result)
# elif op == '*':
# result = int(x) * int(y)
# return render_template('haha.html', calresult=result)
# elif op == '/':
# result = float(x) / float(y)
# return render_template('haha.html', calresult=result)
# return render_template('haha.html')
# @app.route('/what', methods=['GET'])
# def what():
# if request.method == 'GET':
# if request.args.get('news'):
# pass
# @app.route('/')
# @app.route('/index')
# def index():
# return render_template("photo.html")
# @app.route('/practice', methods=["GET", "POST"])
# def practice():
# if request.method == 'POST':
# post_data = request.files.get('photo')
# filestream = post_data.read()
# post_dataa = request.form.get('text')
# upload_data = Database()
# upload_data.photo = db.Blob(filestream)
# upload_data.text = post_dataa
# upload_data.put()
# datalist=Database.all()
# url = url_for("shows", key=upload_data.key())
# return render_template("photo.html", texta=post_dataa, Database=datalist)
# else:
# return render_template('photo.html')
# @app.route('/show/<key>', methods=['GET'])
# def shows(key):
# uploaded_data = db.get(key)
# return current_app.response_class(
# uploaded_data.photo)
# @app.route('/', methods=['GET','POST'])
# def show_entries():
# members = Member.all()
# return render_template("practice.html", members=members)
# @app.route('/add_entry', methods=['POST'])
# def add_entry():
# userId = request.form['id']
# userPasswd = request.form['passwd']
# for member in Member.all():
# if userId == member.getId():
# return "failed"
# member = Member(userId=userId, userPasswd=userPasswd)
# member.put()
# return "OK"
# @app.route('/getpost', methods=['GET', 'POST'])
# def getpost():
# get=None
# post=None
# if request.args:
# get = request.args.get['getget']
# if request.form:
# post = request.form['postpost']
# return render_template('getpost.html',
# get_variable = get, post_variable = post)
# )
# @app.route('/crawl', methods=['GET','POST'])
# def crawl():
# if request.method == 'POST' and request.form:
# address = request.form.get('crawl')
# htmltext = urllib.urlopen(address).read()
# soup = BeautifulSoup(htmltext, from_encoding="utf-8")
# result=[]
# for tag in soup.select(".title"):
# result.append(tag.get_text())
# return render_template('getpost.html', result=result)
# else:
# return render_template('getpost.html')
# @app.route('/yeahyeah')
# def ohyeah():
# return render_template('iamsoyoung.html')
# @app.route('/getpost',methods=['GET','POST'])
# def getpost():
# get_value=None
# if request.method=='GET':
# if 'getget' in request.args:
# get_value=request.args.get('getget')
# if get_value != 'http://pgr21.com/pb/pb.php?id=freedom':
# return render_template('listshow.html',error='URL not found')
# htmltext = urllib.urlopen(get_value).read()
# soup = BeautifulSoup(htmltext, from_encoding="utf-8")
# authors = []
# for tag in soup.select(".tdname"):
# authors.append(tag.get_text())
# return render_template('listshow.html',
# list=authors)
# return render_template('getpost.html')
# @app.route('/getpost2',methods=['GET','POST'])
# def getpost2():
# get_value=None
# if request.method=='POST':
# if request.form and ('postpost' in request.form):
# get_value=request.form['postpost']
# htmltext = urllib.urlopen(get_value).read()
# soup = BeautifulSoup(htmltext, from_encoding="utf-8")
# authors = []
# for tag in soup.select(".tdname"):
# authors.append(tag.get_text())
# return render_template('listshow.html',
# list=authors)
# return render_template('getpost.html')
license: apache-2.0 | hash: 86,643,763,824,553,280 | line_mean: 28.371336 | line_max: 130 | alpha_frac: 0.581522 | autogenerated: false | ratio: 3.269036 | config_test: false | has_no_keywords: false | few_assignments: false

repo_name: auduny/home-assistant | path: tests/components/google_assistant/test_trait.py | copies: 1 | size: 41902 | content:
"""Tests for the Google Assistant traits."""
from unittest.mock import patch, Mock
import pytest
from homeassistant.components import (
binary_sensor,
camera,
cover,
fan,
input_boolean,
light,
lock,
media_player,
scene,
script,
switch,
vacuum,
group,
)
from homeassistant.components.climate import const as climate
from homeassistant.components.google_assistant import (
trait, helpers, const, error)
from homeassistant.const import (
STATE_ON, STATE_OFF, ATTR_ENTITY_ID, SERVICE_TURN_ON, SERVICE_TURN_OFF,
TEMP_CELSIUS, TEMP_FAHRENHEIT, ATTR_SUPPORTED_FEATURES, ATTR_TEMPERATURE,
ATTR_DEVICE_CLASS, ATTR_ASSUMED_STATE, STATE_UNKNOWN)
from homeassistant.core import State, DOMAIN as HA_DOMAIN, EVENT_CALL_SERVICE
from homeassistant.util import color
from tests.common import async_mock_service, mock_coro
BASIC_CONFIG = helpers.Config(
should_expose=lambda state: True,
)
REQ_ID = 'ff36a3cc-ec34-11e6-b1a0-64510650abcf'
BASIC_DATA = helpers.RequestData(
BASIC_CONFIG,
'test-agent',
REQ_ID,
)
PIN_CONFIG = helpers.Config(
should_expose=lambda state: True,
secure_devices_pin='1234'
)
PIN_DATA = helpers.RequestData(
PIN_CONFIG,
'test-agent',
REQ_ID,
)
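# Fixture note (sketch): BASIC_CONFIG/BASIC_DATA expose every entity with no
# PIN configured, while PIN_CONFIG/PIN_DATA add secure_devices_pin='1234' so
# traits that guard secure devices can be exercised both with and without a
# PIN challenge.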
async def test_brightness_light(hass):
"""Test brightness trait support for light domain."""
assert helpers.get_google_type(light.DOMAIN, None) is not None
assert trait.BrightnessTrait.supported(light.DOMAIN,
light.SUPPORT_BRIGHTNESS, None)
trt = trait.BrightnessTrait(hass, State('light.bla', light.STATE_ON, {
light.ATTR_BRIGHTNESS: 243
}), BASIC_CONFIG)
assert trt.sync_attributes() == {}
assert trt.query_attributes() == {
'brightness': 95
}
events = []
hass.bus.async_listen(EVENT_CALL_SERVICE, events.append)
calls = async_mock_service(hass, light.DOMAIN, light.SERVICE_TURN_ON)
await trt.execute(
trait.COMMAND_BRIGHTNESS_ABSOLUTE, BASIC_DATA,
{'brightness': 50}, {})
await hass.async_block_till_done()
assert len(calls) == 1
assert calls[0].data == {
ATTR_ENTITY_ID: 'light.bla',
light.ATTR_BRIGHTNESS_PCT: 50
}
assert len(events) == 1
assert events[0].data == {
'domain': 'light',
'service': 'turn_on',
'service_data': {'brightness_pct': 50, 'entity_id': 'light.bla'}
}
async def test_brightness_media_player(hass):
"""Test brightness trait support for media player domain."""
assert helpers.get_google_type(media_player.DOMAIN, None) is not None
assert trait.BrightnessTrait.supported(media_player.DOMAIN,
media_player.SUPPORT_VOLUME_SET,
None)
trt = trait.BrightnessTrait(hass, State(
'media_player.bla', media_player.STATE_PLAYING, {
media_player.ATTR_MEDIA_VOLUME_LEVEL: .3
}), BASIC_CONFIG)
assert trt.sync_attributes() == {}
assert trt.query_attributes() == {
'brightness': 30
}
calls = async_mock_service(
hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET)
await trt.execute(
trait.COMMAND_BRIGHTNESS_ABSOLUTE, BASIC_DATA,
{'brightness': 60}, {})
assert len(calls) == 1
assert calls[0].data == {
ATTR_ENTITY_ID: 'media_player.bla',
media_player.ATTR_MEDIA_VOLUME_LEVEL: .6
}
async def test_camera_stream(hass):
"""Test camera stream trait support for camera domain."""
hass.config.api = Mock(base_url='http://1.1.1.1:8123')
assert helpers.get_google_type(camera.DOMAIN, None) is not None
assert trait.CameraStreamTrait.supported(camera.DOMAIN,
camera.SUPPORT_STREAM, None)
trt = trait.CameraStreamTrait(
hass, State('camera.bla', camera.STATE_IDLE, {}), BASIC_CONFIG
)
assert trt.sync_attributes() == {
'cameraStreamSupportedProtocols': [
"hls",
],
'cameraStreamNeedAuthToken': False,
'cameraStreamNeedDrmEncryption': False,
}
assert trt.query_attributes() == {}
with patch('homeassistant.components.camera.async_request_stream',
return_value=mock_coro('/api/streams/bla')):
await trt.execute(trait.COMMAND_GET_CAMERA_STREAM, BASIC_DATA, {}, {})
assert trt.query_attributes() == {
'cameraStreamAccessUrl': 'http://1.1.1.1:8123/api/streams/bla'
}
async def test_onoff_group(hass):
"""Test OnOff trait support for group domain."""
assert helpers.get_google_type(group.DOMAIN, None) is not None
assert trait.OnOffTrait.supported(group.DOMAIN, 0, None)
trt_on = trait.OnOffTrait(hass, State('group.bla', STATE_ON), BASIC_CONFIG)
assert trt_on.sync_attributes() == {}
assert trt_on.query_attributes() == {
'on': True
}
trt_off = trait.OnOffTrait(hass, State('group.bla', STATE_OFF),
BASIC_CONFIG)
assert trt_off.query_attributes() == {
'on': False
}
on_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_ON)
await trt_on.execute(
trait.COMMAND_ONOFF, BASIC_DATA,
{'on': True}, {})
assert len(on_calls) == 1
assert on_calls[0].data == {
ATTR_ENTITY_ID: 'group.bla',
}
off_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_OFF)
await trt_on.execute(
trait.COMMAND_ONOFF, BASIC_DATA,
{'on': False}, {})
assert len(off_calls) == 1
assert off_calls[0].data == {
ATTR_ENTITY_ID: 'group.bla',
}
async def test_onoff_input_boolean(hass):
"""Test OnOff trait support for input_boolean domain."""
assert helpers.get_google_type(input_boolean.DOMAIN, None) is not None
assert trait.OnOffTrait.supported(input_boolean.DOMAIN, 0, None)
trt_on = trait.OnOffTrait(hass, State('input_boolean.bla', STATE_ON),
BASIC_CONFIG)
assert trt_on.sync_attributes() == {}
assert trt_on.query_attributes() == {
'on': True
}
trt_off = trait.OnOffTrait(hass, State('input_boolean.bla', STATE_OFF),
BASIC_CONFIG)
assert trt_off.query_attributes() == {
'on': False
}
on_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_ON)
await trt_on.execute(
trait.COMMAND_ONOFF, BASIC_DATA,
{'on': True}, {})
assert len(on_calls) == 1
assert on_calls[0].data == {
ATTR_ENTITY_ID: 'input_boolean.bla',
}
off_calls = async_mock_service(hass, input_boolean.DOMAIN,
SERVICE_TURN_OFF)
await trt_on.execute(
trait.COMMAND_ONOFF, BASIC_DATA,
{'on': False}, {})
assert len(off_calls) == 1
assert off_calls[0].data == {
ATTR_ENTITY_ID: 'input_boolean.bla',
}
async def test_onoff_switch(hass):
"""Test OnOff trait support for switch domain."""
assert helpers.get_google_type(switch.DOMAIN, None) is not None
assert trait.OnOffTrait.supported(switch.DOMAIN, 0, None)
trt_on = trait.OnOffTrait(hass, State('switch.bla', STATE_ON),
BASIC_CONFIG)
assert trt_on.sync_attributes() == {}
assert trt_on.query_attributes() == {
'on': True
}
trt_off = trait.OnOffTrait(hass, State('switch.bla', STATE_OFF),
BASIC_CONFIG)
assert trt_off.query_attributes() == {
'on': False
}
on_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_ON)
await trt_on.execute(
trait.COMMAND_ONOFF, BASIC_DATA,
{'on': True}, {})
assert len(on_calls) == 1
assert on_calls[0].data == {
ATTR_ENTITY_ID: 'switch.bla',
}
off_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_OFF)
await trt_on.execute(
trait.COMMAND_ONOFF, BASIC_DATA,
{'on': False}, {})
assert len(off_calls) == 1
assert off_calls[0].data == {
ATTR_ENTITY_ID: 'switch.bla',
}
async def test_onoff_fan(hass):
"""Test OnOff trait support for fan domain."""
assert helpers.get_google_type(fan.DOMAIN, None) is not None
assert trait.OnOffTrait.supported(fan.DOMAIN, 0, None)
trt_on = trait.OnOffTrait(hass, State('fan.bla', STATE_ON), BASIC_CONFIG)
assert trt_on.sync_attributes() == {}
assert trt_on.query_attributes() == {
'on': True
}
trt_off = trait.OnOffTrait(hass, State('fan.bla', STATE_OFF), BASIC_CONFIG)
assert trt_off.query_attributes() == {
'on': False
}
on_calls = async_mock_service(hass, fan.DOMAIN, SERVICE_TURN_ON)
await trt_on.execute(
trait.COMMAND_ONOFF, BASIC_DATA,
{'on': True}, {})
assert len(on_calls) == 1
assert on_calls[0].data == {
ATTR_ENTITY_ID: 'fan.bla',
}
off_calls = async_mock_service(hass, fan.DOMAIN, SERVICE_TURN_OFF)
await trt_on.execute(
trait.COMMAND_ONOFF, BASIC_DATA,
{'on': False}, {})
assert len(off_calls) == 1
assert off_calls[0].data == {
ATTR_ENTITY_ID: 'fan.bla',
}
async def test_onoff_light(hass):
"""Test OnOff trait support for light domain."""
assert helpers.get_google_type(light.DOMAIN, None) is not None
assert trait.OnOffTrait.supported(light.DOMAIN, 0, None)
trt_on = trait.OnOffTrait(hass, State('light.bla', STATE_ON), BASIC_CONFIG)
assert trt_on.sync_attributes() == {}
assert trt_on.query_attributes() == {
'on': True
}
trt_off = trait.OnOffTrait(hass, State('light.bla', STATE_OFF),
BASIC_CONFIG)
assert trt_off.query_attributes() == {
'on': False
}
on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON)
await trt_on.execute(
trait.COMMAND_ONOFF, BASIC_DATA,
{'on': True}, {})
assert len(on_calls) == 1
assert on_calls[0].data == {
ATTR_ENTITY_ID: 'light.bla',
}
off_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_OFF)
await trt_on.execute(
trait.COMMAND_ONOFF, BASIC_DATA,
{'on': False}, {})
assert len(off_calls) == 1
assert off_calls[0].data == {
ATTR_ENTITY_ID: 'light.bla',
}
async def test_onoff_media_player(hass):
"""Test OnOff trait support for media_player domain."""
assert helpers.get_google_type(media_player.DOMAIN, None) is not None
assert trait.OnOffTrait.supported(media_player.DOMAIN, 0, None)
trt_on = trait.OnOffTrait(hass, State('media_player.bla', STATE_ON),
BASIC_CONFIG)
assert trt_on.sync_attributes() == {}
assert trt_on.query_attributes() == {
'on': True
}
trt_off = trait.OnOffTrait(hass, State('media_player.bla', STATE_OFF),
BASIC_CONFIG)
assert trt_off.query_attributes() == {
'on': False
}
on_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_ON)
await trt_on.execute(
trait.COMMAND_ONOFF, BASIC_DATA,
{'on': True}, {})
assert len(on_calls) == 1
assert on_calls[0].data == {
ATTR_ENTITY_ID: 'media_player.bla',
}
off_calls = async_mock_service(hass, media_player.DOMAIN,
SERVICE_TURN_OFF)
await trt_on.execute(
trait.COMMAND_ONOFF, BASIC_DATA,
{'on': False}, {})
assert len(off_calls) == 1
assert off_calls[0].data == {
ATTR_ENTITY_ID: 'media_player.bla',
}
async def test_onoff_climate(hass):
"""Test OnOff trait not supported for climate domain."""
assert helpers.get_google_type(climate.DOMAIN, None) is not None
assert not trait.OnOffTrait.supported(
climate.DOMAIN, climate.SUPPORT_ON_OFF, None)
async def test_dock_vacuum(hass):
"""Test dock trait support for vacuum domain."""
assert helpers.get_google_type(vacuum.DOMAIN, None) is not None
assert trait.DockTrait.supported(vacuum.DOMAIN, 0, None)
trt = trait.DockTrait(hass, State('vacuum.bla', vacuum.STATE_IDLE),
BASIC_CONFIG)
assert trt.sync_attributes() == {}
assert trt.query_attributes() == {
'isDocked': False
}
calls = async_mock_service(hass, vacuum.DOMAIN,
vacuum.SERVICE_RETURN_TO_BASE)
await trt.execute(
trait.COMMAND_DOCK, BASIC_DATA, {}, {})
assert len(calls) == 1
assert calls[0].data == {
ATTR_ENTITY_ID: 'vacuum.bla',
}
async def test_startstop_vacuum(hass):
"""Test startStop trait support for vacuum domain."""
assert helpers.get_google_type(vacuum.DOMAIN, None) is not None
assert trait.StartStopTrait.supported(vacuum.DOMAIN, 0, None)
trt = trait.StartStopTrait(hass, State('vacuum.bla', vacuum.STATE_PAUSED, {
ATTR_SUPPORTED_FEATURES: vacuum.SUPPORT_PAUSE,
}), BASIC_CONFIG)
assert trt.sync_attributes() == {'pausable': True}
assert trt.query_attributes() == {
'isRunning': False,
'isPaused': True
}
start_calls = async_mock_service(hass, vacuum.DOMAIN,
vacuum.SERVICE_START)
await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {'start': True}, {})
assert len(start_calls) == 1
assert start_calls[0].data == {
ATTR_ENTITY_ID: 'vacuum.bla',
}
stop_calls = async_mock_service(hass, vacuum.DOMAIN,
vacuum.SERVICE_STOP)
await trt.execute(
trait.COMMAND_STARTSTOP, BASIC_DATA, {'start': False}, {})
assert len(stop_calls) == 1
assert stop_calls[0].data == {
ATTR_ENTITY_ID: 'vacuum.bla',
}
pause_calls = async_mock_service(hass, vacuum.DOMAIN,
vacuum.SERVICE_PAUSE)
await trt.execute(
trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {'pause': True}, {})
assert len(pause_calls) == 1
assert pause_calls[0].data == {
ATTR_ENTITY_ID: 'vacuum.bla',
}
unpause_calls = async_mock_service(hass, vacuum.DOMAIN,
vacuum.SERVICE_START)
await trt.execute(
trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {'pause': False}, {})
assert len(unpause_calls) == 1
assert unpause_calls[0].data == {
ATTR_ENTITY_ID: 'vacuum.bla',
}
async def test_color_setting_color_light(hass):
"""Test ColorSpectrum trait support for light domain."""
assert helpers.get_google_type(light.DOMAIN, None) is not None
assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None)
assert trait.ColorSettingTrait.supported(light.DOMAIN,
light.SUPPORT_COLOR, None)
trt = trait.ColorSettingTrait(hass, State('light.bla', STATE_ON, {
light.ATTR_HS_COLOR: (20, 94),
light.ATTR_BRIGHTNESS: 200,
ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR,
}), BASIC_CONFIG)
assert trt.sync_attributes() == {
'colorModel': 'hsv'
}
assert trt.query_attributes() == {
'color': {
'spectrumHsv': {
'hue': 20,
'saturation': 0.94,
'value': 200 / 255,
}
}
}
assert trt.can_execute(trait.COMMAND_COLOR_ABSOLUTE, {
'color': {
'spectrumRGB': 16715792
}
})
calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON)
await trt.execute(trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {
'color': {
'spectrumRGB': 1052927
}
}, {})
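    # 1052927 == 0x1010FF, i.e. RGB (16, 16, 255): hue 240, saturation 239/255 = 93.725%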
assert len(calls) == 1
assert calls[0].data == {
ATTR_ENTITY_ID: 'light.bla',
light.ATTR_HS_COLOR: (240, 93.725),
}
await trt.execute(trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {
'color': {
'spectrumHSV': {
'hue': 100,
'saturation': .50,
'value': .20,
}
}
}, {})
assert len(calls) == 2
assert calls[1].data == {
ATTR_ENTITY_ID: 'light.bla',
light.ATTR_HS_COLOR: [100, 50],
light.ATTR_BRIGHTNESS: .2 * 255,
}
async def test_color_setting_temperature_light(hass):
"""Test ColorTemperature trait support for light domain."""
assert helpers.get_google_type(light.DOMAIN, None) is not None
assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None)
assert trait.ColorSettingTrait.supported(light.DOMAIN,
light.SUPPORT_COLOR_TEMP, None)
trt = trait.ColorSettingTrait(hass, State('light.bla', STATE_ON, {
light.ATTR_MIN_MIREDS: 200,
light.ATTR_COLOR_TEMP: 300,
light.ATTR_MAX_MIREDS: 500,
ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR_TEMP,
}), BASIC_CONFIG)
assert trt.sync_attributes() == {
'colorTemperatureRange': {
'temperatureMinK': 2000,
'temperatureMaxK': 5000,
}
}
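    # Kelvin = 1,000,000 / mired: 500 mired -> 2000 K, 200 mired -> 5000 K, 300 mired -> 3333 K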
assert trt.query_attributes() == {
'color': {
'temperatureK': 3333
}
}
assert trt.can_execute(trait.COMMAND_COLOR_ABSOLUTE, {
'color': {
'temperature': 400
}
})
calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON)
with pytest.raises(helpers.SmartHomeError) as err:
await trt.execute(trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {
'color': {
'temperature': 5555
}
}, {})
assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE
await trt.execute(trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {
'color': {
'temperature': 2857
}
}, {})
assert len(calls) == 1
assert calls[0].data == {
ATTR_ENTITY_ID: 'light.bla',
light.ATTR_COLOR_TEMP: color.color_temperature_kelvin_to_mired(2857)
}
async def test_color_light_temperature_light_bad_temp(hass):
"""Test ColorTemperature trait support for light domain."""
assert helpers.get_google_type(light.DOMAIN, None) is not None
assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None)
assert trait.ColorSettingTrait.supported(light.DOMAIN,
light.SUPPORT_COLOR_TEMP, None)
trt = trait.ColorSettingTrait(hass, State('light.bla', STATE_ON, {
light.ATTR_MIN_MIREDS: 200,
light.ATTR_COLOR_TEMP: 0,
light.ATTR_MAX_MIREDS: 500,
}), BASIC_CONFIG)
assert trt.query_attributes() == {
}
async def test_scene_scene(hass):
"""Test Scene trait support for scene domain."""
assert helpers.get_google_type(scene.DOMAIN, None) is not None
assert trait.SceneTrait.supported(scene.DOMAIN, 0, None)
trt = trait.SceneTrait(hass, State('scene.bla', scene.STATE), BASIC_CONFIG)
assert trt.sync_attributes() == {}
assert trt.query_attributes() == {}
assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {})
calls = async_mock_service(hass, scene.DOMAIN, SERVICE_TURN_ON)
await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {})
assert len(calls) == 1
assert calls[0].data == {
ATTR_ENTITY_ID: 'scene.bla',
}
async def test_scene_script(hass):
"""Test Scene trait support for script domain."""
assert helpers.get_google_type(script.DOMAIN, None) is not None
assert trait.SceneTrait.supported(script.DOMAIN, 0, None)
trt = trait.SceneTrait(hass, State('script.bla', STATE_OFF), BASIC_CONFIG)
assert trt.sync_attributes() == {}
assert trt.query_attributes() == {}
assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {})
calls = async_mock_service(hass, script.DOMAIN, SERVICE_TURN_ON)
await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {})
# We don't wait till script execution is done.
await hass.async_block_till_done()
assert len(calls) == 1
assert calls[0].data == {
ATTR_ENTITY_ID: 'script.bla',
}
async def test_temperature_setting_climate_onoff(hass):
"""Test TemperatureSetting trait support for climate domain - range."""
assert helpers.get_google_type(climate.DOMAIN, None) is not None
assert not trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None)
assert trait.TemperatureSettingTrait.supported(
climate.DOMAIN, climate.SUPPORT_OPERATION_MODE, None)
hass.config.units.temperature_unit = TEMP_FAHRENHEIT
trt = trait.TemperatureSettingTrait(hass, State(
'climate.bla', climate.STATE_AUTO, {
ATTR_SUPPORTED_FEATURES: (
climate.SUPPORT_OPERATION_MODE | climate.SUPPORT_ON_OFF |
climate.SUPPORT_TARGET_TEMPERATURE_HIGH |
climate.SUPPORT_TARGET_TEMPERATURE_LOW),
climate.ATTR_OPERATION_MODE: climate.STATE_COOL,
climate.ATTR_OPERATION_LIST: [
climate.STATE_COOL,
climate.STATE_HEAT,
climate.STATE_AUTO,
],
climate.ATTR_MIN_TEMP: None,
climate.ATTR_MAX_TEMP: None,
}), BASIC_CONFIG)
assert trt.sync_attributes() == {
'availableThermostatModes': 'off,on,cool,heat,heatcool',
'thermostatTemperatureUnit': 'F',
}
assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {})
calls = async_mock_service(
hass, climate.DOMAIN, SERVICE_TURN_ON)
await trt.execute(trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {
'thermostatMode': 'on',
}, {})
assert len(calls) == 1
calls = async_mock_service(
hass, climate.DOMAIN, SERVICE_TURN_OFF)
await trt.execute(trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {
'thermostatMode': 'off',
}, {})
assert len(calls) == 1
async def test_temperature_setting_climate_range(hass):
"""Test TemperatureSetting trait support for climate domain - range."""
assert helpers.get_google_type(climate.DOMAIN, None) is not None
assert not trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None)
assert trait.TemperatureSettingTrait.supported(
climate.DOMAIN, climate.SUPPORT_OPERATION_MODE, None)
hass.config.units.temperature_unit = TEMP_FAHRENHEIT
trt = trait.TemperatureSettingTrait(hass, State(
'climate.bla', climate.STATE_AUTO, {
climate.ATTR_CURRENT_TEMPERATURE: 70,
climate.ATTR_CURRENT_HUMIDITY: 25,
ATTR_SUPPORTED_FEATURES:
climate.SUPPORT_OPERATION_MODE |
climate.SUPPORT_TARGET_TEMPERATURE_HIGH |
climate.SUPPORT_TARGET_TEMPERATURE_LOW,
climate.ATTR_OPERATION_MODE: climate.STATE_AUTO,
climate.ATTR_OPERATION_LIST: [
STATE_OFF,
climate.STATE_COOL,
climate.STATE_HEAT,
climate.STATE_AUTO,
],
climate.ATTR_TARGET_TEMP_HIGH: 75,
climate.ATTR_TARGET_TEMP_LOW: 65,
climate.ATTR_MIN_TEMP: 50,
climate.ATTR_MAX_TEMP: 80
}), BASIC_CONFIG)
assert trt.sync_attributes() == {
'availableThermostatModes': 'off,cool,heat,heatcool',
'thermostatTemperatureUnit': 'F',
}
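    # HA is configured in Fahrenheit here, but Google expects Celsius:
    # 70F = 21.1C, 65F = 18.3C and 75F = 23.9C (rounded to one decimal).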
assert trt.query_attributes() == {
'thermostatMode': 'heatcool',
'thermostatTemperatureAmbient': 21.1,
'thermostatHumidityAmbient': 25,
'thermostatTemperatureSetpointLow': 18.3,
'thermostatTemperatureSetpointHigh': 23.9,
}
assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, {})
assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {})
calls = async_mock_service(
hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE)
await trt.execute(
trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, BASIC_DATA, {
'thermostatTemperatureSetpointHigh': 25,
'thermostatTemperatureSetpointLow': 20,
}, {})
assert len(calls) == 1
assert calls[0].data == {
ATTR_ENTITY_ID: 'climate.bla',
climate.ATTR_TARGET_TEMP_HIGH: 77,
climate.ATTR_TARGET_TEMP_LOW: 68,
}
calls = async_mock_service(
hass, climate.DOMAIN, climate.SERVICE_SET_OPERATION_MODE)
await trt.execute(trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {
'thermostatMode': 'heatcool',
}, {})
assert len(calls) == 1
assert calls[0].data == {
ATTR_ENTITY_ID: 'climate.bla',
climate.ATTR_OPERATION_MODE: climate.STATE_AUTO,
}
with pytest.raises(helpers.SmartHomeError) as err:
await trt.execute(
trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA,
{'thermostatTemperatureSetpoint': -100}, {})
assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE
hass.config.units.temperature_unit = TEMP_CELSIUS
async def test_temperature_setting_climate_setpoint(hass):
"""Test TemperatureSetting trait support for climate domain - setpoint."""
assert helpers.get_google_type(climate.DOMAIN, None) is not None
assert not trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None)
assert trait.TemperatureSettingTrait.supported(
climate.DOMAIN, climate.SUPPORT_OPERATION_MODE, None)
hass.config.units.temperature_unit = TEMP_CELSIUS
trt = trait.TemperatureSettingTrait(hass, State(
'climate.bla', climate.STATE_AUTO, {
ATTR_SUPPORTED_FEATURES: (
climate.SUPPORT_OPERATION_MODE | climate.SUPPORT_ON_OFF),
climate.ATTR_OPERATION_MODE: climate.STATE_COOL,
climate.ATTR_OPERATION_LIST: [
STATE_OFF,
climate.STATE_COOL,
],
climate.ATTR_MIN_TEMP: 10,
climate.ATTR_MAX_TEMP: 30,
ATTR_TEMPERATURE: 18,
climate.ATTR_CURRENT_TEMPERATURE: 20
}), BASIC_CONFIG)
assert trt.sync_attributes() == {
'availableThermostatModes': 'off,on,cool',
'thermostatTemperatureUnit': 'C',
}
assert trt.query_attributes() == {
'thermostatMode': 'cool',
'thermostatTemperatureAmbient': 20,
'thermostatTemperatureSetpoint': 18,
}
assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {})
assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {})
calls = async_mock_service(
hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE)
with pytest.raises(helpers.SmartHomeError):
await trt.execute(
trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA,
{'thermostatTemperatureSetpoint': -100}, {})
await trt.execute(
trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA,
{'thermostatTemperatureSetpoint': 19}, {})
assert len(calls) == 1
assert calls[0].data == {
ATTR_ENTITY_ID: 'climate.bla',
ATTR_TEMPERATURE: 19
}
async def test_temperature_setting_climate_setpoint_auto(hass):
"""
Test TemperatureSetting trait support for climate domain.
Setpoint in auto mode.
"""
hass.config.units.temperature_unit = TEMP_CELSIUS
trt = trait.TemperatureSettingTrait(hass, State(
'climate.bla', climate.STATE_AUTO, {
ATTR_SUPPORTED_FEATURES: (
climate.SUPPORT_OPERATION_MODE | climate.SUPPORT_ON_OFF),
climate.ATTR_OPERATION_MODE: climate.STATE_AUTO,
climate.ATTR_OPERATION_LIST: [
STATE_OFF,
climate.STATE_AUTO,
],
climate.ATTR_MIN_TEMP: 10,
climate.ATTR_MAX_TEMP: 30,
ATTR_TEMPERATURE: 18,
climate.ATTR_CURRENT_TEMPERATURE: 20
}), BASIC_CONFIG)
assert trt.sync_attributes() == {
'availableThermostatModes': 'off,on,heatcool',
'thermostatTemperatureUnit': 'C',
}
assert trt.query_attributes() == {
'thermostatMode': 'heatcool',
'thermostatTemperatureAmbient': 20,
'thermostatTemperatureSetpointHigh': 18,
'thermostatTemperatureSetpointLow': 18,
}
assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {})
assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {})
calls = async_mock_service(
hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE)
await trt.execute(
trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA,
{'thermostatTemperatureSetpoint': 19}, {})
assert len(calls) == 1
assert calls[0].data == {
ATTR_ENTITY_ID: 'climate.bla',
ATTR_TEMPERATURE: 19
}
async def test_lock_unlock_lock(hass):
"""Test LockUnlock trait locking support for lock domain."""
assert helpers.get_google_type(lock.DOMAIN, None) is not None
assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN,
None)
trt = trait.LockUnlockTrait(hass,
State('lock.front_door', lock.STATE_UNLOCKED),
PIN_CONFIG)
assert trt.sync_attributes() == {}
assert trt.query_attributes() == {
'isLocked': False
}
assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {'lock': True})
calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_LOCK)
# No challenge data
with pytest.raises(error.ChallengeNeeded) as err:
await trt.execute(
trait.COMMAND_LOCKUNLOCK, PIN_DATA, {'lock': True}, {})
assert len(calls) == 0
    assert err.value.code == const.ERR_CHALLENGE_NEEDED
    assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED
# invalid pin
with pytest.raises(error.ChallengeNeeded) as err:
await trt.execute(
trait.COMMAND_LOCKUNLOCK, PIN_DATA, {'lock': True},
{'pin': 9999})
assert len(calls) == 0
    assert err.value.code == const.ERR_CHALLENGE_NEEDED
    assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED
await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, {'lock': True},
{'pin': '1234'})
assert len(calls) == 1
assert calls[0].data == {
ATTR_ENTITY_ID: 'lock.front_door'
}
async def test_lock_unlock_unlock(hass):
"""Test LockUnlock trait unlocking support for lock domain."""
assert helpers.get_google_type(lock.DOMAIN, None) is not None
assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN,
None)
trt = trait.LockUnlockTrait(hass,
State('lock.front_door', lock.STATE_LOCKED),
PIN_CONFIG)
assert trt.sync_attributes() == {}
assert trt.query_attributes() == {
'isLocked': True
}
assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {'lock': False})
calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_UNLOCK)
# No challenge data
with pytest.raises(error.ChallengeNeeded) as err:
await trt.execute(
trait.COMMAND_LOCKUNLOCK, PIN_DATA, {'lock': False}, {})
assert len(calls) == 0
    assert err.value.code == const.ERR_CHALLENGE_NEEDED
    assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED
# invalid pin
with pytest.raises(error.ChallengeNeeded) as err:
await trt.execute(
trait.COMMAND_LOCKUNLOCK, PIN_DATA, {'lock': False},
{'pin': 9999})
assert len(calls) == 0
    assert err.value.code == const.ERR_CHALLENGE_NEEDED
    assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED
await trt.execute(
trait.COMMAND_LOCKUNLOCK, PIN_DATA, {'lock': False}, {'pin': '1234'})
assert len(calls) == 1
assert calls[0].data == {
ATTR_ENTITY_ID: 'lock.front_door'
}
async def test_fan_speed(hass):
"""Test FanSpeed trait speed control support for fan domain."""
assert helpers.get_google_type(fan.DOMAIN, None) is not None
assert trait.FanSpeedTrait.supported(fan.DOMAIN, fan.SUPPORT_SET_SPEED,
None)
trt = trait.FanSpeedTrait(
hass, State(
'fan.living_room_fan', fan.SPEED_HIGH, attributes={
'speed_list': [
fan.SPEED_OFF, fan.SPEED_LOW, fan.SPEED_MEDIUM,
fan.SPEED_HIGH
],
'speed': 'low'
}), BASIC_CONFIG)
assert trt.sync_attributes() == {
'availableFanSpeeds': {
'ordered': True,
'speeds': [
{
'speed_name': 'off',
'speed_values': [
{
'speed_synonym': ['stop', 'off'],
'lang': 'en'
}
]
},
{
'speed_name': 'low',
'speed_values': [
{
'speed_synonym': [
'slow', 'low', 'slowest', 'lowest'],
'lang': 'en'
}
]
},
{
'speed_name': 'medium',
'speed_values': [
{
'speed_synonym': ['medium', 'mid', 'middle'],
'lang': 'en'
}
]
},
{
'speed_name': 'high',
'speed_values': [
{
'speed_synonym': [
'high', 'max', 'fast', 'highest', 'fastest',
'maximum'],
'lang': 'en'
}
]
}
]
},
'reversible': False
}
assert trt.query_attributes() == {
'currentFanSpeedSetting': 'low',
'on': True,
'online': True
}
assert trt.can_execute(
trait.COMMAND_FANSPEED, params={'fanSpeed': 'medium'})
calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_SPEED)
await trt.execute(
trait.COMMAND_FANSPEED, BASIC_DATA, {'fanSpeed': 'medium'}, {})
assert len(calls) == 1
assert calls[0].data == {
'entity_id': 'fan.living_room_fan',
'speed': 'medium'
}
async def test_modes(hass):
"""Test Mode trait."""
assert helpers.get_google_type(media_player.DOMAIN, None) is not None
assert trait.ModesTrait.supported(
media_player.DOMAIN, media_player.SUPPORT_SELECT_SOURCE, None)
trt = trait.ModesTrait(
hass, State(
'media_player.living_room', media_player.STATE_PLAYING,
attributes={
media_player.ATTR_INPUT_SOURCE_LIST: [
'media', 'game', 'chromecast', 'plex'
],
media_player.ATTR_INPUT_SOURCE: 'game'
}),
BASIC_CONFIG)
attribs = trt.sync_attributes()
assert attribs == {
'availableModes': [
{
'name': 'input source',
'name_values': [
{
'name_synonym': ['input source'],
'lang': 'en'
}
],
'settings': [
{
'setting_name': 'media',
'setting_values': [
{
'setting_synonym': ['media', 'media mode'],
'lang': 'en'
}
]
},
{
'setting_name': 'game',
'setting_values': [
{
'setting_synonym': ['game', 'game mode'],
'lang': 'en'
}
]
},
{
'setting_name': 'chromecast',
'setting_values': [
{
'setting_synonym': ['chromecast'],
'lang': 'en'
}
]
}
],
'ordered': False
}
]
}
assert trt.query_attributes() == {
'currentModeSettings': {'source': 'game'},
'on': True,
'online': True
}
assert trt.can_execute(
trait.COMMAND_MODES, params={
'updateModeSettings': {
trt.HA_TO_GOOGLE.get(media_player.ATTR_INPUT_SOURCE): 'media'
}})
calls = async_mock_service(
hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE)
await trt.execute(
trait.COMMAND_MODES, BASIC_DATA, {
'updateModeSettings': {
trt.HA_TO_GOOGLE.get(media_player.ATTR_INPUT_SOURCE): 'media'
}}, {})
assert len(calls) == 1
assert calls[0].data == {
'entity_id': 'media_player.living_room',
'source': 'media'
}
async def test_openclose_cover(hass):
"""Test OpenClose trait support for cover domain."""
assert helpers.get_google_type(cover.DOMAIN, None) is not None
assert trait.OpenCloseTrait.supported(cover.DOMAIN,
cover.SUPPORT_SET_POSITION, None)
# No position
trt = trait.OpenCloseTrait(hass, State('cover.bla', cover.STATE_OPEN, {
}), BASIC_CONFIG)
assert trt.sync_attributes() == {}
assert trt.query_attributes() == {
'openPercent': 100
}
# No state
trt = trait.OpenCloseTrait(hass, State('cover.bla', STATE_UNKNOWN, {
}), BASIC_CONFIG)
assert trt.sync_attributes() == {}
with pytest.raises(helpers.SmartHomeError):
trt.query_attributes()
# Assumed state
trt = trait.OpenCloseTrait(hass, State('cover.bla', cover.STATE_OPEN, {
ATTR_ASSUMED_STATE: True,
}), BASIC_CONFIG)
assert trt.sync_attributes() == {}
with pytest.raises(helpers.SmartHomeError):
trt.query_attributes()
trt = trait.OpenCloseTrait(hass, State('cover.bla', cover.STATE_OPEN, {
cover.ATTR_CURRENT_POSITION: 75
}), BASIC_CONFIG)
assert trt.sync_attributes() == {}
assert trt.query_attributes() == {
'openPercent': 75
}
calls = async_mock_service(
hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION)
await trt.execute(
trait.COMMAND_OPENCLOSE, BASIC_DATA,
{'openPercent': 50}, {})
assert len(calls) == 1
assert calls[0].data == {
ATTR_ENTITY_ID: 'cover.bla',
cover.ATTR_POSITION: 50
}
@pytest.mark.parametrize('device_class', (
cover.DEVICE_CLASS_DOOR,
cover.DEVICE_CLASS_GARAGE,
))
async def test_openclose_cover_secure(hass, device_class):
"""Test OpenClose trait support for cover domain."""
assert helpers.get_google_type(cover.DOMAIN, device_class) is not None
assert trait.OpenCloseTrait.supported(
cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class)
trt = trait.OpenCloseTrait(hass, State('cover.bla', cover.STATE_OPEN, {
ATTR_DEVICE_CLASS: device_class,
cover.ATTR_CURRENT_POSITION: 75
}), PIN_CONFIG)
assert trt.sync_attributes() == {}
assert trt.query_attributes() == {
'openPercent': 75
}
calls = async_mock_service(
hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION)
# No challenge data
with pytest.raises(error.ChallengeNeeded) as err:
await trt.execute(
trait.COMMAND_OPENCLOSE, PIN_DATA,
{'openPercent': 50}, {})
assert len(calls) == 0
    assert err.value.code == const.ERR_CHALLENGE_NEEDED
    assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED
# invalid pin
with pytest.raises(error.ChallengeNeeded) as err:
await trt.execute(
trait.COMMAND_OPENCLOSE, PIN_DATA,
{'openPercent': 50}, {'pin': '9999'})
assert len(calls) == 0
    assert err.value.code == const.ERR_CHALLENGE_NEEDED
    assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED
await trt.execute(
trait.COMMAND_OPENCLOSE, PIN_DATA,
{'openPercent': 50}, {'pin': '1234'})
assert len(calls) == 1
assert calls[0].data == {
ATTR_ENTITY_ID: 'cover.bla',
cover.ATTR_POSITION: 50
}
@pytest.mark.parametrize('device_class', (
binary_sensor.DEVICE_CLASS_DOOR,
binary_sensor.DEVICE_CLASS_GARAGE_DOOR,
binary_sensor.DEVICE_CLASS_LOCK,
binary_sensor.DEVICE_CLASS_OPENING,
binary_sensor.DEVICE_CLASS_WINDOW,
))
async def test_openclose_binary_sensor(hass, device_class):
"""Test OpenClose trait support for binary_sensor domain."""
assert helpers.get_google_type(
binary_sensor.DOMAIN, device_class) is not None
assert trait.OpenCloseTrait.supported(binary_sensor.DOMAIN,
0, device_class)
trt = trait.OpenCloseTrait(hass, State('binary_sensor.test', STATE_ON, {
ATTR_DEVICE_CLASS: device_class,
}), BASIC_CONFIG)
assert trt.sync_attributes() == {
'queryOnlyOpenClose': True,
}
assert trt.query_attributes() == {
'openPercent': 100
}
trt = trait.OpenCloseTrait(hass, State('binary_sensor.test', STATE_OFF, {
ATTR_DEVICE_CLASS: device_class,
}), BASIC_CONFIG)
assert trt.sync_attributes() == {
'queryOnlyOpenClose': True,
}
assert trt.query_attributes() == {
'openPercent': 0
}
|
apache-2.0
| 8,503,505,274,772,646,000
| 31.787167
| 79
| 0.580306
| false
| 3.681103
| true
| false
| false
|
uchchwhash/fortran-linter
|
linter/tokens.py
|
1
|
3201
|
"""
Some lowest-level parsers, that is, tokenizers.
"""
import re
from .parsers import parser, join
from .parsers import Success, Failure
def exact(string, ignore_case=False):
""" Only matches the exact `string`. """
if ignore_case:
string = string.lower()
@parser(repr(string))
def inner(text, start):
""" A parser for the `string`. """
whole = len(string)
segment = text[start: start + whole]
if ignore_case:
segment = segment.lower()
if segment == string:
return Success(text, start, start + whole, string)
else:
raise Failure(text, start, repr(string))
return inner
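# Usage sketch (hypothetical input; `scan` returns the `Success` objects built
# above): exact("end").scan("endgame", 0) succeeds over "end", while
# exact("end").scan("game", 0) raises a Failure.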
def satisfies(predicate, desc):
""" Recognize a character satisfying given `predicate`. """
@parser(desc)
def inner(text, start):
""" A parser that applies the `predicate`. """
if start < len(text) and predicate(text[start]):
return Success(text, start, start + 1, text[start])
else:
raise Failure(text, start, desc)
return inner
def one_of(chars):
""" Recognize any of the given characters `chars`. """
return satisfies(lambda c: c in chars, "one of {}".format(chars))
def none_of(chars):
""" Consumes a character that is not on the list `chars`. """
return satisfies(lambda c: c not in chars, "none of {}".format(chars))
#: succeeds for any character
wildcard = satisfies(lambda c: True, "")
#: matches a space character
space = satisfies(lambda c: c.isspace(), "whitespace")
#: matches whitespace
spaces = (+space // join) % "whitespaces"
#: matches optional whitespace
whitespace = (~space // join) % "optional whitespace"
#: matches a letter
letter = satisfies(lambda c: c.isalpha(), "letter")
#: matches a word
word = (+letter // join) % "word"
#: matches a digit
digit = satisfies(lambda c: c.isdigit(), "digit")
#: matches a list of digits
digits = (+digit // join) % "digits"
#: matches one alphanumeric character
alphanumeric = satisfies(lambda c: c.isalnum(), "alphanumeric")
#: matches multiple alphanumeric characters
alphanumerics = (+alphanumeric // join) % "alphanumerics"
def separated_by(prsr, sep, empty=None):
""" A list of `prsr` parsers separated by `sep` parsers. """
@parser
def inner(text, start):
""" A parser that returns the list of values parsed by `prsr`. """
head = prsr.scan(text, start)
tail = (~(sep >> prsr)).scan(text, head.end)
return Success(text, start, tail.end, [head.value] + tail.value)
if empty is None:
return inner
else:
return inner | empty
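# For example -- a sketch, assuming `>>` keeps the right-hand value and `~`
# repeats as the combinators above suggest -- separated_by(digits, exact(","))
# scanned over "1,22,333" would yield the value list ["1", "22", "333"].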
def liberal(prsr):
""" No fuss about surrounding whitespace. """
return whitespace >> prsr << whitespace
def regex(exp, flags=0):
""" Match a regex. Perhaps too powerful. """
if isinstance(exp, str):
exp = re.compile(exp, flags)
@parser
def inner(text, start):
""" A parser that applies the regex. """
match = exp.match(text, start)
if match:
return Success(text, match.start(), match.end(), match)
else:
raise Failure(text, start, exp.pattern)
return inner
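# Note that the `Success` value here is the `re` match object itself, so
# (hypothetically) regex(r"(\d+)-(\d+)").scan("10-20", 0).value.group(2)
# recovers "20".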
|
mit
| -5,792,294,949,699,803,000
| 25.89916
| 74
| 0.621368
| false
| 3.770318
| false
| false
| false
|
Cadasta/cadasta-platform
|
cadasta/search/tests/test_parser.py
|
1
|
12984
|
import pytest
from django.test import TestCase
from .. import parser
class ParserTest(TestCase):
def test_parse_string(self):
p = parser.query.parseString
# Simple cases
assert p('a').asList() == ['a']
assert p('a ').asList() == ['a']
assert p(' a ').asList() == ['a']
assert p(' a').asList() == ['a']
assert p('a b').asList() == ['a', 'b']
assert p('a b').asList() == ['a', 'b']
assert p(' a b').asList() == ['a', 'b']
assert p('a b ').asList() == ['a', 'b']
assert p(' a b ').asList() == ['a', 'b']
assert p('a_b').asList() == ['a_b']
assert p('a b c').asList() == ['a', 'b', 'c']
assert p('a___ b--- c+++').asList() == ['a___', 'b---', 'c+++']
# Quoted cases
assert p('"a b"').asList() == ['"a b"']
assert p('"a b"').asList() == ['"a b"']
assert p('"a b" c').asList() == ['"a b"', 'c']
assert p('a "b c"').asList() == ['a', '"b c"']
assert p('a "b c" d').asList() == ['a', '"b c"', 'd']
# +- cases
assert p('+a').asList() == [['+', 'a']]
assert p('-a').asList() == [['-', 'a']]
assert p('+"a b"').asList() == [['+', '"a b"']]
assert p('-"a b"').asList() == [['-', '"a b"']]
assert p('b +a').asList() == ['b', ['+', 'a']]
assert p('b -a').asList() == ['b', ['-', 'a']]
assert p('"b +a"').asList() == ['"b +a"']
assert p('"b -a"').asList() == ['"b -a"']
assert p('b+a').asList() == ['b+a']
assert p('b-a').asList() == ['b-a']
assert p('"b+a"').asList() == ['"b+a"']
assert p('"b-a"').asList() == ['"b-a"']
assert p('+a b c').asList() == [['+', 'a'], 'b', 'c']
assert p('-a b c').asList() == [['-', 'a'], 'b', 'c']
assert p('+a "b c"').asList() == [['+', 'a'], '"b c"']
assert p('-a "b c"').asList() == [['-', 'a'], '"b c"']
assert p('a b +c').asList() == ['a', 'b', ['+', 'c']]
assert p('a b -c').asList() == ['a', 'b', ['-', 'c']]
assert p('a "b +c"').asList() == ['a', '"b +c"']
assert p('a "b -c"').asList() == ['a', '"b -c"']
assert p('+a -b +c').asList() == [['+', 'a'], ['-', 'b'], ['+', 'c']]
assert p('-a +b -c').asList() == [['-', 'a'], ['+', 'b'], ['-', 'c']]
assert p('+a -"b +c"').asList() == [['+', 'a'], ['-', '"b +c"']]
assert p('-a +"b -c"').asList() == [['-', 'a'], ['+', '"b -c"']]
assert p('+a-b +c').asList() == [['+', 'a-b'], ['+', 'c']]
assert p('-a+b -c').asList() == [['-', 'a+b'], ['-', 'c']]
assert p('+"a-b" +c').asList() == [['+', '"a-b"'], ['+', 'c']]
assert p('-"a+b" -c').asList() == [['-', '"a+b"'], ['-', 'c']]
assert p('+a-"b +c"').asList() == [['+', 'a-"b'], ['+', 'c"']]
assert p('-a+"b -c"').asList() == [['-', 'a+"b'], ['-', 'c"']]
assert p('+a -b+c').asList() == [['+', 'a'], ['-', 'b+c']]
assert p('-a +b-c').asList() == [['-', 'a'], ['+', 'b-c']]
assert p('+a -"b+c"').asList() == [['+', 'a'], ['-', '"b+c"']]
assert p('-a +"b-c"').asList() == [['-', 'a'], ['+', '"b-c"']]
assert p('+a "-b+c"').asList() == [['+', 'a'], '"-b+c"']
assert p('-a "+b-c"').asList() == [['-', 'a'], '"+b-c"']
def test_parse_query(self):
f = parser.fields
assert parser.parse_query('ab') == {
'bool': {
'should': [
{
'multi_match': {
'query': 'ab',
'fields': f,
'boost': 10,
}
},
{
'multi_match': {
'query': 'ab',
'fields': f,
'fuzziness': 1,
'prefix_length': 1,
}
},
],
'must_not': [{'match': {'archived': True}}],
}
}
assert parser.parse_query('"a b"') == {
'bool': {
'should': [
{
'multi_match': {
'query': 'a b',
'fields': f,
'type': 'phrase',
'boost': 10,
}
},
],
'must_not': [{'match': {'archived': True}}],
}
}
assert parser.parse_query('+ab') == {
'bool': {
'must': [
{
'multi_match': {
'query': 'ab',
'fields': f,
'boost': 10,
}
},
{
'multi_match': {
'query': 'ab',
'fields': f,
'fuzziness': 1,
'prefix_length': 1,
}
},
],
'must_not': [{'match': {'archived': True}}],
}
}
assert parser.parse_query('+"a b"') == {
'bool': {
'must': [
{
'multi_match': {
'query': 'a b',
'fields': f,
'type': 'phrase',
'boost': 10,
}
},
],
'must_not': [{'match': {'archived': True}}],
}
}
assert parser.parse_query('-a') == {
'bool': {
'must_not': [
{
'multi_match': {
'query': 'a',
'fields': f,
'boost': 1,
}
},
{'match': {'archived': True}},
],
}
}
assert parser.parse_query('-"a b"') == {
'bool': {
'must_not': [
{
'multi_match': {
'query': 'a b',
'fields': f,
'type': 'phrase',
'boost': 1,
}
},
{'match': {'archived': True}},
],
}
}
assert parser.parse_query('"a" +"b"') == {
'bool': {
'must': [
{
'multi_match': {
'query': 'b',
'fields': f,
'type': 'phrase',
'boost': 10,
}
},
],
'should': [
{
'multi_match': {
'query': 'a',
'fields': f,
'type': 'phrase',
'boost': 10,
}
},
],
'must_not': [{'match': {'archived': True}}],
}
}
assert parser.parse_query('"a" -"b"') == {
'bool': {
'must_not': [
{
'multi_match': {
'query': 'b',
'fields': f,
'type': 'phrase',
'boost': 1,
}
},
{'match': {'archived': True}},
],
'should': [
{
'multi_match': {
'query': 'a',
'fields': f,
'type': 'phrase',
'boost': 10,
}
},
],
}
}
assert parser.parse_query('+"a" -"b"') == {
'bool': {
'must': [
{
'multi_match': {
'query': 'a',
'fields': f,
'type': 'phrase',
'boost': 10,
}
},
],
'must_not': [
{
'multi_match': {
'query': 'b',
'fields': f,
'type': 'phrase',
'boost': 1,
}
},
{'match': {'archived': True}},
],
}
}
def test_transform_to_dsl(self):
f = parser.fields
assert parser.transform_to_dsl(['a']) == [
{'multi_match': {'query': 'a', 'fields': f, 'boost': 10}},
]
assert parser.transform_to_dsl(['ab']) == [
{'multi_match': {'query': 'ab', 'fields': f, 'boost': 10}},
{'multi_match': {
'query': 'ab',
'fields': f,
'fuzziness': 1,
'prefix_length': 1,
}},
]
assert parser.transform_to_dsl(['"a"']) == [
{'multi_match': {
'query': 'a',
'fields': f,
'type': 'phrase',
'boost': 10,
}},
]
assert parser.transform_to_dsl(['a'], has_fuzziness=False) == [
{'multi_match': {'query': 'a', 'fields': f, 'boost': 1}},
]
assert parser.transform_to_dsl(['"a"'], has_fuzziness=False) == [
{'multi_match': {
'query': 'a',
'fields': f,
'type': 'phrase',
'boost': 1,
}},
]
assert parser.transform_to_dsl(['ab', '"b"']) == [
{'multi_match': {'query': 'ab', 'fields': f, 'boost': 10}},
{'multi_match': {
'query': 'ab',
'fields': f,
'fuzziness': 1,
'prefix_length': 1,
}},
{'multi_match': {
'query': 'b',
'fields': f,
'type': 'phrase',
'boost': 10,
}},
]
assert parser.transform_to_dsl(['"a"', 'bc']) == [
{'multi_match': {
'query': 'a',
'fields': f,
'type': 'phrase',
'boost': 10,
}},
{'multi_match': {'query': 'bc', 'fields': f, 'boost': 10}},
{'multi_match': {
'query': 'bc',
'fields': f,
'fuzziness': 1,
'prefix_length': 1,
}},
]
assert parser.transform_to_dsl(['ab', '"b"'], has_fuzziness=False) == [
{'multi_match': {'query': 'ab', 'fields': f, 'boost': 1}},
{'multi_match': {
'query': 'b',
'fields': f,
'type': 'phrase',
'boost': 1,
}},
]
assert parser.transform_to_dsl(['"a"', 'bc'], has_fuzziness=False) == [
{'multi_match': {
'query': 'a',
'fields': f,
'type': 'phrase',
'boost': 1,
}},
{'multi_match': {'query': 'bc', 'fields': f, 'boost': 1}},
]
assert parser.transform_to_dsl(['"a']) == [
{'multi_match': {'query': '"a', 'fields': f, 'boost': 10}},
{'multi_match': {
'query': '"a',
'fields': f,
'fuzziness': 1,
'prefix_length': 1,
}},
]
assert parser.transform_to_dsl(['a"']) == [
{'multi_match': {'query': 'a"', 'fields': f, 'boost': 10}},
{'multi_match': {
'query': 'a"',
'fields': f,
'fuzziness': 1,
'prefix_length': 1,
}},
]
def test_get_fuzziness(self):
with pytest.raises(AssertionError):
parser.get_fuzziness('')
assert parser.get_fuzziness('a') == 0
assert parser.get_fuzziness('ab') == 1
assert parser.get_fuzziness('abc') == 1
assert parser.get_fuzziness('abcd') == 2
assert parser.get_fuzziness('abcde') == 2
assert parser.get_fuzziness('abcdef') == 2
|
agpl-3.0
| 4,627,493,749,427,611,000
| 34.768595
| 79
| 0.275878
| false
| 4.076609
| true
| false
| false
|
braindevices/nanoVanalystLib
|
nanoVanalystLib/nanoVanalystLib/imgFileUtils.py
|
1
|
1373
|
# -*- coding: UTF-8 -*-
'''
Created on Mar 12, 2015-1:16:18 PM
@author: Ling Wang<LingWangNeuralEng@gmail.com>
'''
import cv2, os
import numpy as np
from Constants_and_Parameters import *
def loadAsGray(imgFile, cropY=[0,880]):
img = cv2.imread(imgFile)
img = img[cropY[0]:cropY[1],:,:]
gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
return gray
def showImg(img, windName, write = False, outDir = None, prefix = None, waitTime = None, flagShowImg = None):
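    '''Optionally show `img` in a window, write it as a PNG under outDir, and return the saved file path.'''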
    if outDir is None:
        outDir = os.environ[K_PoreAnalyzer_TMP]
    if waitTime is None:
        waitTime = int(os.environ[K_PoreAnalyzer_IMshowWait])
    if prefix is None:
        prefix = os.environ[K_PoreAnalyzer_IMprefix]
    if flagShowImg is None:
        flagShowImg = bool(os.environ[K_FLAG_SHOW_IMG])
if flagShowImg:
cv2.imshow(windName, img)
cv2.waitKey(waitTime)
cv2.destroyWindow(windName)
_file = os.path.join(outDir, prefix+ windName+".png")
cv2.imwrite(_file, img)
return _file
def saveNPZ(structName, structKwds, outDir = None, prefix = None):
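    '''Save `structKwds` as a compressed .npz file named after `structName` and return its path.'''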
    if outDir is None:
        outDir = os.environ[K_PoreAnalyzer_TMP]
    if prefix is None:
        prefix = os.environ[K_PoreAnalyzer_IMprefix]
_npzFile = os.path.join(outDir, prefix+ structName)
np.savez_compressed(_npzFile, **structKwds)
return _npzFile
|
gpl-2.0
| 3,744,444,933,070,862,000
| 28.869565
| 109
| 0.646031
| false
| 2.984783
| false
| false
| false
|
alexvh/pydflatex
|
pydflatex/processor.py
|
1
|
1137
|
#!/usr/bin/env python
# coding: UTF-8
from __future__ import division
import latex_logger
class LaTeXError(Exception):
"""
LaTeX Error
"""
class Processor(object):
"""
Models an object with a logger and some options.
General options:
- colour
- debug
"""
def __init__(self, logger=None, options=None):
# storing the options
self.options = self.defaults.copy()
if options is not None:
self.options.update(options)
# setting up the logger
if logger is not None:
self.logger = logger
else:
self.logger = self.setup_logger()
self.logger.debug("%s\ninitialized with\n%s\n" % (type(self), options))
    defaults = {
'colour': True,
'debug': False,
}
def setup_logger(self, handlers=None):
if self.options['colour']:
LoggerClass = latex_logger.LaTeXLoggerColour
else:
LoggerClass = latex_logger.LaTeXLogger
logger = LoggerClass('pydflatex')
if not handlers:
if not self.options['debug']:
logger.addHandler(latex_logger.std_handler)
else:
logger.addHandler(latex_logger.debug_handler)
else:
for handler in handlers:
logger.addHandler(handler)
return logger
|
bsd-3-clause
| 5,279,224,763,711,696,000
| 20.055556
| 73
| 0.689534
| false
| 3.175978
| false
| false
| false
|
unistra/django-rest-framework-fine-permissions
|
rest_framework_fine_permissions/permissions.py
|
1
|
2283
|
# -*- coding: utf-8 -*-
""" Provides new permission policies for django-rest-framework
"""
from rest_framework.permissions import DjangoModelPermissions, BasePermission
from django.contrib.contenttypes.models import ContentType
from rest_framework_fine_permissions.models import FilterPermissionModel
from django.core.exceptions import ObjectDoesNotExist
from rest_framework_fine_permissions.serializers import QSerializer
class FullDjangoModelPermissions(DjangoModelPermissions):
"""
The request is authenticated using `django.contrib.auth` permissions.
See: https://docs.djangoproject.com/en/dev/topics/auth/#permissions
It ensures that the user is authenticated, and has the appropriate
`view`/`add`/`change`/`delete` permissions on the model.
This permission can only be applied against view classes that provide a
`.model` or `.queryset` attribute.
"""
perms_map = {
'GET': ['%(app_label)s.view_%(model_name)s'],
'OPTIONS': [],
'HEAD': ['%(app_label)s.view_%(model_name)s'],
'POST': ['%(app_label)s.add_%(model_name)s'],
'PUT': ['%(app_label)s.change_%(model_name)s'],
'PATCH': ['%(app_label)s.change_%(model_name)s'],
'DELETE': ['%(app_label)s.delete_%(model_name)s'],
}
class FilterPermission(BasePermission):
"""
filter permission
"""
def has_object_permission(self, request, view, obj):
"""
check filter permissions
"""
user = request.user
if not user.is_superuser and not user.is_anonymous:
valid = False
try:
ct = ContentType.objects.get_for_model(obj)
fpm = FilterPermissionModel.objects.get(user=user,
content_type=ct)
myq = QSerializer(base64=True).loads(fpm.filter)
try:
myobj = obj.__class__.objects.filter(myq).distinct().get(pk=obj.pk)
if myobj:
valid = True
except ObjectDoesNotExist:
valid = False
except ObjectDoesNotExist:
valid = True
finally:
return valid
else:
return True
|
gpl-2.0
| -2,426,721,725,778,089,000
| 33.575758
| 87
| 0.594654
| false
| 4.363289
| false
| false
| false
|
dparks1134/GenomeTreeTk
|
genometreetk/markers/lgt_test.py
|
1
|
10697
|
###############################################################################
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
# #
###############################################################################
import os
import sys
import logging
from biolib.external.fasttree import FastTree
from biolib.common import make_sure_path_exists
from genometreetk.default_values import DefaultValues
from genometreetk.common import create_concatenated_alignment
from genometreetk.jackknife_markers import JackknifeMarkers
import dendropy
class LgtTest(object):
"""Identify gene trees that may have undergone one or more lateral transfer.
Specifically, the following test is applied:
1) infer a jackknifed genome tree by randomly subsampling marker genes under 100 replicates
2) identify all splits with at least a minimum jackknife support value, and
where at least a certain percentage of the taxa fall on each side of the split
    3) determine how many of these "well-supported, internal" splits are recovered in each gene tree
4) filter gene trees which do not recover a specific percentage of these splits
"""
def __init__(self, cpus):
"""Initialize.
Parameters
----------
cpus : int
Number of cpus to use.
"""
self.logger = logging.getLogger()
self.cpus = cpus
def run(self, genome_ids,
marker_genes,
hmm_model_file,
min_support,
min_per_taxa,
perc_markers_to_jackknife,
gene_tree_dir,
alignment_dir,
output_dir):
"""Identify gene trees which do not recover well-support, internal splits in a jackknifed genome tree.
Parameters
----------
genome_ids : iterable
Genomes of interest.
marker_genes : iterable
Unique ids of marker genes.
hmm_model_file : str
File containing HMMs for each marker gene.
min_support : float
Minimum jackknife support of splits to use during LGT filtering [0, 1].
min_per_taxa : float
Minimum percentage of taxa required to consider a split during LGT filtering [0, 1].
perc_markers_to_jackknife : float
Percentage of taxa to keep during marker jackknifing [0, 1].
gene_tree_dir : str
Directory containing gene trees.
alignment_dir : str
Directory containing multiple sequence alignments.
output_dir : str
Output directory.
"""
output_dir = os.path.join(output_dir, 'jackknife_markers')
make_sure_path_exists(output_dir)
# create concatenated alignment file
self.logger.info('Concatenating alignments.')
concatenated_alignment_file = os.path.join(output_dir, 'concatenated_alignment.faa')
marker_file = os.path.join(output_dir, 'concatenated_markers.tsv')
create_concatenated_alignment(genome_ids, marker_genes, alignment_dir, concatenated_alignment_file, marker_file)
# create concatenated genome tree
self.logger.info('Inferring concatenated genome tree.')
concatenated_tree = os.path.join(output_dir, 'concatenated.tree')
concatenated_tree_log = os.path.join(output_dir, 'concatenated.tree.log')
log_file = os.path.join(output_dir, 'concatenated.fasttree.log')
fast_tree = FastTree(multithreaded=True)
fast_tree.run(concatenated_alignment_file, 'prot', 'wag', concatenated_tree, concatenated_tree_log, log_file)
# calculate jackknife support values
self.logger.info('Calculating jackknife marker support values.')
jackknife_markers = JackknifeMarkers(self.cpus)
jackknife_tree = jackknife_markers.run(concatenated_tree, concatenated_alignment_file, marker_file, perc_markers_to_jackknife, 100, 'wag', output_dir)
# jackknife_tree = os.path.join(output_dir, 'concatenated.jk_markers.tree')
        # identify well-supported, internal splits
        self.logger.info('Identifying well-supported, internal splits.')
tree = dendropy.Tree.get_from_path(jackknife_tree, schema='newick', rooting='force-unrooted', preserve_underscores=True)
num_leaves = len(tree.leaf_nodes())
num_internal_nodes = 0
num_major_splits = 0
well_supported_major_splits = 0
splits = []
for node in tree.internal_nodes():
num_internal_nodes += 1
num_node_leaves = len(node.leaf_nodes())
if min(num_node_leaves, num_leaves - num_node_leaves) >= max(min_per_taxa * num_leaves, 2):
num_major_splits += 1
if int(node.label) > (min_support * 100.0):
well_supported_major_splits += 1
split = set([x.taxon.label for x in node.leaf_nodes()])
splits.append((split, node.edge_length))
self.logger.info('# internal nodes: %d' % num_internal_nodes)
self.logger.info('# major splits: %d' % num_major_splits)
self.logger.info('# well-supported, major splits: %d' % well_supported_major_splits)
        # filter gene trees that do not recover well-supported, internal splits
self.logger.info('Filtering gene trees.')
distances = {}
for i, mg in enumerate(sorted(marker_genes)):
sys.stdout.write('==> Processed %d of %d (%.2f) gene trees.\r' % (i + 1, len(marker_genes), (i + 1) * 100.0 / len(marker_genes)))
sys.stdout.flush()
# read gene tree
f = mg + '.tree'
gene_tree_file = os.path.join(gene_tree_dir, f)
gene_tree = dendropy.Tree.get_from_path(gene_tree_file, schema='newick', rooting='force-unrooted', preserve_underscores=True)
# prune gene tree so each genome is present exactly once
processed_genome_ids = set()
taxa_to_prune = []
for node in gene_tree.leaf_nodes():
genome_id = node.taxon.label.split(DefaultValues.SEQ_CONCAT_CHAR)[0]
if genome_id in processed_genome_ids or genome_id not in genome_ids:
taxa_to_prune.append(node.taxon)
processed_genome_ids.add(genome_id)
gene_tree.prune_taxa(taxa_to_prune)
# rename nodes to contain only genome id
gene_tree_taxa_set = set()
for node in gene_tree.leaf_nodes():
genome_id = node.taxon.label.split(DefaultValues.SEQ_CONCAT_CHAR)[0]
node.taxon.label = genome_id
gene_tree_taxa_set.add(genome_id)
# re-encode the split system over the new taxon namespace
gene_tree.migrate_taxon_namespace(dendropy.TaxonNamespace(gene_tree_taxa_set))
gene_tree.encode_bipartitions()
split_bitmasks = set(b.split_bitmask for b in gene_tree.bipartition_encoding)
# determine number of splits recovered by or compatible with this gene tree
recovered_splits = 0
compatible_splits = 0
compatible_edge_length = 0
for split, edge_length in splits:
common_taxa_labels = split.intersection(gene_tree_taxa_set)
common_split = gene_tree.taxon_namespace.taxa_bitmask(labels=common_taxa_labels)
normalized_split = dendropy.Bipartition.normalize_bitmask(
bitmask=common_split,
fill_bitmask=gene_tree.taxon_namespace.all_taxa_bitmask(),
lowest_relevant_bit=1)
if normalized_split in split_bitmasks:
recovered_splits += 1
if gene_tree.is_compatible_with_bipartition(dendropy.Bipartition(bitmask=normalized_split, is_rooted=False)):
compatible_splits += 1
compatible_edge_length += edge_length
perc_recovered_splits = recovered_splits * 100.0 / len(splits)
perc_comp_splits = compatible_splits * 100.0 / len(splits)
norm_comp_edge_length = float(compatible_edge_length) / sum([s[1] for s in splits])
# calculate weighted Robinson-Foulds (Manhattan) and Felsenstein's Euclidean
# distances to the concatenated genome tree
pruned_tree = tree.clone(depth=2)
pruned_tree.retain_taxa_with_labels(gene_tree.taxon_namespace.labels())
pruned_tree.migrate_taxon_namespace(gene_tree.taxon_namespace)
pruned_tree.encode_bipartitions()
pruned_tree_edge_len = sum([e.length for e in pruned_tree.edges() if e.length])
gene_tree_edge_len = sum([e.length for e in gene_tree.edges() if e.length])
pruned_tree.scale_edges(1.0 / pruned_tree_edge_len)
gene_tree.scale_edges(1.0 / gene_tree_edge_len)
manhattan = dendropy.calculate.treecompare.weighted_robinson_foulds_distance(pruned_tree, gene_tree)
euclidean = dendropy.calculate.treecompare.euclidean_distance(pruned_tree, gene_tree)
distances[mg] = (perc_recovered_splits, perc_comp_splits, norm_comp_edge_length, manhattan, euclidean)
return distances, num_internal_nodes, num_major_splits, well_supported_major_splits
|
gpl-3.0
| -828,814,402,445,759,400
| 47.294931
| 158
| 0.581284
| false
| 4.131711
| false
| false
| false
|
gabrielelanaro/pyquante
|
PyQuante/MINDO3.py
|
1
|
28237
|
"""\
MINDO3.py: Dewar's MINDO/3 Semiempirical Method
This program is part of the PyQuante quantum chemistry program suite.
Copyright (c) 2004, Richard P. Muller. All Rights Reserved.
PyQuante version 1.2 and later is covered by the modified BSD
license. Please see the file LICENSE that is part of this
distribution.
"""
from Constants import bohr2ang,e2,ev2kcal
from MINDO3_Parameters import axy,Bxy
from math import sqrt,exp,pow
from NumWrap import zeros,eigh,dot,array
from LA2 import mkdens,trace2
from PyQuante.Convergence import SimpleAverager
A0 = bohr2ang
def get_beta0(atnoi,atnoj):
"Resonanace integral for coupling between different atoms"
return Bxy[(min(atnoi,atnoj),max(atnoi,atnoj))]
def get_alpha(atnoi,atnoj):
"Part of the scale factor for the nuclear repulsion"
return axy[(min(atnoi,atnoj),max(atnoi,atnoj))]
def get_gamma(atomi,atomj):
"Coulomb repulsion that goes to the proper limit at R=0"
R2 = atomi.dist2(atomj)*bohr2ang**2
return e2/sqrt(R2+0.25*pow(atomi.rho+atomj.rho,2))
def get_g(bfi,bfj):
"Coulomb-like term for orbitals on the same atom"
i,j = bfi.type,bfj.type
assert bfi.atom is bfj.atom, "Incorrect call to get_g"
if i==0 and j==0:
return bfi.atom.gss
elif i==0 or j==0:
return bfi.atom.gsp
elif i==j:
return bfi.atom.gpp
return bfi.atom.gppp
def get_h(bfi,bfj):
"Exchange-like term for orbitals on the same atom"
i,j = bfi.type,bfj.type
assert bfi.atom is bfj.atom, "Incorrect call to get_h"
if i==0 or j==0:
return bfi.atom.hsp
return bfi.atom.hppp
def get_nbf(atoms):
"Number of basis functions in an atom list"
nbf = 0
for atom in atoms: nbf += atom.nbf
return nbf
def get_F0_old(atoms):
"Form the zero-iteration (density matrix independent) Fock matrix"
nbf = get_nbf(atoms)
nat = len(atoms)
F0 = zeros((nbf,nbf),'d')
ibf = 0 # bf number of the first bfn on iat
for iat in xrange(nat):
atomi = atoms[iat]
for i in xrange(atomi.nbf):
bfi = atomi.basis[i]
F0[ibf+i,ibf+i] = bfi.u
jbf = 0
for jat in xrange(nat):
atomj = atoms[jat]
if iat != jat:
gammaij = get_gamma(atomi,atomj)
betaij = get_beta0(atomi.atno,atomj.atno)
F0[ibf+i,ibf+i] -= gammaij*atomj.Z
for j in xrange(atomj.nbf):
bfj = atomj.basis[j]
Sij = bfi.cgbf.overlap(bfj.cgbf)
#Sij = mopac_overlap(bfi,bfj)
IPij = bfi.ip+bfj.ip
F0[ibf+i,jbf+j] = betaij*IPij*Sij
F0[jbf+j,ibf+i] = F0[ibf+i,jbf+j]
jbf += atomj.nbf
ibf += atomi.nbf
return F0
def get_F0(atoms):
"Form the zero-iteration (density matrix independent) Fock matrix"
nbf = get_nbf(atoms)
nat = len(atoms)
F0 = zeros((nbf,nbf),'d')
basis = []
for atom in atoms:
for bf in atom.basis:
basis.append(bf)
# U term
for i in xrange(nbf):
F0[i,i] = basis[i].u
# Nuclear attraction
ibf = 0 # bf number of the first bfn on iat
for iat in xrange(nat):
atomi = atoms[iat]
for jat in xrange(nat):
atomj = atoms[jat]
if iat == jat: continue
gammaij = get_gamma(atomi,atomj)
for i in xrange(atomi.nbf):
F0[ibf+i,ibf+i] -= gammaij*atomj.Z
ibf += atomi.nbf
# Off-diagonal term
for ibf in xrange(nbf):
bfi = basis[ibf]
ati = bfi.atom
atnoi = ati.atno
for jbf in xrange(ibf):
bfj = basis[jbf]
atj = bfj.atom
atnoj = atj.atno
betaij = get_beta0(atnoi,atnoj)
Sij = bfi.cgbf.overlap(bfj.cgbf)
IPij = bfi.ip + bfj.ip
F0[ibf,jbf] = F0[jbf,ibf] = betaij*IPij*Sij
return F0
def get_F1(atoms,D):
"One-center corrections to the core fock matrix"
nbf = get_nbf(atoms)
nat = len(atoms)
F1 = zeros((nbf,nbf),'d')
ibf = 0 # bf number of the first bfn on iat
for iat in xrange(nat):
atomi = atoms[iat]
for i in xrange(atomi.nbf):
bfi = atomi.basis[i]
gii = get_g(bfi,bfi)
qi = D[ibf+i,ibf+i]
F1[ibf+i,ibf+i] = 0.5*qi*gii
for j in xrange(atomi.nbf): # ij on same atom
if j != i:
bfj = atomi.basis[j]
qj = D[ibf+j,ibf+j]
gij = get_g(bfi,bfj)
pij = D[ibf+i,ibf+j]
hij = get_h(bfi,bfj)
# the following 0.5 is something of a kludge to match
# the mopac results.
F1[ibf+i,ibf+i] += qj*gij - 0.5*qj*hij
F1[ibf+i,ibf+j] += 0.5*pij*(3*hij-gij)
ibf += atomi.nbf
return F1
def get_F1_open(atoms,Da,Db):
"One-center corrections to the core fock matrix"
nbf = get_nbf(atoms)
nat = len(atoms)
F1 = zeros((nbf,nbf),'d')
ibf = 0 # bf number of the first bfn on iat
for iat in xrange(nat):
atomi = atoms[iat]
for i in xrange(atomi.nbf):
gii = get_g(atomi.basis[i],atomi.basis[i])
qib = Db[ibf+i,ibf+i]
#electron only interacts with the other electron in orb,
# not with itself
F1[ibf+i,ibf+i] = qib*gii
for j in xrange(atomi.nbf): # ij on same atom
if j != i:
qja = Da[ibf+j,ibf+j]
qjb = Db[ibf+j,ibf+j]
qj = qja+qjb
gij = get_g(atomi.basis[i],atomi.basis[j])
pija = Da[ibf+i,ibf+j]
pijb = Db[ibf+i,ibf+j]
pij = pija + pijb
hij = get_h(atomi.basis[i],atomi.basis[j])
# the following 0.5 is something of a kludge to match
# the mopac results.
F1[ibf+i,ibf+i] += qj*gij - qja*hij
F1[ibf+i,ibf+j] += 2*pij*hij - pija*(hij+gij)
ibf += atomi.nbf
return F1
Gij_cache = None
def get_F2(atoms,D,use_cache=False):
"Two-center corrections to the core fock matrix"
global Gij_cache
nbf = get_nbf(atoms)
nat = len(atoms)
F2 = zeros((nbf,nbf),'d')
# Optionally cache Gamma values
if use_cache and Gij_cache is None:
Gij_cache = zeros((nat,nat),'d')
for iat in xrange(nat):
atomi = atoms[iat]
for jat in xrange(iat):
atomj = atoms[jat]
Gij_cache[iat,jat] = get_gamma(atomi,atomj)
Gij_cache[jat,iat] = Gij_cache[iat,jat]
ibf = 0 # bf number of the first bfn on iat
for iat in xrange(nat):
atomi = atoms[iat]
jbf = 0
for jat in xrange(nat):
atomj = atoms[jat]
if iat != jat:
if use_cache:
gammaij = Gij_cache[iat,jat]
else:
gammaij = get_gamma(atomi,atomj)
for i in xrange(atomi.nbf):
qi = D[ibf+i,ibf+i]
qj = 0
for j in xrange(atomj.nbf):
pij = D[ibf+i,jbf+j]
F2[ibf+i,jbf+j] -= 0.25*pij*gammaij
F2[jbf+j,ibf+i] = F2[ibf+i,jbf+j]
qj += D[jbf+j,jbf+j]
F2[jbf+j,jbf+j] += 0.5*qi*gammaij
F2[ibf+i,ibf+i] += 0.5*qj*gammaij
jbf += atomj.nbf
ibf += atomi.nbf
return F2
def get_F2_open(atoms,Da,Db):
"Two-center corrections to the core fock matrix"
nbf = get_nbf(atoms)
nat = len(atoms)
F2 = zeros((nbf,nbf),'d')
ibf = 0 # bf number of the first bfn on iat
for iat in xrange(nat):
atomi = atoms[iat]
jbf = 0
for jat in xrange(nat):
atomj = atoms[jat]
if iat != jat:
gammaij = get_gamma(atomi,atomj)
for i in xrange(atomi.nbf):
for j in xrange(atomj.nbf):
pija = Da[ibf+i,jbf+j]
pijb = Db[ibf+i,jbf+j]
pij = pija+pijb
qja = Da[jbf+j,jbf+j]
qjb = Db[jbf+j,jbf+j]
qj = qja+qjb
qia = Da[ibf+i,ibf+i]
qib = Db[ibf+i,ibf+i]
qi = qia+qib
F2[ibf+i,jbf+j] -= 0.25*pij*gammaij
F2[jbf+j,ibf+i] = F2[ibf+i,jbf+j]
# The following 0.5 is a kludge
F2[ibf+i,ibf+i] += 0.5*qj*gammaij
F2[jbf+j,jbf+j] += 0.5*qi*gammaij
jbf += atomj.nbf
ibf += atomi.nbf
return F2
def get_nel(atoms,charge=0):
"Number of electrons in an atoms. Can be dependent on the charge"
nel = 0
for atom in atoms: nel += atom.Z
return nel-charge
def get_enuke(atoms):
"Compute the nuclear repulsion energy"
enuke = 0
for i in xrange(len(atoms)):
atomi = atoms[i]
for j in xrange(i):
atomj = atoms[j]
R2 = atomi.dist2(atomj)*bohr2ang**2
R = sqrt(R2)
scale = get_scale(atomi.atno,atomj.atno,R)
gammaij = get_gamma(atomi,atomj)
enuke_ij = atomi.Z*atomj.Z*gammaij \
+ abs(atomi.Z*atomj.Z*(e2/R-gammaij)*scale)
enuke += enuke_ij
#print "R ",i+1,j+1,enuke_ij,enuke
return enuke
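# A note on the pair term above (a sketch, assuming get_gamma is the usual
# MINDO/3 two-center integral e2/sqrt(R**2 + c2)): the per-pair repulsion is
#   E_AB = Z_A*Z_B*gamma_AB + |Z_A*Z_B*(e2/R - gamma_AB)*scale|,
# which reduces exactly to the point-charge value e2*Z_A*Z_B/R when
# scale = 1, since gamma_AB <= e2/R.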
def get_scale(atnoi,atnoj,R):
"Prefactor from the nuclear repulsion term"
alpha = get_alpha(atnoi,atnoj)
if atnoi == 1:
if atnoj == 7 or atnoj == 8:
return alpha*exp(-R)
elif atnoj == 1:
if atnoi == 7 or atnoi == 8:
return alpha*exp(-R)
return exp(-alpha*R)
def get_guess_D(atoms):
"Average occupation density matrix"
nbf = get_nbf(atoms)
D = zeros((nbf,nbf),'d')
ibf = 0
for atom in atoms:
atno = atom.atno
for i in xrange(atom.nbf):
if atno == 1:
D[ibf+i,ibf+i] = atom.Z/1.
else:
D[ibf+i,ibf+i] = atom.Z/4.
ibf += atom.nbf
return D
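# Quick check of the guess above: carbon has Z = 4 valence electrons spread
# over 4 basis functions (s, px, py, pz), so each diagonal entry gets 1.0;
# hydrogen has Z = 1 in a single s function, also 1.0.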
def get_reference_energy(atoms):
"Ref = heat of formation - energy of atomization"
eat = 0
hfat = 0
for atom in atoms:
eat += atom.Eref
hfat += atom.Hf
return hfat-eat*ev2kcal
def get_open_closed(nel,mult=None):
"Get the number of open/closed orbitals based on nel & multiplicity"
nclosed,nopen = divmod(nel,2)
if mult: #test the multiplicity
nopen = mult-1
nclosed,ntest = divmod(nel-nopen,2)
if ntest:
raise Exception("Impossible nel, multiplicity %d %d " % (nel,mult))
return nclosed,nopen
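# Worked example of the divmod logic above: an OH radical has nel = 9, so
# with no multiplicity given divmod(9, 2) yields nclosed = 4, nopen = 1.
# Forcing a quartet (mult = 4) gives nopen = 3 and divmod(6, 2) = (3, 0),
# i.e. nclosed = 3; a nonzero remainder would raise the
# "Impossible nel, multiplicity" exception.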
def get_Hf(atoms,Eel):
Enuke = get_enuke(atoms)
Eref = get_reference_energy(atoms)
Etot = Eel + Enuke
return Etot*ev2kcal+Eref
def scf(atoms,**opts):
"Driver routine for energy calculations"
chg = opts.get('chg',0)
mult = opts.get('mult',None)
verbose = opts.get('verbose',False)
atoms = initialize(atoms)
nel = get_nel(atoms)-int(chg)
nclosed,nopen = get_open_closed(nel,mult)
Enuke = get_enuke(atoms)
nbf = get_nbf(atoms)
eref = get_reference_energy(atoms)
if verbose:
print "Nel = %d, Nclosed = %d, Nopen = %d," % (nel,nclosed,nopen), \
"Enuke = %10.4f, Nbf = %d" % (Enuke,nbf)
F0 = get_F0(atoms)
if nopen:
Eel = scfopen(atoms,F0,nclosed+nopen,nclosed,**opts)
else:
Eel = scfclosed(atoms,F0,nclosed,**opts)
Etot = Eel+Enuke
Hf = Etot*ev2kcal+eref
if verbose: print "Final Heat of Formation = ",Hf
return Hf
def scfclosed(atoms,F0,nclosed,**opts):
"SCF procedure for closed-shell molecules"
verbose = opts.get('verbose',False)
do_avg = opts.get('avg',False)
maxiter = opts.get('maxiter',50)
D = get_guess_D(atoms)
Eold = 0
if do_avg: avg = SimpleAverager(do_avg)
for i in xrange(maxiter):
if do_avg: D = avg.getD(D)
F1 = get_F1(atoms,D)
F2 = get_F2(atoms,D)
F = F0+F1+F2
Eel = 0.5*trace2(D,F0+F)
if verbose: print i+1,Eel,get_Hf(atoms,Eel)
#if verbose: print i+1,Eel
if abs(Eel-Eold) < 0.001:
if verbose:
print "Exiting because converged",i+1,Eel,Eold
break
Eold = Eel
orbe,orbs = eigh(F)
D = 2*mkdens(orbs,0,nclosed)
return Eel
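# Note on the loop above: with F = F0 + F1 + F2, the closed-shell
# electronic energy Eel = 0.5*Tr[D*(F0 + F)] weights the one-electron core
# term F0 fully and the two-electron terms F1 + F2 by one half -- the
# standard SCF energy functional, which avoids double counting the
# electron-electron repulsion.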
def scfopen(atoms,F0,nalpha,nbeta,**opts):
"SCF procedure for open-shell molecules"
verbose = opts.get('verbose',False)
D = get_guess_D(atoms)
Da = 0.5*D
Db = 0.5*D
Eold = 0
for i in xrange(10):
F1a = get_F1_open(atoms,Da,Db)
F1b = get_F1_open(atoms,Db,Da)
F2a = get_F2_open(atoms,Da,Db)
F2b = get_F2_open(atoms,Db,Da)
Fa = F0+F1a+F2a
Fb = F0+F1b+F2b
Eel = 0.5*trace2(Da,F0+Fa)+0.5*trace2(Db,F0+Fb)
if verbose: print i,Eel
if abs(Eel-Eold) < 0.001: break
Eold = Eel
orbea,orbsa = eigh(Fa)
orbeb,orbsb = eigh(Fb)
Da = mkdens(orbsa,0,nalpha)
Db = mkdens(orbsb,0,nbeta)
return Eel
def initialize(atoms):
"Assign parameters for the rest of the calculation"
from Slater import gauss_powers,gexps,gcoefs,s_or_p
from MINDO3_Parameters import Uss,Upp,IPs,IPp,CoreQ,f03,nbfat,\
zetas,zetap,Eat,Hfat,gss,gsp,gpp,gppp,hsp,hppp,NQN
from CGBF import CGBF
from Bunch import Bunch # Generic object to hold basis functions
ibf = 0 # Counter to overall basis function count
for atom in atoms:
xyz = atom.pos()
atom.Z = CoreQ[atom.atno]
atom.basis = []
atom.rho = e2/f03[atom.atno]
atom.nbf = nbfat[atom.atno]
atom.Eref = Eat[atom.atno]
atom.Hf = Hfat[atom.atno]
atom.gss = gss[atom.atno]
atom.gsp = gsp[atom.atno]
atom.gpp = gpp[atom.atno]
atom.gppp = gppp[atom.atno]
atom.hsp = hsp[atom.atno]
atom.hppp = hppp[atom.atno]
for i in xrange(atom.nbf):
bfunc = Bunch()
atom.basis.append(bfunc)
bfunc.index = ibf # pointer to overall basis function index
ibf += 1
bfunc.type = i # s,x,y,z
bfunc.atom = atom # pointer to parent atom
bfunc.cgbf = CGBF(xyz,gauss_powers[i])
zi = gexps[(NQN[atom.atno],s_or_p[i])]
ci = gcoefs[(NQN[atom.atno],s_or_p[i])]
if i:
zeta = zetap[atom.atno]
bfunc.u = Upp[atom.atno]
bfunc.ip = IPp[atom.atno]
else:
zeta = zetas[atom.atno]
bfunc.u = Uss[atom.atno]
bfunc.ip = IPs[atom.atno]
for j in xrange(len(zi)):
bfunc.cgbf.add_primitive(zi[j]*zeta*zeta,ci[j])
bfunc.cgbf.normalize()
return atoms
def get_fock(atoms):
"Just return the 0th iteration fock matrix"
atoms = initialize(atoms)
F0 = get_F0(atoms)
D = get_guess_D(atoms)
F1 = get_F1(atoms,D)
F2 = get_F2(atoms,D)
return F0+F1+F2
def energy_forces_factories(atoms,**kwargs):
    # This is a factory function. It creates two functions: one that,
    # given a vector of coordinates, returns an energy, and another that,
    # given a vector of coordinates, returns a vector of gradients. The
    # factory also returns a list of initial coordinates. The two
    # functions and the initial coordinates are what the optimizer
    # routines expect; a usage sketch follows this function.
verbose_level = kwargs.get('verbose_level',0)
return_etot_as_e = kwargs.get('return_etot_as_e',False)
numeric_forces = kwargs.get('numeric_forces',False)
nat = len(atoms)
coords = zeros(3*nat,'d')
for i in xrange(nat):
for j in xrange(3):
coords[3*i+j] = atoms[i].r[j]
def Efunc(cnew):
for i in xrange(nat):
for j in xrange(3):
atoms[i].r[j] = cnew[3*i+j]
Hf,F = get_energy_forces(atoms,doforces=False)
if verbose_level > 1:
print "MINDO3 energy calculation requested:"
print atoms
print Hf
# Recompute the total energy:
eref = get_reference_energy(atoms)
Etot = (Hf-eref)/ev2kcal
if return_etot_as_e: return Etot
return Hf
def Ffunc(cnew):
for i in xrange(nat):
for j in xrange(3):
atoms[i].r[j] = cnew[3*i+j]
Hf,Forces = get_energy_forces(atoms,doforces=True)
F = zeros(3*nat,'d')
for i in xrange(nat):
for j in xrange(3):
F[3*i+j] = Forces[i,j]
if verbose_level > 0:
print "MINDO3 gradient calculation requested:"
print atoms
print Hf
return F
def Ffunc_num(cnew):
E0 = Efunc(cnew)
F = zeros(3*nat,'d')
ei = zeros(3*nat,'d')
dx = 1e-7
for i in xrange(nat):
for j in xrange(3):
ei[3*i+j] = 1.0
E1 = Efunc(cnew+ei*dx)
ei[3*i+j] = 0.0
F[3*i+j] = (E1-E0)/dx
if verbose_level > 0:
print "MINDO3 gradient calculation requested:"
print atoms
            print E0
return F
if numeric_forces: return coords,Efunc,Ffunc_num
return coords,Efunc,Ffunc
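def _example_optimize(atoms):
    # Hedged usage sketch for the factory above (not part of the original
    # API): feed the energy and gradient callables to BFGS. Numeric forces
    # are requested, since opt() below notes that the analytic ones are
    # not yet reliable.
    from PyQuante.optimize import fminBFGS
    c0, Efunc, Ffunc = energy_forces_factories(atoms, numeric_forces=True)
    copt = fminBFGS(Efunc, c0, Ffunc, avegtol=1e-4)
    return Efunc(copt), copt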
def opt(atoms,**kwargs):
from PyQuante.optimize import fminBFGS
c0,Efunc,Ffunc = energy_forces_factories(atoms,**kwargs)
print "C0 = ",c0
# Currently optimization works when I use Energies and numerical
# forces, but not using the analytical forces. Obviously something
# is wrong somewhere here, but I don't have time to fix this now.
# Hopefully the final fix won't be too hard.
copt = fminBFGS(Efunc,c0,Ffunc,avegtol=1e-4)
#copt = fminBFGS(Efunc,c0,None,avegtol=1e-4)
Efinal = Efunc(copt)
return Efinal,copt
def get_energy_forces(atoms,**opts):
opts['return_energy'] = True
return numeric_forces(atoms,**opts)
def numeric_forces(atoms,D=None,**opts):
"Compute numerical forces on atoms"
# D is ignored here.
dx = opts.get('dx',1e-6)
sym = opts.get('sym',True)
return_energy = opts.get('return_energy',False)
nat = len(atoms)
Forces = zeros((nat,3),'d')
E0 = scf(atoms)
for iat in xrange(nat):
for idir in xrange(3):
dr = zeros(3,'d')
dr[idir] = dx
atoms[iat].translate(dr)
Ep = scf(atoms)
atoms[iat].translate(-dr)
if sym:
atoms[iat].translate(-dr)
Em = scf(atoms)
atoms[iat].translate(dr)
Forces[iat,idir] = 0.5*(Ep-Em)/dx
else:
Forces[iat,idir] = (Ep-E0)/dx
if return_energy: return E0,Forces
return Forces
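# The symmetric branch above is a central difference,
#   F[iat,idir] ~ (E(x+dx) - E(x-dx)) / (2*dx),
# accurate to O(dx**2) at the cost of two SCF calls per coordinate; the
# one-sided branch (Ep - E0)/dx needs only one call per coordinate but is
# accurate only to O(dx).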
def forces(atoms,D):
"Compute analytic forces on list of atoms"
print "Warning: Analytical forces not tested yet!"
nat = len(atoms)
Forces = zeros((nat,3),'d')
# Loop over all pairs of atoms and compute the force between them
#cached_dSij = full_dSij(atoms)
for iat in xrange(nat):
atomi = atoms[iat]
for jat in xrange(iat):
atomj = atoms[jat]
alpha = get_alpha(atomi.atno,atomj.atno)
beta = get_beta0(atomi.atno,atomj.atno)
R2 = atomi.dist2(atomj)*bohr2ang**2
R = sqrt(R2)
c2 = 0.25*pow(atomi.rho+atomj.rho,2)
for dir in xrange(3):
Fij = 0 # Force between atoms iat and jat in direction dir
# initialize some constants
delta = atomi.r[dir]-atomj.r[dir]
c1 = delta*atomi.Z*atomj.Z*e2/R
dr1 = e2*delta*pow(R2+c2,-1.5)
# Nuclear repulsion terms
if ( (atomi.atno == 1
and (atomj.atno == 7 or atomj.atno == 8))
or (atomj.atno == 1
and (atomi.atno == 7 or atomi.atno == 8))):
# Special case of NH or OH bonds
Fij += -c1*alpha*(1/R2 - R*pow(R2+c2,-1.5)
+ 1/R - 1/sqrt(R2+c2))*exp(-R) \
- c1*R*pow(R2+c2,-1.5)
else:
Fij += -c1*(1/R2 - R*pow(R2+c2,-1.5) + alpha/R
- alpha/sqrt(R2+c2))*exp(-alpha*R) \
- c1*R*pow(R2+c2,-1.5)
# Overlap terms
for bfi in atomi.basis:
for bfj in atomj.basis:
Dij = D[bfi.index,bfj.index]
dSij = mopac_doverlap(bfi,bfj,dir)
#dSij = -bfi.cgbf.doverlap(bfj.cgbf,dir)/bohr2ang
#dSij = -bfi.cgbf.doverlap_num(bfj.cgbf,dir)/bohr2ang
Fij += 2*beta*(bfi.ip+bfj.ip)*Dij*dSij
# Core attraction terms
for bfj in atomj.basis:
Fij += atomi.Z*D[bfj.index,bfj.index]*dr1
for bfi in atomi.basis:
Fij += atomj.Z*D[bfi.index,bfi.index]*dr1
# Two-electron terms
for bfi in atomi.basis:
for bfj in atomj.basis:
Dii = D[bfi.index,bfi.index]
Djj = D[bfj.index,bfj.index]
Dij = D[bfi.index,bfj.index]
# exchange is the first term, coulomb is second:
Fij += 0.5*dr1*pow(Dij,2)-dr1*Dii*Djj
# Now sum total forces and convert to kcal/mol
Forces[iat][dir] += ev2kcal*Fij
Forces[jat][dir] -= ev2kcal*Fij
return Forces
def mopac_overlap(bfi,bfj): # from the routine gover.f
cgbfi,cgbfj = bfi.cgbf,bfj.cgbf
ri = cgbfi.origin # distance in bohr
rj = cgbfj.origin
RR = pow(ri[0]-rj[0],2)+pow(ri[1]-rj[1],2)+pow(ri[2]-rj[2],2)
itype = bfi.type
jtype = bfj.type
Sij = 0
    for primi in cgbfi.prims:
        primialpha = primi.exp
        for primj in cgbfj.prims:
            primjalpha = primj.exp
            amb = primialpha + primjalpha
            apb = primialpha * primjalpha
adb = apb/amb
if itype > 0 and jtype > 0:
#is = 4
tomb = (ri[itype-1]-rj[itype-1])*(ri[jtype-1]-rj[jtype-1])
abn = -adb*tomb
if itype == jtype: abn = abn + 0.5
abn = 4*abn*sqrt(apb)/amb
elif itype > 0:
#is = 3
tomb = (ri[itype-1]-rj[itype-1])
abn = -2*tomb*primjalpha*sqrt(primialpha)/amb
elif jtype > 0:
#is = 2
tomb = (ri[jtype-1]-rj[jtype-1])
abn = 2*tomb*primialpha*sqrt(primjalpha)/amb
else:
#is = 1
abn = 1.0
if adb*RR < 90:
Sij += primi.coef*primj.coef*\
pow(2*sqrt(apb)/amb,1.5)*exp(-adb*RR)*abn
return Sij
def mopac_doverlap(bfi,bfj,direction): # from the routine dcart.f
cgbfi,cgbfj = bfi.cgbf,bfj.cgbf
ri = cgbfi.origin # distance in bohr
rj = cgbfj.origin
RR = pow(ri[0]-rj[0],2)+pow(ri[1]-rj[1],2)+pow(ri[2]-rj[2],2)
del1 = ri[direction] - rj[direction]
itype = bfi.type
jtype = bfj.type
DS = 0
for primi in cgbfi.prims:
primialpha = primi.exp
for primj in cgbfj.prims:
primjalpha = primj.exp
del2 = del3 = 0
SS = 0
apb = primialpha*primjalpha
amb = primialpha+primjalpha
adb = apb/amb
adr = min(adb*RR,35.0)
if itype == 0 and jtype == 0: # ss
# is=1
abn = -2.*adb*del1/A0
elif itype == 0 and jtype > 0: # sp
if jtype-1 == direction:
#is = 3
abn = 2*adb/sqrt(primjalpha)*(1-2*adb*del1*del1)/A0
else:
#is = 2
del2 = ri[jtype-1]-rj[jtype-1]
abn = -4*adb*adb*del1*del2/sqrt(primjalpha)/A0
elif itype > 0 and jtype == 0: # ps
if itype-1 == direction:
#is = 5
abn = -2*adb/sqrt(primialpha)*(1-2*adb*del1*del1)/A0
else:
#is = 4
del2 = ri[itype-1]-rj[itype-1]
abn = 4*adb*adb*del1*del2/sqrt(primialpha)/A0
elif itype == jtype:
if direction == itype-1:
#is = 9 (p|p)
abn=-8*adb*adb*del1/sqrt(apb)*(1.5-adb*del1*del1)/A0
else:
#is = 8 (p'|p')
del2 = ri[jtype-1]-rj[jtype-1]
abn=-8*pow(adb,2)*del1/sqrt(apb)*(0.5-adb*del2*del2)/A0
elif (direction != itype-1) and (direction != jtype-1):
#is = 7(p'|p")
del2 = ri[itype-1] - rj[itype-1]
del3 = ri[jtype-1] - rj[jtype-1]
abn=8*pow(adb,3)*del1*del2*del3/sqrt(apb)/A0
else:
#is = 6 (p|p') or (p'|p)
del2 = ri[itype+jtype-direction-2]-rj[itype+jtype-direction-2]
abn=-4*adb*adb*del2/sqrt(apb)*(1-2*adb*del1*del1)/A0
SS = pow(2*sqrt(apb)/amb,1.5)*exp(-adr)*abn
DS += SS*primi.coef*primj.coef
return DS
def test_olap():
    # Test function to compare the results of my CGBF overlap routines to
    # those of MOPAC's. The issue is that the derivatives give different
    # results.
from math import sin,cos
from copy import deepcopy
delta = 0.001
for theta in [0.,10.,20.,30.,45.,55.214134,90.]:
at1 = (1,(0,0,0))
at2 = (6,(cos(theta),sin(theta),0.1))
atoms = initialize([at1,at2])
bfi = atoms[0].basis[0]
bfj = atoms[1].basis[2]
dSijx = mopac_doverlap(bfi,bfj,0)
dSijy = mopac_doverlap(bfi,bfj,1)
dSijz = mopac_doverlap(bfi,bfj,2)
dSijx2 = -bfi.cgbf.doverlap(bfj.cgbf,0)/bohr2ang
dSijy2 = -bfi.cgbf.doverlap(bfj.cgbf,1)/bohr2ang
dSijz2 = -bfi.cgbf.doverlap(bfj.cgbf,2)/bohr2ang
dSijx4 = -bfi.cgbf.doverlap_num(bfj.cgbf,0)/bohr2ang
dSijy4 = -bfi.cgbf.doverlap_num(bfj.cgbf,1)/bohr2ang
dSijz4 = -bfi.cgbf.doverlap_num(bfj.cgbf,2)/bohr2ang
print "%2d %6.3f %6.3f %6.3f %6.3f %6.3f %6.3f " %\
(theta,dSijx,dSijy,dSijz,dSijx2,dSijy2,dSijz2)
return
def write_mopac_input(atoms,fname=None):
from PyQuante.Element import symbol
from PyQuante.Constants import bohr2ang
if not fname: fname = atoms.name + ".dat"
lines = ['MINDO3',atoms.name,'Input file written by PyQuante']
for atom in atoms:
atno = atom.atno
sym = symbol[atno]
x,y,z = [bohr2ang*i for i in atom.r]
lines.append('%s %10.4f 0 %10.4f 0 %10.4f 0'
% (sym,x,y,z))
open(fname,'w').write('\n'.join(lines))
return
if __name__ == '__main__':
from Molecule import Molecule
h2o = Molecule('H2O',atomlist=[(8,(0,0,0)),(1,(1.,0,0)),(1,(0,1.,0))])
oh = Molecule('OH',atomlist=[(8,(0,0,0)),(1,(1.,0,0))])
ch4 = Molecule('Methane', atomlist =
[(6,(0,0,0)),(1,(1.,0,0)),(1,(0,1.,0)),
(1,(0,0,1.)),(1,(0,0,-1.))])
print scf(h2o)
print scf(oh)
print scf(ch4)
#E,F = get_energy_forces(ch4)
#for Fi in F: print Fi
#import profile,pstats
#profile.run('get_energy_forces(ch4)','prof')
#prof = pstats.Stats('prof')
#prof.strip_dirs().sort_stats('time').print_stats(15)
#test_olap()
|
bsd-3-clause
| 7,576,133,613,060,574,000
| 33.351582
| 79
| 0.515458
| false
| 2.914336
| false
| false
| false
|
hoh/Billabong
|
billabong/storage/__init__.py
|
1
|
1393
|
# Copyright (c) 2015 "Hugo Herter http://hugoherter.com"
#
# This file is part of Billabong.
#
# Billabong is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Blob Storage."""
from .abstract import Storage
from .folder import FolderStorage
from .http import HTTPStorage
from .ssh import SSHStorage
assert Storage
assert FolderStorage
assert HTTPStorage
assert SSHStorage
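# The bare asserts above simply mark the re-exported names as used so that
# linters do not flag the imports.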
def load_store(store_settings):
"""Instanciate a storage instance from settings."""
type_ = store_settings['type']
args = store_settings.get('args', {})
if type_ == 'FolderStorage':
return FolderStorage(**args)
elif type_ == 'HTTPStorage':
return HTTPStorage(**args)
elif type_ == 'SSHStorage':
return SSHStorage(**args)
else:
raise ValueError("Unknown type", type_)
|
agpl-3.0
| -8,488,347,488,452,236,000
| 30.659091
| 74
| 0.723618
| false
| 4.049419
| false
| false
| false
|
dariocorral/panoanda
|
panoanda/candles.py
|
1
|
3916
|
"""
Created on Sun Sep 17 09:27:31 2017
@author: dariocorral
"""
import pandas as pd
from datetime import timedelta
from tickers import Tickers
from hourOffset import Hour
class Candles(object):
"""
OANDA Historical Rates ready to use with Pandas
"""
def __init__(self):
self.__hour = Hour()
self.__tickers = Tickers()
    def dataframe(self, periods, granularity, sundayCandle, *ticker):
"""
OANDA Historical Rates
:param periods: number of periods
:type: int
        :param granularity: OANDA timeframes:
“S5” - 5 seconds
“S10” - 10 seconds
“S15” - 15 seconds
“S30” - 30 seconds
“M1” - 1 minute
“M2” - 2 minutes
“M3” - 3 minutes
“M4” - 4 minutes
“M5” - 5 minutes
“M10” - 10 minutes
“M15” - 15 minutes
“M30” - 30 minutes
“H1” - 1 hour
“H2” - 2 hours
“H3” - 3 hours
“H4” - 4 hours
“H6” - 6 hours
“H8” - 8 hours
“H12” - 12 hours
“D” - 1 Day
“W” - 1 Week
“M” - 1 Month
:type: string
:param sundayCandle: True -> Sunday Candles included
False -> No Sunday Candles
        :type: bool
:param ticker: required instruments format OANDA API
:type: str, tuple or list
:return: dataFrame object
"""
#Define empty dataframe
df = pd.DataFrame()
for instr in ticker:
histRates = self.__tickers._oanda_api.get_history(count =
int(periods * 1.2), instrument= instr,
candleFormat = 'midpoint',granularity= granularity,
dailyAlignment= (self.__hour.hour_offset_calculate(
6 ,
self.__hour.offset_NY_GMT)),
weeklyAlignment='Monday')
#From dict to dataframe
histRates = histRates.get('candles')
histRates = pd.DataFrame.from_dict(histRates)
histRates['ticker'] = instr
histRates['time'] = pd.to_datetime(histRates['time'])
#Apply GMT_hours_offset to local time
histRates['time'] += timedelta(hours =
self.__hour.offset_local_GMT)
histRates.set_index ('time', inplace = True)
#Sunday candle filter
if sundayCandle == False:
histRates['Weekday'] = histRates.index.weekday
histRates = histRates.loc[histRates['Weekday'] != 6]
histRates = histRates.tail(periods)
else:
histRates = histRates.tail(periods)
#Daily and weekly granularity in date format without hours
if granularity == 'D' or granularity == 'W':
histRates.index = histRates.index.date
#Columns definition
histRates= histRates[['ticker','openMid','highMid','lowMid',
'closeMid','volume','complete']]
histRates.columns = ('ticker','open','high','low','close','volume',
'complete')
df = df.append(histRates)
return df
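def _example_usage():
    # Hedged usage sketch (not part of the original module): fetch 100
    # hourly candles for two instruments, Sunday candles excluded.
    # Assumes Tickers is configured with valid OANDA API credentials.
    candles = Candles()
    return candles.dataframe(100, 'H1', False, 'EUR_USD', 'GBP_USD')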
|
mit
| 5,409,763,454,632,025,000
| 32.008621
| 81
| 0.439394
| false
| 4.535545
| false
| false
| false
|
JioCloud/cinder
|
cinder/volume/drivers/drbdmanagedrv.py
|
1
|
19926
|
# Copyright (c) 2014 LINBIT HA Solutions GmbH
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
This driver connects Cinder to an installed DRBDmanage instance, see
http://oss.linbit.com/drbdmanage/
http://git.linbit.com/drbdmanage.git/
for more details.
"""
import six
import uuid
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import importutils
from oslo_utils import units
from cinder import exception
from cinder.i18n import _, _LW, _LI
from cinder.volume import driver
try:
import dbus
import drbdmanage.consts as dm_const
import drbdmanage.exceptions as dm_exc
import drbdmanage.utils as dm_utils
except ImportError:
dbus = None
dm_const = None
dm_exc = None
dm_utils = None
LOG = logging.getLogger(__name__)
drbd_opts = [
cfg.StrOpt('drbdmanage_redundancy',
default='1',
help='Number of nodes that should replicate the data.'),
# TODO(PM): offsite_redundancy?
# TODO(PM): choose DRBDmanage storage pool?
]
CONF = cfg.CONF
CONF.register_opts(drbd_opts)
AUX_PROP_CINDER_VOL_ID = "cinder-id"
DM_VN_PREFIX = 'CV_' # sadly 2CV isn't allowed by DRBDmanage
DM_SN_PREFIX = 'SN_'
class DrbdManageDriver(driver.VolumeDriver):
"""Cinder driver that uses DRBDmanage for storage."""
VERSION = '1.0.0'
drbdmanage_dbus_name = 'org.drbd.drbdmanaged'
drbdmanage_dbus_interface = '/interface'
def __init__(self, *args, **kwargs):
self.empty_list = dbus.Array([], signature="a(s)")
self.empty_dict = dbus.Array([], signature="a(ss)")
super(DrbdManageDriver, self).__init__(*args, **kwargs)
self.configuration.append_config_values(drbd_opts)
if not self.drbdmanage_dbus_name:
self.drbdmanage_dbus_name = 'org.drbd.drbdmanaged'
if not self.drbdmanage_dbus_interface:
self.drbdmanage_dbus_interface = '/interface'
self.drbdmanage_redundancy = int(getattr(self.configuration,
'drbdmanage_redundancy', 1))
self.dm_control_vol = ".drbdctrl"
# Copied from the LVM driver, see
# I43190d1dac33748fe55fa00f260f32ab209be656
target_driver = self.target_mapping[
self.configuration.safe_get('iscsi_helper')]
LOG.debug('Attempting to initialize DRBD driver with the '
'following target_driver: %s',
target_driver)
self.target_driver = importutils.import_object(
target_driver,
configuration=self.configuration,
db=self.db,
executor=self._execute)
def dbus_connect(self):
self.odm = dbus.SystemBus().get_object(self.drbdmanage_dbus_name,
self.drbdmanage_dbus_interface)
self.odm.ping()
def call_or_reconnect(self, fn, *args):
"""Call DBUS function; on a disconnect try once to reconnect."""
try:
return fn(*args)
except dbus.DBusException as e:
LOG.warning(_LW("Got disconnected; trying to reconnect. (%s)"), e)
self.dbus_connect()
# Old function object is invalid, get new one.
return getattr(self.odm, fn._method_name)(*args)
def do_setup(self, context):
"""Any initialization the volume driver does while starting."""
super(DrbdManageDriver, self).do_setup(context)
self.dbus_connect()
def check_for_setup_error(self):
"""Verify that requirements are in place to use DRBDmanage driver."""
if not all((dbus, dm_exc, dm_const, dm_utils)):
msg = _('DRBDmanage driver setup error: some required '
'libraries (dbus, drbdmanage.*) not found.')
LOG.error(msg)
raise exception.VolumeDriverException(message=msg)
if self.odm.ping() != 0:
message = _('Cannot ping DRBDmanage backend')
raise exception.VolumeBackendAPIException(data=message)
def _clean_uuid(self):
"""Returns a UUID string, WITHOUT braces."""
# Some uuid library versions put braces around the result!?
# We don't want them, just a plain [0-9a-f-]+ string.
id = str(uuid.uuid4())
id = id.replace("{", "")
id = id.replace("}", "")
return id
def _check_result(self, res, ignore=None, ret=0):
seen_success = False
seen_error = False
result = ret
for (code, fmt, arg_l) in res:
# convert from DBUS to Python
arg = dict(arg_l)
if ignore and code in ignore:
if not result:
result = code
continue
if code == dm_exc.DM_SUCCESS:
seen_success = True
continue
seen_error = _("Received error string: %s") % (fmt % arg)
if seen_error:
raise exception.VolumeBackendAPIException(data=seen_error)
if seen_success:
return ret
# by default okay - or the ignored error code.
return ret
# DRBDmanage works in kiB units; Cinder uses GiB.
def _vol_size_to_dm(self, size):
return int(size * units.Gi / units.Ki)
def _vol_size_to_cinder(self, size):
return int(size * units.Ki / units.Gi)
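    # Worked example of the conversions above: a 2 GiB Cinder volume is
    # 2 * units.Gi / units.Ki = 2097152 kiB on the DRBDmanage side, and
    # _vol_size_to_cinder truncates that back to 2 GiB.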
def is_clean_volume_name(self, name, prefix):
try:
if (name.startswith(CONF.volume_name_template % "") and
uuid.UUID(name[7:]) is not None):
return prefix + name[7:]
except ValueError:
return None
try:
if uuid.UUID(name) is not None:
return prefix + name
except ValueError:
return None
def _priv_hash_from_volume(self, volume):
return dm_utils.dict_to_aux_props({
AUX_PROP_CINDER_VOL_ID: volume['id'],
})
def snapshot_name_from_cinder_snapshot(self, snapshot):
sn_name = self.is_clean_volume_name(snapshot['id'], DM_SN_PREFIX)
return sn_name
def _res_and_vl_data_for_volume(self, volume, empty_ok=False):
"""Find DRBD resource and volume ID.
A DRBD resource might consist of several "volumes"
(think consistency groups).
So we have to find the number of the volume within one resource.
Returns resource name, volume number, and resource
and volume properties.
"""
# If we get a string, use it as-is.
# Else it's a dictionary; then get the ID.
if isinstance(volume, six.string_types):
v_uuid = volume
else:
v_uuid = volume['id']
res, rl = self.call_or_reconnect(self.odm.list_volumes,
self.empty_dict,
0,
dm_utils.dict_to_aux_props(
{AUX_PROP_CINDER_VOL_ID: v_uuid}),
self.empty_dict)
self._check_result(res)
if (not rl) or (len(rl) == 0):
if empty_ok:
LOG.debug("No volume %s found.", v_uuid)
return None, None, None, None
raise exception.VolumeBackendAPIException(
data=_("volume %s not found in drbdmanage") % v_uuid)
if len(rl) > 1:
raise exception.VolumeBackendAPIException(
data=_("multiple resources with name %s found by drbdmanage") %
v_uuid)
(r_name, r_props, vols) = rl[0]
if len(vols) != 1:
raise exception.VolumeBackendAPIException(
data=_("not exactly one volume with id %s") %
v_uuid)
(v_nr, v_props) = vols[0]
LOG.debug("volume %(uuid)s is %(res)s/%(nr)d; %(rprop)s, %(vprop)s",
{'uuid': v_uuid, 'res': r_name, 'nr': v_nr,
'rprop': r_props, 'vprop': v_props})
return r_name, v_nr, r_props, v_props
def _resource_and_snap_data_from_snapshot(self, snapshot, empty_ok=False):
"""Find DRBD resource and snapshot name from the snapshot ID."""
s_uuid = snapshot['id']
res, rs = self.call_or_reconnect(self.odm.list_snapshots,
self.empty_dict,
self.empty_dict,
0,
dm_utils.dict_to_aux_props(
{AUX_PROP_CINDER_VOL_ID: s_uuid}),
self.empty_dict)
self._check_result(res)
if (not rs) or (len(rs) == 0):
if empty_ok:
return None
else:
raise exception.VolumeBackendAPIException(
data=_("no snapshot with id %s found in drbdmanage") %
s_uuid)
if len(rs) > 1:
raise exception.VolumeBackendAPIException(
data=_("multiple resources with snapshot ID %s found") %
s_uuid)
(r_name, snaps) = rs[0]
if len(snaps) != 1:
raise exception.VolumeBackendAPIException(
data=_("not exactly one snapshot with id %s") % s_uuid)
(s_name, s_props) = snaps[0]
LOG.debug("snapshot %(uuid)s is %(res)s/%(snap)s",
{'uuid': s_uuid, 'res': r_name, 'snap': s_name})
return r_name, s_name, s_props
def _resource_name_volnr_for_volume(self, volume, empty_ok=False):
res, vol, _, _ = self._res_and_vl_data_for_volume(volume, empty_ok)
return res, vol
def local_path(self, volume):
dres, dvol = self._resource_name_volnr_for_volume(volume)
res, data = self.call_or_reconnect(self.odm.text_query,
[dm_const.TQ_GET_PATH,
dres,
str(dvol)])
self._check_result(res)
if len(data) == 1:
return data[0]
message = _('Got bad path information from DRBDmanage! (%s)') % data
raise exception.VolumeBackendAPIException(data=message)
def create_volume(self, volume):
"""Creates a DRBD resource.
We address it later on via the ID that gets stored
as a private property.
"""
# TODO(PM): consistency groups
dres = self.is_clean_volume_name(volume['id'], DM_VN_PREFIX)
res = self.call_or_reconnect(self.odm.create_resource,
dres,
self.empty_dict)
self._check_result(res, ignore=[dm_exc.DM_EEXIST], ret=None)
# If we get DM_EEXIST, then the volume already exists, eg. because
# deploy gave an error on a previous try (like ENOSPC).
# Still, there might or might not be the volume in the resource -
# we have to check that explicitly.
(_, drbd_vol) = self._resource_name_volnr_for_volume(volume,
empty_ok=True)
if not drbd_vol:
props = self._priv_hash_from_volume(volume)
# TODO(PM): properties - redundancy, etc
res = self.call_or_reconnect(self.odm.create_volume,
dres,
self._vol_size_to_dm(volume['size']),
props)
self._check_result(res)
# If we crashed between create_volume and the deploy call,
# the volume might be defined but not exist on any server. Oh my.
res = self.call_or_reconnect(self.odm.auto_deploy,
dres, self.drbdmanage_redundancy,
0, True)
self._check_result(res)
return 0
def delete_volume(self, volume):
"""Deletes a resource."""
dres, dvol = self._resource_name_volnr_for_volume(
volume,
empty_ok=True)
if not dres:
# OK, already gone.
return True
# TODO(PM): check if in use? Ask whether Primary, or just check result?
res = self.call_or_reconnect(self.odm.remove_volume, dres, dvol, False)
self._check_result(res, ignore=[dm_exc.DM_ENOENT])
res, rl = self.call_or_reconnect(self.odm.list_volumes,
[dres],
0,
self.empty_dict,
self.empty_list)
self._check_result(res)
# We expect the _resource_ to be here still (we just got a volnr from
# it!), so just query the volumes.
# If the resource has no volumes anymore, the current DRBDmanage
        # version (erroneously, IMO) returns no *resource*, too.
if len(rl) > 1:
message = _('DRBDmanage expected one resource ("%(res)s"), '
'got %(n)d') % {'res': dres, 'n': len(rl)}
raise exception.VolumeBackendAPIException(data=message)
# Delete resource, if empty
if (not rl) or (not rl[0]) or (len(rl[0][2]) == 0):
res = self.call_or_reconnect(self.odm.remove_resource, dres, False)
self._check_result(res, ignore=[dm_exc.DM_ENOENT])
def create_volume_from_snapshot(self, volume, snapshot):
"""Creates a volume from a snapshot."""
LOG.debug("create vol from snap: from %(snap)s make %(vol)s",
{'snap': snapshot['id'], 'vol': volume['id']})
# TODO(PM): Consistency groups.
dres, sname, sprop = self._resource_and_snap_data_from_snapshot(
snapshot)
new_res = self.is_clean_volume_name(volume['id'], DM_VN_PREFIX)
r_props = self.empty_dict
# TODO(PM): consistency groups => different volume number possible
v_props = [(0, self._priv_hash_from_volume(volume))]
res = self.call_or_reconnect(self.odm.restore_snapshot,
new_res,
dres,
sname,
r_props,
v_props)
return self._check_result(res, ignore=[dm_exc.DM_ENOENT])
def create_cloned_volume(self, volume, src_vref):
temp_id = self._clean_uuid()
snapshot = {'id': temp_id}
self.create_snapshot({'id': temp_id, 'volume_id': src_vref['id']})
self.create_volume_from_snapshot(volume, snapshot)
self.delete_snapshot(snapshot)
def _update_volume_stats(self):
data = {}
data["vendor_name"] = 'Open Source'
data["driver_version"] = self.VERSION
data["storage_protocol"] = self.target_driver.protocol
# This has to match the name set in the cinder volume driver spec,
# so keep it lowercase
data["volume_backend_name"] = "drbdmanage"
data["pools"] = []
res, free, total = self.call_or_reconnect(self.odm.cluster_free_query,
self.drbdmanage_redundancy)
self._check_result(res)
location_info = ('DrbdManageDriver:%(cvol)s:%(dbus)s' %
{'cvol': self.dm_control_vol,
'dbus': self.drbdmanage_dbus_name})
# TODO(PM): multiple DRBDmanage instances and/or multiple pools
single_pool = {}
single_pool.update(dict(
pool_name=data["volume_backend_name"],
free_capacity_gb=self._vol_size_to_cinder(free),
total_capacity_gb=self._vol_size_to_cinder(total),
reserved_percentage=self.configuration.reserved_percentage,
location_info=location_info,
QoS_support=False))
data["pools"].append(single_pool)
self._stats = data
def get_volume_stats(self, refresh=True):
"""Get volume status."""
self._update_volume_stats()
return self._stats
def extend_volume(self, volume, new_size):
dres, dvol = self._resource_name_volnr_for_volume(volume)
res = self.call_or_reconnect(self.odm.resize_volume,
dres, dvol, -1,
{"size": self._vol_size_to_dm(new_size)},
0)
self._check_result(res)
return 0
def create_snapshot(self, snapshot):
"""Creates a snapshot."""
sn_name = self.snapshot_name_from_cinder_snapshot(snapshot)
dres, dvol = self._resource_name_volnr_for_volume(
snapshot["volume_id"])
res, data = self.call_or_reconnect(self.odm.list_assignments,
self.empty_dict,
[dres],
0,
self.empty_dict,
self.empty_dict)
self._check_result(res)
nodes = [d[0] for d in data]
if len(nodes) < 1:
raise exception.VolumeBackendAPIException(
_('Snapshot res "%s" that is not deployed anywhere?') %
(dres))
props = self._priv_hash_from_volume(snapshot)
res = self.call_or_reconnect(self.odm.create_snapshot,
dres, sn_name, nodes, props)
self._check_result(res)
def delete_snapshot(self, snapshot):
"""Deletes a snapshot."""
dres, sname, _ = self._resource_and_snap_data_from_snapshot(
snapshot, empty_ok=True)
if not dres:
# resource already gone?
LOG.warning(_LW("snapshot: %s not found, "
"skipping delete operation"), snapshot['id'])
LOG.info(_LI('Successfully deleted snapshot: %s'), snapshot['id'])
return True
res = self.call_or_reconnect(self.odm.remove_snapshot,
dres, sname, True)
return self._check_result(res, ignore=[dm_exc.DM_ENOENT])
# ####### Interface methods for DataPath (Target Driver) ########
def ensure_export(self, context, volume):
volume_path = self.local_path(volume)
return self.target_driver.ensure_export(
context,
volume,
volume_path)
def create_export(self, context, volume):
volume_path = self.local_path(volume)
export_info = self.target_driver.create_export(
context,
volume,
volume_path)
return {'provider_location': export_info['location'],
'provider_auth': export_info['auth'], }
def remove_export(self, context, volume):
return self.target_driver.remove_export(context, volume)
def initialize_connection(self, volume, connector):
return self.target_driver.initialize_connection(volume, connector)
def validate_connector(self, connector):
return self.target_driver.validate_connector(connector)
def terminate_connection(self, volume, connector, **kwargs):
return None
|
apache-2.0
| -4,814,244,151,652,230,000
| 36.596226
| 79
| 0.543812
| false
| 4.053295
| true
| false
| false
|
lylejohnson/FXPy
|
src/controls.py
|
1
|
313359
|
# This file was created automatically by SWIG.
import controlsc
from misc import *
from windows import *
from containers import *
import fox
class FX_LabelPtr(FX_FramePtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onPaint(self, *_args, **_kwargs):
val = apply(controlsc.FX_Label_onPaint,(self,) + _args, _kwargs)
return val
def onHotKeyPress(self, *_args, **_kwargs):
val = apply(controlsc.FX_Label_onHotKeyPress,(self,) + _args, _kwargs)
return val
def onHotKeyRelease(self, *_args, **_kwargs):
val = apply(controlsc.FX_Label_onHotKeyRelease,(self,) + _args, _kwargs)
return val
def onCmdGetStringValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_Label_onCmdGetStringValue,(self,) + _args, _kwargs)
return val
def onCmdSetStringValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_Label_onCmdSetStringValue,(self,) + _args, _kwargs)
return val
def onQueryHelp(self, *_args, **_kwargs):
val = apply(controlsc.FX_Label_onQueryHelp,(self,) + _args, _kwargs)
return val
def onQueryTip(self, *_args, **_kwargs):
val = apply(controlsc.FX_Label_onQueryTip,(self,) + _args, _kwargs)
return val
def getText(self, *_args, **_kwargs):
val = apply(controlsc.FX_Label_getText,(self,) + _args, _kwargs)
return val
def setText(self, *_args, **_kwargs):
val = apply(controlsc.FX_Label_setText,(self,) + _args, _kwargs)
return val
def setIcon(self, *_args, **_kwargs):
val = apply(controlsc.FX_Label_setIcon,(self,) + _args, _kwargs)
return val
def getIcon(self, *_args, **_kwargs):
val = apply(controlsc.FX_Label_getIcon,(self,) + _args, _kwargs)
return val
def setFont(self, *_args, **_kwargs):
val = apply(controlsc.FX_Label_setFont,(self,) + _args, _kwargs)
return val
def getFont(self, *_args, **_kwargs):
val = apply(controlsc.FX_Label_getFont,(self,) + _args, _kwargs)
return val
def getTextColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_Label_getTextColor,(self,) + _args, _kwargs)
return val
def setTextColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_Label_setTextColor,(self,) + _args, _kwargs)
return val
def setJustify(self, *_args, **_kwargs):
val = apply(controlsc.FX_Label_setJustify,(self,) + _args, _kwargs)
return val
def getJustify(self, *_args, **_kwargs):
val = apply(controlsc.FX_Label_getJustify,(self,) + _args, _kwargs)
return val
def setIconPosition(self, *_args, **_kwargs):
val = apply(controlsc.FX_Label_setIconPosition,(self,) + _args, _kwargs)
return val
def getIconPosition(self, *_args, **_kwargs):
val = apply(controlsc.FX_Label_getIconPosition,(self,) + _args, _kwargs)
return val
def setHelpText(self, *_args, **_kwargs):
val = apply(controlsc.FX_Label_setHelpText,(self,) + _args, _kwargs)
return val
def getHelpText(self, *_args, **_kwargs):
val = apply(controlsc.FX_Label_getHelpText,(self,) + _args, _kwargs)
return val
def setTipText(self, *_args, **_kwargs):
val = apply(controlsc.FX_Label_setTipText,(self,) + _args, _kwargs)
return val
def getTipText(self, *_args, **_kwargs):
val = apply(controlsc.FX_Label_getTipText,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FX_Label instance at %s>" % (self.this,)
class FX_Label(FX_LabelPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FX_Label,_args,_kwargs)
self.thisown = 1
class FXLabelPtr(FX_LabelPtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXLabel_onDefault,(self,) + _args, _kwargs)
return val
def create(self, *_args, **_kwargs):
val = apply(controlsc.FXLabel_create,(self,) + _args, _kwargs)
return val
def destroy(self, *_args, **_kwargs):
val = apply(controlsc.FXLabel_destroy,(self,) + _args, _kwargs)
return val
def detach(self, *_args, **_kwargs):
val = apply(controlsc.FXLabel_detach,(self,) + _args, _kwargs)
return val
def resize(self, *_args, **_kwargs):
val = apply(controlsc.FXLabel_resize,(self,) + _args, _kwargs)
return val
def getDefaultWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXLabel_getDefaultWidth,(self,) + _args, _kwargs)
return val
def getDefaultHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXLabel_getDefaultHeight,(self,) + _args, _kwargs)
return val
def show(self, *_args, **_kwargs):
val = apply(controlsc.FXLabel_show,(self,) + _args, _kwargs)
return val
def hide(self, *_args, **_kwargs):
val = apply(controlsc.FXLabel_hide,(self,) + _args, _kwargs)
return val
def enable(self, *_args, **_kwargs):
val = apply(controlsc.FXLabel_enable,(self,) + _args, _kwargs)
return val
def disable(self, *_args, **_kwargs):
val = apply(controlsc.FXLabel_disable,(self,) + _args, _kwargs)
return val
def canFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXLabel_canFocus,(self,) + _args, _kwargs)
return val
def setFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXLabel_setFocus,(self,) + _args, _kwargs)
return val
def killFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXLabel_killFocus,(self,) + _args, _kwargs)
return val
def setDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXLabel_setDefault,(self,) + _args, _kwargs)
return val
def recalc(self, *_args, **_kwargs):
val = apply(controlsc.FXLabel_recalc,(self,) + _args, _kwargs)
return val
def layout(self, *_args, **_kwargs):
val = apply(controlsc.FXLabel_layout,(self,) + _args, _kwargs)
return val
def lower(self, *_args, **_kwargs):
val = apply(controlsc.FXLabel_lower,(self,) + _args, _kwargs)
return val
def move(self, *_args, **_kwargs):
val = apply(controlsc.FXLabel_move,(self,) + _args, _kwargs)
return val
def position(self, *_args, **_kwargs):
val = apply(controlsc.FXLabel_position,(self,) + _args, _kwargs)
return val
def isComposite(self, *_args, **_kwargs):
val = apply(controlsc.FXLabel_isComposite,(self,) + _args, _kwargs)
return val
def contains(self, *_args, **_kwargs):
val = apply(controlsc.FXLabel_contains,(self,) + _args, _kwargs)
return val
def getWidthForHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXLabel_getWidthForHeight,(self,) + _args, _kwargs)
return val
def getHeightForWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXLabel_getHeightForWidth,(self,) + _args, _kwargs)
return val
def doesSaveUnder(self, *_args, **_kwargs):
val = apply(controlsc.FXLabel_doesSaveUnder,(self,) + _args, _kwargs)
return val
def reparent(self, *_args, **_kwargs):
val = apply(controlsc.FXLabel_reparent,(self,) + _args, _kwargs)
return val
def setBackColor(self, *_args, **_kwargs):
val = apply(controlsc.FXLabel_setBackColor,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FXLabel instance at %s>" % (self.this,)
class FXLabel(FXLabelPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FXLabel,_args,_kwargs)
self.thisown = 1
FXPyRegister(self)
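# Hedged usage sketch for the SWIG shadow classes above. The constructor
# arguments are assumed to mirror the C++ FOX API,
# FXLabel(parent, text, icon=None, opts=...), which should be checked
# against the SWIG interface:
#
#     label = FXLabel(parent, "Hello, FOX!")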
class FX_DialPtr(FX_FramePtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onPaint(self, *_args, **_kwargs):
val = apply(controlsc.FX_Dial_onPaint,(self,) + _args, _kwargs)
return val
def onMotion(self, *_args, **_kwargs):
val = apply(controlsc.FX_Dial_onMotion,(self,) + _args, _kwargs)
return val
def onMouseWheel(self, *_args, **_kwargs):
val = apply(controlsc.FX_Dial_onMouseWheel,(self,) + _args, _kwargs)
return val
def onLeftBtnPress(self, *_args, **_kwargs):
val = apply(controlsc.FX_Dial_onLeftBtnPress,(self,) + _args, _kwargs)
return val
def onLeftBtnRelease(self, *_args, **_kwargs):
val = apply(controlsc.FX_Dial_onLeftBtnRelease,(self,) + _args, _kwargs)
return val
def onUngrabbed(self, *_args, **_kwargs):
val = apply(controlsc.FX_Dial_onUngrabbed,(self,) + _args, _kwargs)
return val
def onCmdSetValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_Dial_onCmdSetValue,(self,) + _args, _kwargs)
return val
def onCmdSetIntValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_Dial_onCmdSetIntValue,(self,) + _args, _kwargs)
return val
def onCmdGetIntValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_Dial_onCmdGetIntValue,(self,) + _args, _kwargs)
return val
def onCmdSetRealValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_Dial_onCmdSetRealValue,(self,) + _args, _kwargs)
return val
def onCmdGetRealValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_Dial_onCmdGetRealValue,(self,) + _args, _kwargs)
return val
def onCmdSetIntRange(self, *_args, **_kwargs):
val = apply(controlsc.FX_Dial_onCmdSetIntRange,(self,) + _args, _kwargs)
return val
def onCmdGetIntRange(self, *_args, **_kwargs):
val = apply(controlsc.FX_Dial_onCmdGetIntRange,(self,) + _args, _kwargs)
return val
def onCmdSetRealRange(self, *_args, **_kwargs):
val = apply(controlsc.FX_Dial_onCmdSetRealRange,(self,) + _args, _kwargs)
return val
def onCmdGetRealRange(self, *_args, **_kwargs):
val = apply(controlsc.FX_Dial_onCmdGetRealRange,(self,) + _args, _kwargs)
return val
def onQueryHelp(self, *_args, **_kwargs):
val = apply(controlsc.FX_Dial_onQueryHelp,(self,) + _args, _kwargs)
return val
def onQueryTip(self, *_args, **_kwargs):
val = apply(controlsc.FX_Dial_onQueryTip,(self,) + _args, _kwargs)
return val
def setRange(self, *_args, **_kwargs):
val = apply(controlsc.FX_Dial_setRange,(self,) + _args, _kwargs)
return val
def getRange(self, *_args, **_kwargs):
val = apply(controlsc.FX_Dial_getRange,(self,) + _args, _kwargs)
return val
def setValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_Dial_setValue,(self,) + _args, _kwargs)
return val
def getValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_Dial_getValue,(self,) + _args, _kwargs)
return val
def setRevolutionIncrement(self, *_args, **_kwargs):
val = apply(controlsc.FX_Dial_setRevolutionIncrement,(self,) + _args, _kwargs)
return val
def getRevolutionIncrement(self, *_args, **_kwargs):
val = apply(controlsc.FX_Dial_getRevolutionIncrement,(self,) + _args, _kwargs)
return val
def setNotchSpacing(self, *_args, **_kwargs):
val = apply(controlsc.FX_Dial_setNotchSpacing,(self,) + _args, _kwargs)
return val
def getNotchSpacing(self, *_args, **_kwargs):
val = apply(controlsc.FX_Dial_getNotchSpacing,(self,) + _args, _kwargs)
return val
def setNotchOffset(self, *_args, **_kwargs):
val = apply(controlsc.FX_Dial_setNotchOffset,(self,) + _args, _kwargs)
return val
def getNotchOffset(self, *_args, **_kwargs):
val = apply(controlsc.FX_Dial_getNotchOffset,(self,) + _args, _kwargs)
return val
def getDialStyle(self, *_args, **_kwargs):
val = apply(controlsc.FX_Dial_getDialStyle,(self,) + _args, _kwargs)
return val
def setDialStyle(self, *_args, **_kwargs):
val = apply(controlsc.FX_Dial_setDialStyle,(self,) + _args, _kwargs)
return val
def setHelpText(self, *_args, **_kwargs):
val = apply(controlsc.FX_Dial_setHelpText,(self,) + _args, _kwargs)
return val
def getHelpText(self, *_args, **_kwargs):
val = apply(controlsc.FX_Dial_getHelpText,(self,) + _args, _kwargs)
return val
def setTipText(self, *_args, **_kwargs):
val = apply(controlsc.FX_Dial_setTipText,(self,) + _args, _kwargs)
return val
def getTipText(self, *_args, **_kwargs):
val = apply(controlsc.FX_Dial_getTipText,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FX_Dial instance at %s>" % (self.this,)
class FX_Dial(FX_DialPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FX_Dial,_args,_kwargs)
self.thisown = 1
class FXDialPtr(FX_DialPtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXDial_onDefault,(self,) + _args, _kwargs)
return val
def create(self, *_args, **_kwargs):
val = apply(controlsc.FXDial_create,(self,) + _args, _kwargs)
return val
def destroy(self, *_args, **_kwargs):
val = apply(controlsc.FXDial_destroy,(self,) + _args, _kwargs)
return val
def detach(self, *_args, **_kwargs):
val = apply(controlsc.FXDial_detach,(self,) + _args, _kwargs)
return val
def resize(self, *_args, **_kwargs):
val = apply(controlsc.FXDial_resize,(self,) + _args, _kwargs)
return val
def getDefaultWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXDial_getDefaultWidth,(self,) + _args, _kwargs)
return val
def getDefaultHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXDial_getDefaultHeight,(self,) + _args, _kwargs)
return val
def show(self, *_args, **_kwargs):
val = apply(controlsc.FXDial_show,(self,) + _args, _kwargs)
return val
def hide(self, *_args, **_kwargs):
val = apply(controlsc.FXDial_hide,(self,) + _args, _kwargs)
return val
def enable(self, *_args, **_kwargs):
val = apply(controlsc.FXDial_enable,(self,) + _args, _kwargs)
return val
def disable(self, *_args, **_kwargs):
val = apply(controlsc.FXDial_disable,(self,) + _args, _kwargs)
return val
def canFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXDial_canFocus,(self,) + _args, _kwargs)
return val
def setFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXDial_setFocus,(self,) + _args, _kwargs)
return val
def killFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXDial_killFocus,(self,) + _args, _kwargs)
return val
def setDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXDial_setDefault,(self,) + _args, _kwargs)
return val
def recalc(self, *_args, **_kwargs):
val = apply(controlsc.FXDial_recalc,(self,) + _args, _kwargs)
return val
def layout(self, *_args, **_kwargs):
val = apply(controlsc.FXDial_layout,(self,) + _args, _kwargs)
return val
def lower(self, *_args, **_kwargs):
val = apply(controlsc.FXDial_lower,(self,) + _args, _kwargs)
return val
def move(self, *_args, **_kwargs):
val = apply(controlsc.FXDial_move,(self,) + _args, _kwargs)
return val
def position(self, *_args, **_kwargs):
val = apply(controlsc.FXDial_position,(self,) + _args, _kwargs)
return val
def isComposite(self, *_args, **_kwargs):
val = apply(controlsc.FXDial_isComposite,(self,) + _args, _kwargs)
return val
def contains(self, *_args, **_kwargs):
val = apply(controlsc.FXDial_contains,(self,) + _args, _kwargs)
return val
def getWidthForHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXDial_getWidthForHeight,(self,) + _args, _kwargs)
return val
def getHeightForWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXDial_getHeightForWidth,(self,) + _args, _kwargs)
return val
def doesSaveUnder(self, *_args, **_kwargs):
val = apply(controlsc.FXDial_doesSaveUnder,(self,) + _args, _kwargs)
return val
def reparent(self, *_args, **_kwargs):
val = apply(controlsc.FXDial_reparent,(self,) + _args, _kwargs)
return val
def setBackColor(self, *_args, **_kwargs):
val = apply(controlsc.FXDial_setBackColor,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FXDial instance at %s>" % (self.this,)
class FXDial(FXDialPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FXDial,_args,_kwargs)
self.thisown = 1
FXPyRegister(self)
class FX_ColorWellPtr(FX_FramePtr):
ID_COLORDIALOG = controlsc.FX_ColorWell_ID_COLORDIALOG
ID_LAST = controlsc.FX_ColorWell_ID_LAST
def __init__(self,this):
self.this = this
self.thisown = 0
def onPaint(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWell_onPaint,(self,) + _args, _kwargs)
return val
def onLeftBtnPress(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWell_onLeftBtnPress,(self,) + _args, _kwargs)
return val
def onLeftBtnRelease(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWell_onLeftBtnRelease,(self,) + _args, _kwargs)
return val
def onMiddleBtnPress(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWell_onMiddleBtnPress,(self,) + _args, _kwargs)
return val
def onMiddleBtnRelease(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWell_onMiddleBtnRelease,(self,) + _args, _kwargs)
return val
def onKeyPress(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWell_onKeyPress,(self,) + _args, _kwargs)
return val
def onKeyRelease(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWell_onKeyRelease,(self,) + _args, _kwargs)
return val
def onUngrabbed(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWell_onUngrabbed,(self,) + _args, _kwargs)
return val
def onMotion(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWell_onMotion,(self,) + _args, _kwargs)
return val
def onBeginDrag(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWell_onBeginDrag,(self,) + _args, _kwargs)
return val
def onEndDrag(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWell_onEndDrag,(self,) + _args, _kwargs)
return val
def onDragged(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWell_onDragged,(self,) + _args, _kwargs)
return val
def onFocusIn(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWell_onFocusIn,(self,) + _args, _kwargs)
return val
def onFocusOut(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWell_onFocusOut,(self,) + _args, _kwargs)
return val
def onDNDEnter(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWell_onDNDEnter,(self,) + _args, _kwargs)
return val
def onDNDLeave(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWell_onDNDLeave,(self,) + _args, _kwargs)
return val
def onDNDMotion(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWell_onDNDMotion,(self,) + _args, _kwargs)
return val
def onDNDDrop(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWell_onDNDDrop,(self,) + _args, _kwargs)
return val
def onDNDRequest(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWell_onDNDRequest,(self,) + _args, _kwargs)
return val
def onSelectionLost(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWell_onSelectionLost,(self,) + _args, _kwargs)
return val
def onSelectionGained(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWell_onSelectionGained,(self,) + _args, _kwargs)
return val
def onSelectionRequest(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWell_onSelectionRequest,(self,) + _args, _kwargs)
return val
def onClicked(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWell_onClicked,(self,) + _args, _kwargs)
return val
def onDoubleClicked(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWell_onDoubleClicked,(self,) + _args, _kwargs)
return val
def onTripleClicked(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWell_onTripleClicked,(self,) + _args, _kwargs)
return val
def onQueryHelp(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWell_onQueryHelp,(self,) + _args, _kwargs)
return val
def onQueryTip(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWell_onQueryTip,(self,) + _args, _kwargs)
return val
def onCmdSetValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWell_onCmdSetValue,(self,) + _args, _kwargs)
return val
def onCmdColorWell(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWell_onCmdColorWell,(self,) + _args, _kwargs)
return val
def onChgColorWell(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWell_onChgColorWell,(self,) + _args, _kwargs)
return val
def onCmdSetIntValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWell_onCmdSetIntValue,(self,) + _args, _kwargs)
return val
def onCmdGetIntValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWell_onCmdGetIntValue,(self,) + _args, _kwargs)
return val
def setRGBA(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWell_setRGBA,(self,) + _args, _kwargs)
return val
def getRGBA(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWell_getRGBA,(self,) + _args, _kwargs)
return val
def setHelpText(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWell_setHelpText,(self,) + _args, _kwargs)
return val
def getHelpText(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWell_getHelpText,(self,) + _args, _kwargs)
return val
def setTipText(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWell_setTipText,(self,) + _args, _kwargs)
return val
def getTipText(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWell_getTipText,(self,) + _args, _kwargs)
return val
def isOpaqueOnly(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWell_isOpaqueOnly,(self,) + _args, _kwargs)
return val
def setOpaqueOnly(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWell_setOpaqueOnly,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FX_ColorWell instance at %s>" % (self.this,)
class FX_ColorWell(FX_ColorWellPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FX_ColorWell,_args,_kwargs)
self.thisown = 1
class FXColorWellPtr(FX_ColorWellPtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWell_onDefault,(self,) + _args, _kwargs)
return val
def create(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWell_create,(self,) + _args, _kwargs)
return val
def destroy(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWell_destroy,(self,) + _args, _kwargs)
return val
def detach(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWell_detach,(self,) + _args, _kwargs)
return val
def resize(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWell_resize,(self,) + _args, _kwargs)
return val
def getDefaultWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWell_getDefaultWidth,(self,) + _args, _kwargs)
return val
def getDefaultHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWell_getDefaultHeight,(self,) + _args, _kwargs)
return val
def show(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWell_show,(self,) + _args, _kwargs)
return val
def hide(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWell_hide,(self,) + _args, _kwargs)
return val
def enable(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWell_enable,(self,) + _args, _kwargs)
return val
def disable(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWell_disable,(self,) + _args, _kwargs)
return val
def canFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWell_canFocus,(self,) + _args, _kwargs)
return val
def setFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWell_setFocus,(self,) + _args, _kwargs)
return val
def killFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWell_killFocus,(self,) + _args, _kwargs)
return val
def setDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWell_setDefault,(self,) + _args, _kwargs)
return val
def recalc(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWell_recalc,(self,) + _args, _kwargs)
return val
def layout(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWell_layout,(self,) + _args, _kwargs)
return val
def lower(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWell_lower,(self,) + _args, _kwargs)
return val
def move(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWell_move,(self,) + _args, _kwargs)
return val
def position(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWell_position,(self,) + _args, _kwargs)
return val
def isComposite(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWell_isComposite,(self,) + _args, _kwargs)
return val
def contains(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWell_contains,(self,) + _args, _kwargs)
return val
def getWidthForHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWell_getWidthForHeight,(self,) + _args, _kwargs)
return val
def getHeightForWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWell_getHeightForWidth,(self,) + _args, _kwargs)
return val
def doesSaveUnder(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWell_doesSaveUnder,(self,) + _args, _kwargs)
return val
def reparent(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWell_reparent,(self,) + _args, _kwargs)
return val
def setBackColor(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWell_setBackColor,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FXColorWell instance at %s>" % (self.this,)
class FXColorWell(FXColorWellPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FXColorWell,_args,_kwargs)
self.thisown = 1
FXPyRegister(self)
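# --- Usage sketch (added comment; not part of the SWIG output) ---
# A minimal, hedged example of driving the FXColorWell wrapper above.
# `parent` and the FXRGBA packing helper are assumptions from the wider
# FOX/FXPy API, not defined in this file.
#
#   well = FXColorWell(parent, FXRGBA(255, 0, 0, 255))
#   well.setRGBA(FXRGBA(0, 128, 255, 255))   # change the displayed color
#   rgba = well.getRGBA()                    # packed RGBA value back out
#   well.setTipText("Pick a highlight color")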
class FX_TextFieldPtr(FX_FramePtr):
ID_CURSOR_HOME = controlsc.FX_TextField_ID_CURSOR_HOME
ID_CURSOR_END = controlsc.FX_TextField_ID_CURSOR_END
ID_CURSOR_RIGHT = controlsc.FX_TextField_ID_CURSOR_RIGHT
ID_CURSOR_LEFT = controlsc.FX_TextField_ID_CURSOR_LEFT
ID_MARK = controlsc.FX_TextField_ID_MARK
ID_EXTEND = controlsc.FX_TextField_ID_EXTEND
ID_SELECT_ALL = controlsc.FX_TextField_ID_SELECT_ALL
ID_DESELECT_ALL = controlsc.FX_TextField_ID_DESELECT_ALL
ID_CUT_SEL = controlsc.FX_TextField_ID_CUT_SEL
ID_COPY_SEL = controlsc.FX_TextField_ID_COPY_SEL
ID_PASTE_SEL = controlsc.FX_TextField_ID_PASTE_SEL
ID_DELETE_SEL = controlsc.FX_TextField_ID_DELETE_SEL
ID_OVERST_STRING = controlsc.FX_TextField_ID_OVERST_STRING
ID_INSERT_STRING = controlsc.FX_TextField_ID_INSERT_STRING
ID_BACKSPACE = controlsc.FX_TextField_ID_BACKSPACE
ID_DELETE = controlsc.FX_TextField_ID_DELETE
ID_TOGGLE_EDITABLE = controlsc.FX_TextField_ID_TOGGLE_EDITABLE
ID_TOGGLE_OVERSTRIKE = controlsc.FX_TextField_ID_TOGGLE_OVERSTRIKE
ID_BLINK = controlsc.FX_TextField_ID_BLINK
ID_LAST = controlsc.FX_TextField_ID_LAST
def __init__(self,this):
self.this = this
self.thisown = 0
def onPaint(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_onPaint,(self,) + _args, _kwargs)
return val
def onKeyPress(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_onKeyPress,(self,) + _args, _kwargs)
return val
def onKeyRelease(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_onKeyRelease,(self,) + _args, _kwargs)
return val
def onLeftBtnPress(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_onLeftBtnPress,(self,) + _args, _kwargs)
return val
def onLeftBtnRelease(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_onLeftBtnRelease,(self,) + _args, _kwargs)
return val
def onMiddleBtnPress(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_onMiddleBtnPress,(self,) + _args, _kwargs)
return val
def onMiddleBtnRelease(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_onMiddleBtnRelease,(self,) + _args, _kwargs)
return val
def onVerify(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_onVerify,(self,) + _args, _kwargs)
return val
def onMotion(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_onMotion,(self,) + _args, _kwargs)
return val
def onSelectionLost(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_onSelectionLost,(self,) + _args, _kwargs)
return val
def onSelectionGained(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_onSelectionGained,(self,) + _args, _kwargs)
return val
def onSelectionRequest(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_onSelectionRequest,(self,) + _args, _kwargs)
return val
def onClipboardLost(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_onClipboardLost,(self,) + _args, _kwargs)
return val
def onClipboardGained(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_onClipboardGained,(self,) + _args, _kwargs)
return val
def onClipboardRequest(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_onClipboardRequest,(self,) + _args, _kwargs)
return val
def onFocusSelf(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_onFocusSelf,(self,) + _args, _kwargs)
return val
def onFocusIn(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_onFocusIn,(self,) + _args, _kwargs)
return val
def onFocusOut(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_onFocusOut,(self,) + _args, _kwargs)
return val
def onBlink(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_onBlink,(self,) + _args, _kwargs)
return val
def onAutoScroll(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_onAutoScroll,(self,) + _args, _kwargs)
return val
def onQueryHelp(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_onQueryHelp,(self,) + _args, _kwargs)
return val
def onQueryTip(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_onQueryTip,(self,) + _args, _kwargs)
return val
def onCmdSetValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_onCmdSetValue,(self,) + _args, _kwargs)
return val
def onCmdSetIntValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_onCmdSetIntValue,(self,) + _args, _kwargs)
return val
def onCmdSetRealValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_onCmdSetRealValue,(self,) + _args, _kwargs)
return val
def onCmdSetStringValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_onCmdSetStringValue,(self,) + _args, _kwargs)
return val
def onCmdGetIntValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_onCmdGetIntValue,(self,) + _args, _kwargs)
return val
def onCmdGetRealValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_onCmdGetRealValue,(self,) + _args, _kwargs)
return val
def onCmdGetStringValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_onCmdGetStringValue,(self,) + _args, _kwargs)
return val
def onCmdCursorHome(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_onCmdCursorHome,(self,) + _args, _kwargs)
return val
def onCmdCursorEnd(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_onCmdCursorEnd,(self,) + _args, _kwargs)
return val
def onCmdCursorRight(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_onCmdCursorRight,(self,) + _args, _kwargs)
return val
def onCmdCursorLeft(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_onCmdCursorLeft,(self,) + _args, _kwargs)
return val
def onCmdMark(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_onCmdMark,(self,) + _args, _kwargs)
return val
def onCmdExtend(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_onCmdExtend,(self,) + _args, _kwargs)
return val
def onCmdSelectAll(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_onCmdSelectAll,(self,) + _args, _kwargs)
return val
def onCmdDeselectAll(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_onCmdDeselectAll,(self,) + _args, _kwargs)
return val
def onCmdCutSel(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_onCmdCutSel,(self,) + _args, _kwargs)
return val
def onCmdCopySel(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_onCmdCopySel,(self,) + _args, _kwargs)
return val
def onCmdPasteSel(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_onCmdPasteSel,(self,) + _args, _kwargs)
return val
def onCmdDeleteSel(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_onCmdDeleteSel,(self,) + _args, _kwargs)
return val
def onCmdOverstString(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_onCmdOverstString,(self,) + _args, _kwargs)
return val
def onCmdInsertString(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_onCmdInsertString,(self,) + _args, _kwargs)
return val
def onCmdBackspace(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_onCmdBackspace,(self,) + _args, _kwargs)
return val
def onCmdDelete(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_onCmdDelete,(self,) + _args, _kwargs)
return val
def onCmdToggleEditable(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_onCmdToggleEditable,(self,) + _args, _kwargs)
return val
def onUpdToggleEditable(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_onUpdToggleEditable,(self,) + _args, _kwargs)
return val
def onCmdToggleOverstrike(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_onCmdToggleOverstrike,(self,) + _args, _kwargs)
return val
def onUpdToggleOverstrike(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_onUpdToggleOverstrike,(self,) + _args, _kwargs)
return val
def isEditable(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_isEditable,(self,) + _args, _kwargs)
return val
def setEditable(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_setEditable,(self,) + _args, _kwargs)
return val
def setCursorPos(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_setCursorPos,(self,) + _args, _kwargs)
return val
def getCursorPos(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_getCursorPos,(self,) + _args, _kwargs)
return val
def setAnchorPos(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_setAnchorPos,(self,) + _args, _kwargs)
return val
def getAnchorPos(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_getAnchorPos,(self,) + _args, _kwargs)
return val
def setText(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_setText,(self,) + _args, _kwargs)
return val
def getText(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_getText,(self,) + _args, _kwargs)
return val
def setFont(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_setFont,(self,) + _args, _kwargs)
return val
def getFont(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_getFont,(self,) + _args, _kwargs)
return val
def setTextColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_setTextColor,(self,) + _args, _kwargs)
return val
def getTextColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_getTextColor,(self,) + _args, _kwargs)
return val
def setSelBackColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_setSelBackColor,(self,) + _args, _kwargs)
return val
def getSelBackColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_getSelBackColor,(self,) + _args, _kwargs)
return val
def setSelTextColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_setSelTextColor,(self,) + _args, _kwargs)
return val
def getSelTextColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_getSelTextColor,(self,) + _args, _kwargs)
return val
def setNumColumns(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_setNumColumns,(self,) + _args, _kwargs)
return val
def getNumColumns(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_getNumColumns,(self,) + _args, _kwargs)
return val
def setJustify(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_setJustify,(self,) + _args, _kwargs)
return val
def getJustify(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_getJustify,(self,) + _args, _kwargs)
return val
def setHelpText(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_setHelpText,(self,) + _args, _kwargs)
return val
def getHelpText(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_getHelpText,(self,) + _args, _kwargs)
return val
def setTipText(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_setTipText,(self,) + _args, _kwargs)
return val
def getTipText(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_getTipText,(self,) + _args, _kwargs)
return val
def setTextStyle(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_setTextStyle,(self,) + _args, _kwargs)
return val
def getTextStyle(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_getTextStyle,(self,) + _args, _kwargs)
return val
def selectAll(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_selectAll,(self,) + _args, _kwargs)
return val
def setSelection(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_setSelection,(self,) + _args, _kwargs)
return val
def extendSelection(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_extendSelection,(self,) + _args, _kwargs)
return val
def killSelection(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_killSelection,(self,) + _args, _kwargs)
return val
def isPosSelected(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_isPosSelected,(self,) + _args, _kwargs)
return val
def isPosVisible(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_isPosVisible,(self,) + _args, _kwargs)
return val
def makePositionVisible(self, *_args, **_kwargs):
val = apply(controlsc.FX_TextField_makePositionVisible,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FX_TextField instance at %s>" % (self.this,)
class FX_TextField(FX_TextFieldPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FX_TextField,_args,_kwargs)
self.thisown = 1
class FXTextFieldPtr(FX_TextFieldPtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXTextField_onDefault,(self,) + _args, _kwargs)
return val
def create(self, *_args, **_kwargs):
val = apply(controlsc.FXTextField_create,(self,) + _args, _kwargs)
return val
def destroy(self, *_args, **_kwargs):
val = apply(controlsc.FXTextField_destroy,(self,) + _args, _kwargs)
return val
def detach(self, *_args, **_kwargs):
val = apply(controlsc.FXTextField_detach,(self,) + _args, _kwargs)
return val
def resize(self, *_args, **_kwargs):
val = apply(controlsc.FXTextField_resize,(self,) + _args, _kwargs)
return val
def getDefaultWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXTextField_getDefaultWidth,(self,) + _args, _kwargs)
return val
def getDefaultHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXTextField_getDefaultHeight,(self,) + _args, _kwargs)
return val
def show(self, *_args, **_kwargs):
val = apply(controlsc.FXTextField_show,(self,) + _args, _kwargs)
return val
def hide(self, *_args, **_kwargs):
val = apply(controlsc.FXTextField_hide,(self,) + _args, _kwargs)
return val
def enable(self, *_args, **_kwargs):
val = apply(controlsc.FXTextField_enable,(self,) + _args, _kwargs)
return val
def disable(self, *_args, **_kwargs):
val = apply(controlsc.FXTextField_disable,(self,) + _args, _kwargs)
return val
def canFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXTextField_canFocus,(self,) + _args, _kwargs)
return val
def setFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXTextField_setFocus,(self,) + _args, _kwargs)
return val
def killFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXTextField_killFocus,(self,) + _args, _kwargs)
return val
def setDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXTextField_setDefault,(self,) + _args, _kwargs)
return val
def recalc(self, *_args, **_kwargs):
val = apply(controlsc.FXTextField_recalc,(self,) + _args, _kwargs)
return val
def layout(self, *_args, **_kwargs):
val = apply(controlsc.FXTextField_layout,(self,) + _args, _kwargs)
return val
def lower(self, *_args, **_kwargs):
val = apply(controlsc.FXTextField_lower,(self,) + _args, _kwargs)
return val
def move(self, *_args, **_kwargs):
val = apply(controlsc.FXTextField_move,(self,) + _args, _kwargs)
return val
def position(self, *_args, **_kwargs):
val = apply(controlsc.FXTextField_position,(self,) + _args, _kwargs)
return val
def isComposite(self, *_args, **_kwargs):
val = apply(controlsc.FXTextField_isComposite,(self,) + _args, _kwargs)
return val
def contains(self, *_args, **_kwargs):
val = apply(controlsc.FXTextField_contains,(self,) + _args, _kwargs)
return val
def getWidthForHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXTextField_getWidthForHeight,(self,) + _args, _kwargs)
return val
def getHeightForWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXTextField_getHeightForWidth,(self,) + _args, _kwargs)
return val
def doesSaveUnder(self, *_args, **_kwargs):
val = apply(controlsc.FXTextField_doesSaveUnder,(self,) + _args, _kwargs)
return val
def reparent(self, *_args, **_kwargs):
val = apply(controlsc.FXTextField_reparent,(self,) + _args, _kwargs)
return val
def setBackColor(self, *_args, **_kwargs):
val = apply(controlsc.FXTextField_setBackColor,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FXTextField instance at %s>" % (self.this,)
class FXTextField(FXTextFieldPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FXTextField,_args,_kwargs)
self.thisown = 1
FXPyRegister(self)
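# --- Usage sketch (added comment; not part of the SWIG output) ---
# Typical use of the FXTextField wrapper above; `parent` is an assumed
# enclosing FXComposite and the column count is illustrative only.
#
#   field = FXTextField(parent, 20)          # 20 visible columns
#   field.setText("initial value")
#   field.setEditable(1)                     # FOX uses int flags here
#   field.setSelection(0, len(field.getText()))
#   print field.getText()
# The FX_Button* shadow classes below wrap FOX's plain push button.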
class FX_ButtonPtr(FX_LabelPtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onPaint(self, *_args, **_kwargs):
val = apply(controlsc.FX_Button_onPaint,(self,) + _args, _kwargs)
return val
def onUpdate(self, *_args, **_kwargs):
val = apply(controlsc.FX_Button_onUpdate,(self,) + _args, _kwargs)
return val
def onEnter(self, *_args, **_kwargs):
val = apply(controlsc.FX_Button_onEnter,(self,) + _args, _kwargs)
return val
def onLeave(self, *_args, **_kwargs):
val = apply(controlsc.FX_Button_onLeave,(self,) + _args, _kwargs)
return val
def onFocusIn(self, *_args, **_kwargs):
val = apply(controlsc.FX_Button_onFocusIn,(self,) + _args, _kwargs)
return val
def onFocusOut(self, *_args, **_kwargs):
val = apply(controlsc.FX_Button_onFocusOut,(self,) + _args, _kwargs)
return val
def onUngrabbed(self, *_args, **_kwargs):
val = apply(controlsc.FX_Button_onUngrabbed,(self,) + _args, _kwargs)
return val
def onLeftBtnPress(self, *_args, **_kwargs):
val = apply(controlsc.FX_Button_onLeftBtnPress,(self,) + _args, _kwargs)
return val
def onLeftBtnRelease(self, *_args, **_kwargs):
val = apply(controlsc.FX_Button_onLeftBtnRelease,(self,) + _args, _kwargs)
return val
def onKeyPress(self, *_args, **_kwargs):
val = apply(controlsc.FX_Button_onKeyPress,(self,) + _args, _kwargs)
return val
def onKeyRelease(self, *_args, **_kwargs):
val = apply(controlsc.FX_Button_onKeyRelease,(self,) + _args, _kwargs)
return val
def onHotKeyPress(self, *_args, **_kwargs):
val = apply(controlsc.FX_Button_onHotKeyPress,(self,) + _args, _kwargs)
return val
def onHotKeyRelease(self, *_args, **_kwargs):
val = apply(controlsc.FX_Button_onHotKeyRelease,(self,) + _args, _kwargs)
return val
def onCheck(self, *_args, **_kwargs):
val = apply(controlsc.FX_Button_onCheck,(self,) + _args, _kwargs)
return val
def onUncheck(self, *_args, **_kwargs):
val = apply(controlsc.FX_Button_onUncheck,(self,) + _args, _kwargs)
return val
def onCmdSetValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_Button_onCmdSetValue,(self,) + _args, _kwargs)
return val
def onCmdSetIntValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_Button_onCmdSetIntValue,(self,) + _args, _kwargs)
return val
def onCmdGetIntValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_Button_onCmdGetIntValue,(self,) + _args, _kwargs)
return val
def setState(self, *_args, **_kwargs):
val = apply(controlsc.FX_Button_setState,(self,) + _args, _kwargs)
return val
def getState(self, *_args, **_kwargs):
val = apply(controlsc.FX_Button_getState,(self,) + _args, _kwargs)
return val
def setButtonStyle(self, *_args, **_kwargs):
val = apply(controlsc.FX_Button_setButtonStyle,(self,) + _args, _kwargs)
return val
def getButtonStyle(self, *_args, **_kwargs):
val = apply(controlsc.FX_Button_getButtonStyle,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FX_Button instance at %s>" % (self.this,)
class FX_Button(FX_ButtonPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FX_Button,_args,_kwargs)
self.thisown = 1
class FXButtonPtr(FX_ButtonPtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXButton_onDefault,(self,) + _args, _kwargs)
return val
def create(self, *_args, **_kwargs):
val = apply(controlsc.FXButton_create,(self,) + _args, _kwargs)
return val
def destroy(self, *_args, **_kwargs):
val = apply(controlsc.FXButton_destroy,(self,) + _args, _kwargs)
return val
def detach(self, *_args, **_kwargs):
val = apply(controlsc.FXButton_detach,(self,) + _args, _kwargs)
return val
def resize(self, *_args, **_kwargs):
val = apply(controlsc.FXButton_resize,(self,) + _args, _kwargs)
return val
def getDefaultWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXButton_getDefaultWidth,(self,) + _args, _kwargs)
return val
def getDefaultHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXButton_getDefaultHeight,(self,) + _args, _kwargs)
return val
def show(self, *_args, **_kwargs):
val = apply(controlsc.FXButton_show,(self,) + _args, _kwargs)
return val
def hide(self, *_args, **_kwargs):
val = apply(controlsc.FXButton_hide,(self,) + _args, _kwargs)
return val
def enable(self, *_args, **_kwargs):
val = apply(controlsc.FXButton_enable,(self,) + _args, _kwargs)
return val
def disable(self, *_args, **_kwargs):
val = apply(controlsc.FXButton_disable,(self,) + _args, _kwargs)
return val
def canFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXButton_canFocus,(self,) + _args, _kwargs)
return val
def setFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXButton_setFocus,(self,) + _args, _kwargs)
return val
def killFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXButton_killFocus,(self,) + _args, _kwargs)
return val
def setDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXButton_setDefault,(self,) + _args, _kwargs)
return val
def recalc(self, *_args, **_kwargs):
val = apply(controlsc.FXButton_recalc,(self,) + _args, _kwargs)
return val
def layout(self, *_args, **_kwargs):
val = apply(controlsc.FXButton_layout,(self,) + _args, _kwargs)
return val
def lower(self, *_args, **_kwargs):
val = apply(controlsc.FXButton_lower,(self,) + _args, _kwargs)
return val
def move(self, *_args, **_kwargs):
val = apply(controlsc.FXButton_move,(self,) + _args, _kwargs)
return val
def position(self, *_args, **_kwargs):
val = apply(controlsc.FXButton_position,(self,) + _args, _kwargs)
return val
def isComposite(self, *_args, **_kwargs):
val = apply(controlsc.FXButton_isComposite,(self,) + _args, _kwargs)
return val
def contains(self, *_args, **_kwargs):
val = apply(controlsc.FXButton_contains,(self,) + _args, _kwargs)
return val
def getWidthForHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXButton_getWidthForHeight,(self,) + _args, _kwargs)
return val
def getHeightForWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXButton_getHeightForWidth,(self,) + _args, _kwargs)
return val
def doesSaveUnder(self, *_args, **_kwargs):
val = apply(controlsc.FXButton_doesSaveUnder,(self,) + _args, _kwargs)
return val
def reparent(self, *_args, **_kwargs):
val = apply(controlsc.FXButton_reparent,(self,) + _args, _kwargs)
return val
def setBackColor(self, *_args, **_kwargs):
val = apply(controlsc.FXButton_setBackColor,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FXButton instance at %s>" % (self.this,)
class FXButton(FXButtonPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FXButton,_args,_kwargs)
self.thisown = 1
FXPyRegister(self)
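# --- Usage sketch (added comment; not part of the SWIG output) ---
# Wiring the FXButton wrapper above to a target; `parent`, `app`,
# FXApp.ID_QUIT and the BUTTON_* style flags come from the wider
# FOX/FXPy API and are assumptions here.
#
#   btn = FXButton(parent, "&Quit\tQuit the application",
#                  None, app, FXApp.ID_QUIT)
#   btn.setButtonStyle(BUTTON_NORMAL | BUTTON_DEFAULT)
#   btn.setDefault(1)                        # accept Return as a click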
class FX_ToggleButtonPtr(FX_LabelPtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onPaint(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToggleButton_onPaint,(self,) + _args, _kwargs)
return val
def onUpdate(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToggleButton_onUpdate,(self,) + _args, _kwargs)
return val
def onEnter(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToggleButton_onEnter,(self,) + _args, _kwargs)
return val
def onLeave(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToggleButton_onLeave,(self,) + _args, _kwargs)
return val
def onFocusIn(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToggleButton_onFocusIn,(self,) + _args, _kwargs)
return val
def onFocusOut(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToggleButton_onFocusOut,(self,) + _args, _kwargs)
return val
def onUngrabbed(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToggleButton_onUngrabbed,(self,) + _args, _kwargs)
return val
def onLeftBtnPress(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToggleButton_onLeftBtnPress,(self,) + _args, _kwargs)
return val
def onLeftBtnRelease(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToggleButton_onLeftBtnRelease,(self,) + _args, _kwargs)
return val
def onKeyPress(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToggleButton_onKeyPress,(self,) + _args, _kwargs)
return val
def onKeyRelease(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToggleButton_onKeyRelease,(self,) + _args, _kwargs)
return val
def onHotKeyPress(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToggleButton_onHotKeyPress,(self,) + _args, _kwargs)
return val
def onHotKeyRelease(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToggleButton_onHotKeyRelease,(self,) + _args, _kwargs)
return val
def onCheck(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToggleButton_onCheck,(self,) + _args, _kwargs)
return val
def onUncheck(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToggleButton_onUncheck,(self,) + _args, _kwargs)
return val
def onQueryHelp(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToggleButton_onQueryHelp,(self,) + _args, _kwargs)
return val
def onQueryTip(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToggleButton_onQueryTip,(self,) + _args, _kwargs)
return val
def onCmdSetValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToggleButton_onCmdSetValue,(self,) + _args, _kwargs)
return val
def onCmdSetIntValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToggleButton_onCmdSetIntValue,(self,) + _args, _kwargs)
return val
def onCmdGetIntValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToggleButton_onCmdGetIntValue,(self,) + _args, _kwargs)
return val
def setState(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToggleButton_setState,(self,) + _args, _kwargs)
return val
def getState(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToggleButton_getState,(self,) + _args, _kwargs)
return val
def setAltText(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToggleButton_setAltText,(self,) + _args, _kwargs)
return val
def getAltText(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToggleButton_getAltText,(self,) + _args, _kwargs)
return val
def setAltIcon(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToggleButton_setAltIcon,(self,) + _args, _kwargs)
return val
def getAltIcon(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToggleButton_getAltIcon,(self,) + _args, _kwargs)
return val
def setAltHelpText(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToggleButton_setAltHelpText,(self,) + _args, _kwargs)
return val
def getAltHelpText(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToggleButton_getAltHelpText,(self,) + _args, _kwargs)
return val
def setAltTipText(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToggleButton_setAltTipText,(self,) + _args, _kwargs)
return val
def getAltTipText(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToggleButton_getAltTipText,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FX_ToggleButton instance at %s>" % (self.this,)
class FX_ToggleButton(FX_ToggleButtonPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FX_ToggleButton,_args,_kwargs)
self.thisown = 1
class FXToggleButtonPtr(FX_ToggleButtonPtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXToggleButton_onDefault,(self,) + _args, _kwargs)
return val
def create(self, *_args, **_kwargs):
val = apply(controlsc.FXToggleButton_create,(self,) + _args, _kwargs)
return val
def destroy(self, *_args, **_kwargs):
val = apply(controlsc.FXToggleButton_destroy,(self,) + _args, _kwargs)
return val
def detach(self, *_args, **_kwargs):
val = apply(controlsc.FXToggleButton_detach,(self,) + _args, _kwargs)
return val
def resize(self, *_args, **_kwargs):
val = apply(controlsc.FXToggleButton_resize,(self,) + _args, _kwargs)
return val
def getDefaultWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXToggleButton_getDefaultWidth,(self,) + _args, _kwargs)
return val
def getDefaultHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXToggleButton_getDefaultHeight,(self,) + _args, _kwargs)
return val
def show(self, *_args, **_kwargs):
val = apply(controlsc.FXToggleButton_show,(self,) + _args, _kwargs)
return val
def hide(self, *_args, **_kwargs):
val = apply(controlsc.FXToggleButton_hide,(self,) + _args, _kwargs)
return val
def enable(self, *_args, **_kwargs):
val = apply(controlsc.FXToggleButton_enable,(self,) + _args, _kwargs)
return val
def disable(self, *_args, **_kwargs):
val = apply(controlsc.FXToggleButton_disable,(self,) + _args, _kwargs)
return val
def canFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXToggleButton_canFocus,(self,) + _args, _kwargs)
return val
def setFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXToggleButton_setFocus,(self,) + _args, _kwargs)
return val
def killFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXToggleButton_killFocus,(self,) + _args, _kwargs)
return val
def setDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXToggleButton_setDefault,(self,) + _args, _kwargs)
return val
def recalc(self, *_args, **_kwargs):
val = apply(controlsc.FXToggleButton_recalc,(self,) + _args, _kwargs)
return val
def layout(self, *_args, **_kwargs):
val = apply(controlsc.FXToggleButton_layout,(self,) + _args, _kwargs)
return val
def lower(self, *_args, **_kwargs):
val = apply(controlsc.FXToggleButton_lower,(self,) + _args, _kwargs)
return val
def move(self, *_args, **_kwargs):
val = apply(controlsc.FXToggleButton_move,(self,) + _args, _kwargs)
return val
def position(self, *_args, **_kwargs):
val = apply(controlsc.FXToggleButton_position,(self,) + _args, _kwargs)
return val
def isComposite(self, *_args, **_kwargs):
val = apply(controlsc.FXToggleButton_isComposite,(self,) + _args, _kwargs)
return val
def contains(self, *_args, **_kwargs):
val = apply(controlsc.FXToggleButton_contains,(self,) + _args, _kwargs)
return val
def getWidthForHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXToggleButton_getWidthForHeight,(self,) + _args, _kwargs)
return val
def getHeightForWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXToggleButton_getHeightForWidth,(self,) + _args, _kwargs)
return val
def doesSaveUnder(self, *_args, **_kwargs):
val = apply(controlsc.FXToggleButton_doesSaveUnder,(self,) + _args, _kwargs)
return val
def reparent(self, *_args, **_kwargs):
val = apply(controlsc.FXToggleButton_reparent,(self,) + _args, _kwargs)
return val
def setBackColor(self, *_args, **_kwargs):
val = apply(controlsc.FXToggleButton_setBackColor,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FXToggleButton instance at %s>" % (self.this,)
class FXToggleButton(FXToggleButtonPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FXToggleButton,_args,_kwargs)
self.thisown = 1
FXPyRegister(self)
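# --- Usage sketch (added comment; not part of the SWIG output) ---
# The toggle button above flips between a primary and an alternate
# face (text/icon/tip); the constructor argument order shown is an
# assumption from the FOX API.
#
#   tog = FXToggleButton(parent, "Play", "Pause")
#   tog.setAltTipText("Pause playback")
#   tog.setState(1)                          # show the alternate face
#   if tog.getState(): print "toggled on"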
class FX_RadioButtonPtr(FX_LabelPtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onPaint(self, *_args, **_kwargs):
val = apply(controlsc.FX_RadioButton_onPaint,(self,) + _args, _kwargs)
return val
def onUpdate(self, *_args, **_kwargs):
val = apply(controlsc.FX_RadioButton_onUpdate,(self,) + _args, _kwargs)
return val
def onEnter(self, *_args, **_kwargs):
val = apply(controlsc.FX_RadioButton_onEnter,(self,) + _args, _kwargs)
return val
def onLeave(self, *_args, **_kwargs):
val = apply(controlsc.FX_RadioButton_onLeave,(self,) + _args, _kwargs)
return val
def onFocusIn(self, *_args, **_kwargs):
val = apply(controlsc.FX_RadioButton_onFocusIn,(self,) + _args, _kwargs)
return val
def onFocusOut(self, *_args, **_kwargs):
val = apply(controlsc.FX_RadioButton_onFocusOut,(self,) + _args, _kwargs)
return val
def onUngrabbed(self, *_args, **_kwargs):
val = apply(controlsc.FX_RadioButton_onUngrabbed,(self,) + _args, _kwargs)
return val
def onLeftBtnPress(self, *_args, **_kwargs):
val = apply(controlsc.FX_RadioButton_onLeftBtnPress,(self,) + _args, _kwargs)
return val
def onLeftBtnRelease(self, *_args, **_kwargs):
val = apply(controlsc.FX_RadioButton_onLeftBtnRelease,(self,) + _args, _kwargs)
return val
def onKeyPress(self, *_args, **_kwargs):
val = apply(controlsc.FX_RadioButton_onKeyPress,(self,) + _args, _kwargs)
return val
def onKeyRelease(self, *_args, **_kwargs):
val = apply(controlsc.FX_RadioButton_onKeyRelease,(self,) + _args, _kwargs)
return val
def onHotKeyPress(self, *_args, **_kwargs):
val = apply(controlsc.FX_RadioButton_onHotKeyPress,(self,) + _args, _kwargs)
return val
def onHotKeyRelease(self, *_args, **_kwargs):
val = apply(controlsc.FX_RadioButton_onHotKeyRelease,(self,) + _args, _kwargs)
return val
def onUncheckRadio(self, *_args, **_kwargs):
val = apply(controlsc.FX_RadioButton_onUncheckRadio,(self,) + _args, _kwargs)
return val
def onCheck(self, *_args, **_kwargs):
val = apply(controlsc.FX_RadioButton_onCheck,(self,) + _args, _kwargs)
return val
def onUncheck(self, *_args, **_kwargs):
val = apply(controlsc.FX_RadioButton_onUncheck,(self,) + _args, _kwargs)
return val
def onUnknown(self, *_args, **_kwargs):
val = apply(controlsc.FX_RadioButton_onUnknown,(self,) + _args, _kwargs)
return val
def onCmdSetValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_RadioButton_onCmdSetValue,(self,) + _args, _kwargs)
return val
def onCmdSetIntValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_RadioButton_onCmdSetIntValue,(self,) + _args, _kwargs)
return val
def onCmdGetIntValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_RadioButton_onCmdGetIntValue,(self,) + _args, _kwargs)
return val
def setCheck(self, *_args, **_kwargs):
val = apply(controlsc.FX_RadioButton_setCheck,(self,) + _args, _kwargs)
return val
def getCheck(self, *_args, **_kwargs):
val = apply(controlsc.FX_RadioButton_getCheck,(self,) + _args, _kwargs)
return val
def setRadioButtonStyle(self, *_args, **_kwargs):
val = apply(controlsc.FX_RadioButton_setRadioButtonStyle,(self,) + _args, _kwargs)
return val
def getRadioButtonStyle(self, *_args, **_kwargs):
val = apply(controlsc.FX_RadioButton_getRadioButtonStyle,(self,) + _args, _kwargs)
return val
def getRadioColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_RadioButton_getRadioColor,(self,) + _args, _kwargs)
return val
def setRadioColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_RadioButton_setRadioColor,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FX_RadioButton instance at %s>" % (self.this,)
class FX_RadioButton(FX_RadioButtonPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FX_RadioButton,_args,_kwargs)
self.thisown = 1
class FXRadioButtonPtr(FX_RadioButtonPtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXRadioButton_onDefault,(self,) + _args, _kwargs)
return val
def create(self, *_args, **_kwargs):
val = apply(controlsc.FXRadioButton_create,(self,) + _args, _kwargs)
return val
def destroy(self, *_args, **_kwargs):
val = apply(controlsc.FXRadioButton_destroy,(self,) + _args, _kwargs)
return val
def detach(self, *_args, **_kwargs):
val = apply(controlsc.FXRadioButton_detach,(self,) + _args, _kwargs)
return val
def resize(self, *_args, **_kwargs):
val = apply(controlsc.FXRadioButton_resize,(self,) + _args, _kwargs)
return val
def getDefaultWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXRadioButton_getDefaultWidth,(self,) + _args, _kwargs)
return val
def getDefaultHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXRadioButton_getDefaultHeight,(self,) + _args, _kwargs)
return val
def show(self, *_args, **_kwargs):
val = apply(controlsc.FXRadioButton_show,(self,) + _args, _kwargs)
return val
def hide(self, *_args, **_kwargs):
val = apply(controlsc.FXRadioButton_hide,(self,) + _args, _kwargs)
return val
def enable(self, *_args, **_kwargs):
val = apply(controlsc.FXRadioButton_enable,(self,) + _args, _kwargs)
return val
def disable(self, *_args, **_kwargs):
val = apply(controlsc.FXRadioButton_disable,(self,) + _args, _kwargs)
return val
def canFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXRadioButton_canFocus,(self,) + _args, _kwargs)
return val
def setFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXRadioButton_setFocus,(self,) + _args, _kwargs)
return val
def killFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXRadioButton_killFocus,(self,) + _args, _kwargs)
return val
def setDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXRadioButton_setDefault,(self,) + _args, _kwargs)
return val
def recalc(self, *_args, **_kwargs):
val = apply(controlsc.FXRadioButton_recalc,(self,) + _args, _kwargs)
return val
def layout(self, *_args, **_kwargs):
val = apply(controlsc.FXRadioButton_layout,(self,) + _args, _kwargs)
return val
def lower(self, *_args, **_kwargs):
val = apply(controlsc.FXRadioButton_lower,(self,) + _args, _kwargs)
return val
def move(self, *_args, **_kwargs):
val = apply(controlsc.FXRadioButton_move,(self,) + _args, _kwargs)
return val
def position(self, *_args, **_kwargs):
val = apply(controlsc.FXRadioButton_position,(self,) + _args, _kwargs)
return val
def isComposite(self, *_args, **_kwargs):
val = apply(controlsc.FXRadioButton_isComposite,(self,) + _args, _kwargs)
return val
def contains(self, *_args, **_kwargs):
val = apply(controlsc.FXRadioButton_contains,(self,) + _args, _kwargs)
return val
def getWidthForHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXRadioButton_getWidthForHeight,(self,) + _args, _kwargs)
return val
def getHeightForWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXRadioButton_getHeightForWidth,(self,) + _args, _kwargs)
return val
def doesSaveUnder(self, *_args, **_kwargs):
val = apply(controlsc.FXRadioButton_doesSaveUnder,(self,) + _args, _kwargs)
return val
def reparent(self, *_args, **_kwargs):
val = apply(controlsc.FXRadioButton_reparent,(self,) + _args, _kwargs)
return val
def setBackColor(self, *_args, **_kwargs):
val = apply(controlsc.FXRadioButton_setBackColor,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FXRadioButton instance at %s>" % (self.this,)
class FXRadioButton(FXRadioButtonPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FXRadioButton,_args,_kwargs)
self.thisown = 1
FXPyRegister(self)
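# --- Usage sketch (added comment; not part of the SWIG output) ---
# Radio buttons uncheck their siblings through the onUncheckRadio
# handler above; this sketch only sets and reads one button's state.
# `parent` is an assumed container.
#
#   radio = FXRadioButton(parent, "Portrait")
#   radio.setCheck(1)
#   if radio.getCheck(): print "portrait selected"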
class FX_CheckButtonPtr(FX_LabelPtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onPaint(self, *_args, **_kwargs):
val = apply(controlsc.FX_CheckButton_onPaint,(self,) + _args, _kwargs)
return val
def onUpdate(self, *_args, **_kwargs):
val = apply(controlsc.FX_CheckButton_onUpdate,(self,) + _args, _kwargs)
return val
def onEnter(self, *_args, **_kwargs):
val = apply(controlsc.FX_CheckButton_onEnter,(self,) + _args, _kwargs)
return val
def onLeave(self, *_args, **_kwargs):
val = apply(controlsc.FX_CheckButton_onLeave,(self,) + _args, _kwargs)
return val
def onFocusIn(self, *_args, **_kwargs):
val = apply(controlsc.FX_CheckButton_onFocusIn,(self,) + _args, _kwargs)
return val
def onFocusOut(self, *_args, **_kwargs):
val = apply(controlsc.FX_CheckButton_onFocusOut,(self,) + _args, _kwargs)
return val
def onUngrabbed(self, *_args, **_kwargs):
val = apply(controlsc.FX_CheckButton_onUngrabbed,(self,) + _args, _kwargs)
return val
def onLeftBtnPress(self, *_args, **_kwargs):
val = apply(controlsc.FX_CheckButton_onLeftBtnPress,(self,) + _args, _kwargs)
return val
def onLeftBtnRelease(self, *_args, **_kwargs):
val = apply(controlsc.FX_CheckButton_onLeftBtnRelease,(self,) + _args, _kwargs)
return val
def onKeyPress(self, *_args, **_kwargs):
val = apply(controlsc.FX_CheckButton_onKeyPress,(self,) + _args, _kwargs)
return val
def onKeyRelease(self, *_args, **_kwargs):
val = apply(controlsc.FX_CheckButton_onKeyRelease,(self,) + _args, _kwargs)
return val
def onHotKeyPress(self, *_args, **_kwargs):
val = apply(controlsc.FX_CheckButton_onHotKeyPress,(self,) + _args, _kwargs)
return val
def onHotKeyRelease(self, *_args, **_kwargs):
val = apply(controlsc.FX_CheckButton_onHotKeyRelease,(self,) + _args, _kwargs)
return val
def onCheck(self, *_args, **_kwargs):
val = apply(controlsc.FX_CheckButton_onCheck,(self,) + _args, _kwargs)
return val
def onUncheck(self, *_args, **_kwargs):
val = apply(controlsc.FX_CheckButton_onUncheck,(self,) + _args, _kwargs)
return val
def onUnknown(self, *_args, **_kwargs):
val = apply(controlsc.FX_CheckButton_onUnknown,(self,) + _args, _kwargs)
return val
def onCmdSetValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_CheckButton_onCmdSetValue,(self,) + _args, _kwargs)
return val
def onCmdSetIntValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_CheckButton_onCmdSetIntValue,(self,) + _args, _kwargs)
return val
def onCmdGetIntValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_CheckButton_onCmdGetIntValue,(self,) + _args, _kwargs)
return val
def setCheck(self, *_args, **_kwargs):
val = apply(controlsc.FX_CheckButton_setCheck,(self,) + _args, _kwargs)
return val
def getCheck(self, *_args, **_kwargs):
val = apply(controlsc.FX_CheckButton_getCheck,(self,) + _args, _kwargs)
return val
def getBoxColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_CheckButton_getBoxColor,(self,) + _args, _kwargs)
return val
def setBoxColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_CheckButton_setBoxColor,(self,) + _args, _kwargs)
return val
def setCheckButtonStyle(self, *_args, **_kwargs):
val = apply(controlsc.FX_CheckButton_setCheckButtonStyle,(self,) + _args, _kwargs)
return val
def getCheckButtonStyle(self, *_args, **_kwargs):
val = apply(controlsc.FX_CheckButton_getCheckButtonStyle,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FX_CheckButton instance at %s>" % (self.this,)
class FX_CheckButton(FX_CheckButtonPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FX_CheckButton,_args,_kwargs)
self.thisown = 1
class FXCheckButtonPtr(FX_CheckButtonPtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXCheckButton_onDefault,(self,) + _args, _kwargs)
return val
def create(self, *_args, **_kwargs):
val = apply(controlsc.FXCheckButton_create,(self,) + _args, _kwargs)
return val
def destroy(self, *_args, **_kwargs):
val = apply(controlsc.FXCheckButton_destroy,(self,) + _args, _kwargs)
return val
def detach(self, *_args, **_kwargs):
val = apply(controlsc.FXCheckButton_detach,(self,) + _args, _kwargs)
return val
def resize(self, *_args, **_kwargs):
val = apply(controlsc.FXCheckButton_resize,(self,) + _args, _kwargs)
return val
def getDefaultWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXCheckButton_getDefaultWidth,(self,) + _args, _kwargs)
return val
def getDefaultHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXCheckButton_getDefaultHeight,(self,) + _args, _kwargs)
return val
def show(self, *_args, **_kwargs):
val = apply(controlsc.FXCheckButton_show,(self,) + _args, _kwargs)
return val
def hide(self, *_args, **_kwargs):
val = apply(controlsc.FXCheckButton_hide,(self,) + _args, _kwargs)
return val
def enable(self, *_args, **_kwargs):
val = apply(controlsc.FXCheckButton_enable,(self,) + _args, _kwargs)
return val
def disable(self, *_args, **_kwargs):
val = apply(controlsc.FXCheckButton_disable,(self,) + _args, _kwargs)
return val
def canFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXCheckButton_canFocus,(self,) + _args, _kwargs)
return val
def setFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXCheckButton_setFocus,(self,) + _args, _kwargs)
return val
def killFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXCheckButton_killFocus,(self,) + _args, _kwargs)
return val
def setDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXCheckButton_setDefault,(self,) + _args, _kwargs)
return val
def recalc(self, *_args, **_kwargs):
val = apply(controlsc.FXCheckButton_recalc,(self,) + _args, _kwargs)
return val
def layout(self, *_args, **_kwargs):
val = apply(controlsc.FXCheckButton_layout,(self,) + _args, _kwargs)
return val
def lower(self, *_args, **_kwargs):
val = apply(controlsc.FXCheckButton_lower,(self,) + _args, _kwargs)
return val
def move(self, *_args, **_kwargs):
val = apply(controlsc.FXCheckButton_move,(self,) + _args, _kwargs)
return val
def position(self, *_args, **_kwargs):
val = apply(controlsc.FXCheckButton_position,(self,) + _args, _kwargs)
return val
def isComposite(self, *_args, **_kwargs):
val = apply(controlsc.FXCheckButton_isComposite,(self,) + _args, _kwargs)
return val
def contains(self, *_args, **_kwargs):
val = apply(controlsc.FXCheckButton_contains,(self,) + _args, _kwargs)
return val
def getWidthForHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXCheckButton_getWidthForHeight,(self,) + _args, _kwargs)
return val
def getHeightForWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXCheckButton_getHeightForWidth,(self,) + _args, _kwargs)
return val
def doesSaveUnder(self, *_args, **_kwargs):
val = apply(controlsc.FXCheckButton_doesSaveUnder,(self,) + _args, _kwargs)
return val
def reparent(self, *_args, **_kwargs):
val = apply(controlsc.FXCheckButton_reparent,(self,) + _args, _kwargs)
return val
def setBackColor(self, *_args, **_kwargs):
val = apply(controlsc.FXCheckButton_setBackColor,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FXCheckButton instance at %s>" % (self.this,)
class FXCheckButton(FXCheckButtonPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FXCheckButton,_args,_kwargs)
self.thisown = 1
FXPyRegister(self)
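# --- Usage sketch (added comment; not part of the SWIG output) ---
# Minimal use of the FXCheckButton wrapper above; `parent` is an
# assumed container and FXRGB is from the wider FOX API.
#
#   check = FXCheckButton(parent, "Remember me")
#   check.setCheck(1)
#   check.setBoxColor(FXRGB(255, 255, 255))  # white check box
#   state = check.getCheck()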
class FX_ArrowButtonPtr(FX_FramePtr):
ID_REPEAT = controlsc.FX_ArrowButton_ID_REPEAT
ID_LAST = controlsc.FX_ArrowButton_ID_LAST
def __init__(self,this):
self.this = this
self.thisown = 0
def onPaint(self, *_args, **_kwargs):
val = apply(controlsc.FX_ArrowButton_onPaint,(self,) + _args, _kwargs)
return val
def onUpdate(self, *_args, **_kwargs):
val = apply(controlsc.FX_ArrowButton_onUpdate,(self,) + _args, _kwargs)
return val
def onEnter(self, *_args, **_kwargs):
val = apply(controlsc.FX_ArrowButton_onEnter,(self,) + _args, _kwargs)
return val
def onLeave(self, *_args, **_kwargs):
val = apply(controlsc.FX_ArrowButton_onLeave,(self,) + _args, _kwargs)
return val
def onLeftBtnPress(self, *_args, **_kwargs):
val = apply(controlsc.FX_ArrowButton_onLeftBtnPress,(self,) + _args, _kwargs)
return val
def onLeftBtnRelease(self, *_args, **_kwargs):
val = apply(controlsc.FX_ArrowButton_onLeftBtnRelease,(self,) + _args, _kwargs)
return val
def onUngrabbed(self, *_args, **_kwargs):
val = apply(controlsc.FX_ArrowButton_onUngrabbed,(self,) + _args, _kwargs)
return val
def onRepeat(self, *_args, **_kwargs):
val = apply(controlsc.FX_ArrowButton_onRepeat,(self,) + _args, _kwargs)
return val
def onKeyPress(self, *_args, **_kwargs):
val = apply(controlsc.FX_ArrowButton_onKeyPress,(self,) + _args, _kwargs)
return val
def onKeyRelease(self, *_args, **_kwargs):
val = apply(controlsc.FX_ArrowButton_onKeyRelease,(self,) + _args, _kwargs)
return val
def onHotKeyPress(self, *_args, **_kwargs):
val = apply(controlsc.FX_ArrowButton_onHotKeyPress,(self,) + _args, _kwargs)
return val
def onHotKeyRelease(self, *_args, **_kwargs):
val = apply(controlsc.FX_ArrowButton_onHotKeyRelease,(self,) + _args, _kwargs)
return val
def onQueryHelp(self, *_args, **_kwargs):
val = apply(controlsc.FX_ArrowButton_onQueryHelp,(self,) + _args, _kwargs)
return val
def onQueryTip(self, *_args, **_kwargs):
val = apply(controlsc.FX_ArrowButton_onQueryTip,(self,) + _args, _kwargs)
return val
def setState(self, *_args, **_kwargs):
val = apply(controlsc.FX_ArrowButton_setState,(self,) + _args, _kwargs)
return val
def getState(self, *_args, **_kwargs):
val = apply(controlsc.FX_ArrowButton_getState,(self,) + _args, _kwargs)
return val
def setHelpText(self, *_args, **_kwargs):
val = apply(controlsc.FX_ArrowButton_setHelpText,(self,) + _args, _kwargs)
return val
def getHelpText(self, *_args, **_kwargs):
val = apply(controlsc.FX_ArrowButton_getHelpText,(self,) + _args, _kwargs)
return val
def setTipText(self, *_args, **_kwargs):
val = apply(controlsc.FX_ArrowButton_setTipText,(self,) + _args, _kwargs)
return val
def getTipText(self, *_args, **_kwargs):
val = apply(controlsc.FX_ArrowButton_getTipText,(self,) + _args, _kwargs)
return val
def setArrowStyle(self, *_args, **_kwargs):
val = apply(controlsc.FX_ArrowButton_setArrowStyle,(self,) + _args, _kwargs)
return val
def getArrowStyle(self, *_args, **_kwargs):
val = apply(controlsc.FX_ArrowButton_getArrowStyle,(self,) + _args, _kwargs)
return val
def setArrowSize(self, *_args, **_kwargs):
val = apply(controlsc.FX_ArrowButton_setArrowSize,(self,) + _args, _kwargs)
return val
def getArrowSize(self, *_args, **_kwargs):
val = apply(controlsc.FX_ArrowButton_getArrowSize,(self,) + _args, _kwargs)
return val
def setJustify(self, *_args, **_kwargs):
val = apply(controlsc.FX_ArrowButton_setJustify,(self,) + _args, _kwargs)
return val
def getJustify(self, *_args, **_kwargs):
val = apply(controlsc.FX_ArrowButton_getJustify,(self,) + _args, _kwargs)
return val
def getArrowColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_ArrowButton_getArrowColor,(self,) + _args, _kwargs)
return val
def setArrowColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_ArrowButton_setArrowColor,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FX_ArrowButton instance at %s>" % (self.this,)
class FX_ArrowButton(FX_ArrowButtonPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FX_ArrowButton,_args,_kwargs)
self.thisown = 1
class FXArrowButtonPtr(FX_ArrowButtonPtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXArrowButton_onDefault,(self,) + _args, _kwargs)
return val
def create(self, *_args, **_kwargs):
val = apply(controlsc.FXArrowButton_create,(self,) + _args, _kwargs)
return val
def destroy(self, *_args, **_kwargs):
val = apply(controlsc.FXArrowButton_destroy,(self,) + _args, _kwargs)
return val
def detach(self, *_args, **_kwargs):
val = apply(controlsc.FXArrowButton_detach,(self,) + _args, _kwargs)
return val
def resize(self, *_args, **_kwargs):
val = apply(controlsc.FXArrowButton_resize,(self,) + _args, _kwargs)
return val
def getDefaultWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXArrowButton_getDefaultWidth,(self,) + _args, _kwargs)
return val
def getDefaultHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXArrowButton_getDefaultHeight,(self,) + _args, _kwargs)
return val
def show(self, *_args, **_kwargs):
val = apply(controlsc.FXArrowButton_show,(self,) + _args, _kwargs)
return val
def hide(self, *_args, **_kwargs):
val = apply(controlsc.FXArrowButton_hide,(self,) + _args, _kwargs)
return val
def enable(self, *_args, **_kwargs):
val = apply(controlsc.FXArrowButton_enable,(self,) + _args, _kwargs)
return val
def disable(self, *_args, **_kwargs):
val = apply(controlsc.FXArrowButton_disable,(self,) + _args, _kwargs)
return val
def canFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXArrowButton_canFocus,(self,) + _args, _kwargs)
return val
def setFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXArrowButton_setFocus,(self,) + _args, _kwargs)
return val
def killFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXArrowButton_killFocus,(self,) + _args, _kwargs)
return val
def setDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXArrowButton_setDefault,(self,) + _args, _kwargs)
return val
def recalc(self, *_args, **_kwargs):
val = apply(controlsc.FXArrowButton_recalc,(self,) + _args, _kwargs)
return val
def layout(self, *_args, **_kwargs):
val = apply(controlsc.FXArrowButton_layout,(self,) + _args, _kwargs)
return val
def lower(self, *_args, **_kwargs):
val = apply(controlsc.FXArrowButton_lower,(self,) + _args, _kwargs)
return val
def move(self, *_args, **_kwargs):
val = apply(controlsc.FXArrowButton_move,(self,) + _args, _kwargs)
return val
def position(self, *_args, **_kwargs):
val = apply(controlsc.FXArrowButton_position,(self,) + _args, _kwargs)
return val
def isComposite(self, *_args, **_kwargs):
val = apply(controlsc.FXArrowButton_isComposite,(self,) + _args, _kwargs)
return val
def contains(self, *_args, **_kwargs):
val = apply(controlsc.FXArrowButton_contains,(self,) + _args, _kwargs)
return val
def getWidthForHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXArrowButton_getWidthForHeight,(self,) + _args, _kwargs)
return val
def getHeightForWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXArrowButton_getHeightForWidth,(self,) + _args, _kwargs)
return val
def doesSaveUnder(self, *_args, **_kwargs):
val = apply(controlsc.FXArrowButton_doesSaveUnder,(self,) + _args, _kwargs)
return val
def reparent(self, *_args, **_kwargs):
val = apply(controlsc.FXArrowButton_reparent,(self,) + _args, _kwargs)
return val
def setBackColor(self, *_args, **_kwargs):
val = apply(controlsc.FXArrowButton_setBackColor,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FXArrowButton instance at %s>" % (self.this,)
class FXArrowButton(FXArrowButtonPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FXArrowButton,_args,_kwargs)
self.thisown = 1
FXPyRegister(self)
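# --- Usage sketch (added comment; not part of the SWIG output) ---
# The arrow button above auto-repeats via its ID_REPEAT timer while
# held down; the ARROW_* style flags are assumptions from the FOX API.
#
#   up = FXArrowButton(parent, target, selector,
#                      ARROW_UP | ARROW_REPEAT)
#   up.setArrowSize(11)
#   up.setTipText("Increase value")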
class FX_PickerPtr(FX_ButtonPtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onMotion(self, *_args, **_kwargs):
val = apply(controlsc.FX_Picker_onMotion,(self,) + _args, _kwargs)
return val
def onLeftBtnPress(self, *_args, **_kwargs):
val = apply(controlsc.FX_Picker_onLeftBtnPress,(self,) + _args, _kwargs)
return val
def onLeftBtnRelease(self, *_args, **_kwargs):
val = apply(controlsc.FX_Picker_onLeftBtnRelease,(self,) + _args, _kwargs)
return val
def onEnter(self, *_args, **_kwargs):
val = apply(controlsc.FX_Picker_onEnter,(self,) + _args, _kwargs)
return val
def onLeave(self, *_args, **_kwargs):
val = apply(controlsc.FX_Picker_onLeave,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FX_Picker instance at %s>" % (self.this,)
class FX_Picker(FX_PickerPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FX_Picker,_args,_kwargs)
self.thisown = 1
class FXPickerPtr(FX_PickerPtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXPicker_onDefault,(self,) + _args, _kwargs)
return val
def create(self, *_args, **_kwargs):
val = apply(controlsc.FXPicker_create,(self,) + _args, _kwargs)
return val
def destroy(self, *_args, **_kwargs):
val = apply(controlsc.FXPicker_destroy,(self,) + _args, _kwargs)
return val
def detach(self, *_args, **_kwargs):
val = apply(controlsc.FXPicker_detach,(self,) + _args, _kwargs)
return val
def resize(self, *_args, **_kwargs):
val = apply(controlsc.FXPicker_resize,(self,) + _args, _kwargs)
return val
def getDefaultWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXPicker_getDefaultWidth,(self,) + _args, _kwargs)
return val
def getDefaultHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXPicker_getDefaultHeight,(self,) + _args, _kwargs)
return val
def show(self, *_args, **_kwargs):
val = apply(controlsc.FXPicker_show,(self,) + _args, _kwargs)
return val
def hide(self, *_args, **_kwargs):
val = apply(controlsc.FXPicker_hide,(self,) + _args, _kwargs)
return val
def enable(self, *_args, **_kwargs):
val = apply(controlsc.FXPicker_enable,(self,) + _args, _kwargs)
return val
def disable(self, *_args, **_kwargs):
val = apply(controlsc.FXPicker_disable,(self,) + _args, _kwargs)
return val
def canFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXPicker_canFocus,(self,) + _args, _kwargs)
return val
def setFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXPicker_setFocus,(self,) + _args, _kwargs)
return val
def killFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXPicker_killFocus,(self,) + _args, _kwargs)
return val
def setDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXPicker_setDefault,(self,) + _args, _kwargs)
return val
def recalc(self, *_args, **_kwargs):
val = apply(controlsc.FXPicker_recalc,(self,) + _args, _kwargs)
return val
def layout(self, *_args, **_kwargs):
val = apply(controlsc.FXPicker_layout,(self,) + _args, _kwargs)
return val
def lower(self, *_args, **_kwargs):
val = apply(controlsc.FXPicker_lower,(self,) + _args, _kwargs)
return val
def move(self, *_args, **_kwargs):
val = apply(controlsc.FXPicker_move,(self,) + _args, _kwargs)
return val
def position(self, *_args, **_kwargs):
val = apply(controlsc.FXPicker_position,(self,) + _args, _kwargs)
return val
def isComposite(self, *_args, **_kwargs):
val = apply(controlsc.FXPicker_isComposite,(self,) + _args, _kwargs)
return val
def contains(self, *_args, **_kwargs):
val = apply(controlsc.FXPicker_contains,(self,) + _args, _kwargs)
return val
def getWidthForHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXPicker_getWidthForHeight,(self,) + _args, _kwargs)
return val
def getHeightForWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXPicker_getHeightForWidth,(self,) + _args, _kwargs)
return val
def doesSaveUnder(self, *_args, **_kwargs):
val = apply(controlsc.FXPicker_doesSaveUnder,(self,) + _args, _kwargs)
return val
def reparent(self, *_args, **_kwargs):
val = apply(controlsc.FXPicker_reparent,(self,) + _args, _kwargs)
return val
def setBackColor(self, *_args, **_kwargs):
val = apply(controlsc.FXPicker_setBackColor,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FXPicker instance at %s>" % (self.this,)
class FXPicker(FXPickerPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FXPicker,_args,_kwargs)
self.thisown = 1
FXPyRegister(self)
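# FX_Spinner: numeric spin button.  The ID_* attributes mirror the C enum
# and identify the sub-widgets (up arrow, down arrow, text entry) in the
# FOX message map.  A minimal, hypothetical usage sketch -- the constructor
# arguments are an assumption and are forwarded verbatim to
# controlsc.new_FXSpinner:
#
#   spin = FXSpinner(parent, 4, tgt, sel)  # assumed (parent, columns, target, selector)
#   spin.setRange(0, 100)                  # setRange/setValue/setIncrement are
#   spin.setValue(50)                      # defined on FX_SpinnerPtr below
#   spin.setIncrement(5)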
class FX_SpinnerPtr(FX_PackerPtr):
ID_INCREMENT = controlsc.FX_Spinner_ID_INCREMENT
ID_DECREMENT = controlsc.FX_Spinner_ID_DECREMENT
ID_ENTRY = controlsc.FX_Spinner_ID_ENTRY
ID_LAST = controlsc.FX_Spinner_ID_LAST
def __init__(self,this):
self.this = this
self.thisown = 0
def onUpdIncrement(self, *_args, **_kwargs):
val = apply(controlsc.FX_Spinner_onUpdIncrement,(self,) + _args, _kwargs)
return val
def onCmdIncrement(self, *_args, **_kwargs):
val = apply(controlsc.FX_Spinner_onCmdIncrement,(self,) + _args, _kwargs)
return val
def onUpdDecrement(self, *_args, **_kwargs):
val = apply(controlsc.FX_Spinner_onUpdDecrement,(self,) + _args, _kwargs)
return val
def onCmdDecrement(self, *_args, **_kwargs):
val = apply(controlsc.FX_Spinner_onCmdDecrement,(self,) + _args, _kwargs)
return val
def onCmdEntry(self, *_args, **_kwargs):
val = apply(controlsc.FX_Spinner_onCmdEntry,(self,) + _args, _kwargs)
return val
def onChgEntry(self, *_args, **_kwargs):
val = apply(controlsc.FX_Spinner_onChgEntry,(self,) + _args, _kwargs)
return val
def onKeyPress(self, *_args, **_kwargs):
val = apply(controlsc.FX_Spinner_onKeyPress,(self,) + _args, _kwargs)
return val
def onKeyRelease(self, *_args, **_kwargs):
val = apply(controlsc.FX_Spinner_onKeyRelease,(self,) + _args, _kwargs)
return val
def onCmdSetValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_Spinner_onCmdSetValue,(self,) + _args, _kwargs)
return val
def onCmdSetIntValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_Spinner_onCmdSetIntValue,(self,) + _args, _kwargs)
return val
def onCmdGetIntValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_Spinner_onCmdGetIntValue,(self,) + _args, _kwargs)
return val
def onCmdSetIntRange(self, *_args, **_kwargs):
val = apply(controlsc.FX_Spinner_onCmdSetIntRange,(self,) + _args, _kwargs)
return val
def onCmdGetIntRange(self, *_args, **_kwargs):
val = apply(controlsc.FX_Spinner_onCmdGetIntRange,(self,) + _args, _kwargs)
return val
def increment(self, *_args, **_kwargs):
val = apply(controlsc.FX_Spinner_increment,(self,) + _args, _kwargs)
return val
def decrement(self, *_args, **_kwargs):
val = apply(controlsc.FX_Spinner_decrement,(self,) + _args, _kwargs)
return val
def isCyclic(self, *_args, **_kwargs):
val = apply(controlsc.FX_Spinner_isCyclic,(self,) + _args, _kwargs)
return val
def setCyclic(self, *_args, **_kwargs):
val = apply(controlsc.FX_Spinner_setCyclic,(self,) + _args, _kwargs)
return val
def isTextVisible(self, *_args, **_kwargs):
val = apply(controlsc.FX_Spinner_isTextVisible,(self,) + _args, _kwargs)
return val
def setTextVisible(self, *_args, **_kwargs):
val = apply(controlsc.FX_Spinner_setTextVisible,(self,) + _args, _kwargs)
return val
def setValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_Spinner_setValue,(self,) + _args, _kwargs)
return val
def getValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_Spinner_getValue,(self,) + _args, _kwargs)
return val
def setRange(self, *_args, **_kwargs):
val = apply(controlsc.FX_Spinner_setRange,(self,) + _args, _kwargs)
return val
def getRange(self, *_args, **_kwargs):
val = apply(controlsc.FX_Spinner_getRange,(self,) + _args, _kwargs)
return val
def setIncrement(self, *_args, **_kwargs):
val = apply(controlsc.FX_Spinner_setIncrement,(self,) + _args, _kwargs)
return val
def getIncrement(self, *_args, **_kwargs):
val = apply(controlsc.FX_Spinner_getIncrement,(self,) + _args, _kwargs)
return val
def setFont(self, *_args, **_kwargs):
val = apply(controlsc.FX_Spinner_setFont,(self,) + _args, _kwargs)
return val
def getFont(self, *_args, **_kwargs):
val = apply(controlsc.FX_Spinner_getFont,(self,) + _args, _kwargs)
return val
def setHelpText(self, *_args, **_kwargs):
val = apply(controlsc.FX_Spinner_setHelpText,(self,) + _args, _kwargs)
return val
def getHelpText(self, *_args, **_kwargs):
val = apply(controlsc.FX_Spinner_getHelpText,(self,) + _args, _kwargs)
return val
def setTipText(self, *_args, **_kwargs):
val = apply(controlsc.FX_Spinner_setTipText,(self,) + _args, _kwargs)
return val
def getTipText(self, *_args, **_kwargs):
val = apply(controlsc.FX_Spinner_getTipText,(self,) + _args, _kwargs)
return val
def setSpinnerStyle(self, *_args, **_kwargs):
val = apply(controlsc.FX_Spinner_setSpinnerStyle,(self,) + _args, _kwargs)
return val
def getSpinnerStyle(self, *_args, **_kwargs):
val = apply(controlsc.FX_Spinner_getSpinnerStyle,(self,) + _args, _kwargs)
return val
def setEditable(self, *_args, **_kwargs):
val = apply(controlsc.FX_Spinner_setEditable,(self,) + _args, _kwargs)
return val
def isEditable(self, *_args, **_kwargs):
val = apply(controlsc.FX_Spinner_isEditable,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FX_Spinner instance at %s>" % (self.this,)
class FX_Spinner(FX_SpinnerPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FX_Spinner,_args,_kwargs)
self.thisown = 1
class FXSpinnerPtr(FX_SpinnerPtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXSpinner_onDefault,(self,) + _args, _kwargs)
return val
def create(self, *_args, **_kwargs):
val = apply(controlsc.FXSpinner_create,(self,) + _args, _kwargs)
return val
def destroy(self, *_args, **_kwargs):
val = apply(controlsc.FXSpinner_destroy,(self,) + _args, _kwargs)
return val
def detach(self, *_args, **_kwargs):
val = apply(controlsc.FXSpinner_detach,(self,) + _args, _kwargs)
return val
def resize(self, *_args, **_kwargs):
val = apply(controlsc.FXSpinner_resize,(self,) + _args, _kwargs)
return val
def getDefaultWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXSpinner_getDefaultWidth,(self,) + _args, _kwargs)
return val
def getDefaultHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXSpinner_getDefaultHeight,(self,) + _args, _kwargs)
return val
def show(self, *_args, **_kwargs):
val = apply(controlsc.FXSpinner_show,(self,) + _args, _kwargs)
return val
def hide(self, *_args, **_kwargs):
val = apply(controlsc.FXSpinner_hide,(self,) + _args, _kwargs)
return val
def enable(self, *_args, **_kwargs):
val = apply(controlsc.FXSpinner_enable,(self,) + _args, _kwargs)
return val
def disable(self, *_args, **_kwargs):
val = apply(controlsc.FXSpinner_disable,(self,) + _args, _kwargs)
return val
def canFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXSpinner_canFocus,(self,) + _args, _kwargs)
return val
def setFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXSpinner_setFocus,(self,) + _args, _kwargs)
return val
def killFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXSpinner_killFocus,(self,) + _args, _kwargs)
return val
def setDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXSpinner_setDefault,(self,) + _args, _kwargs)
return val
def recalc(self, *_args, **_kwargs):
val = apply(controlsc.FXSpinner_recalc,(self,) + _args, _kwargs)
return val
def layout(self, *_args, **_kwargs):
val = apply(controlsc.FXSpinner_layout,(self,) + _args, _kwargs)
return val
def lower(self, *_args, **_kwargs):
val = apply(controlsc.FXSpinner_lower,(self,) + _args, _kwargs)
return val
def move(self, *_args, **_kwargs):
val = apply(controlsc.FXSpinner_move,(self,) + _args, _kwargs)
return val
def position(self, *_args, **_kwargs):
val = apply(controlsc.FXSpinner_position,(self,) + _args, _kwargs)
return val
def isComposite(self, *_args, **_kwargs):
val = apply(controlsc.FXSpinner_isComposite,(self,) + _args, _kwargs)
return val
def contains(self, *_args, **_kwargs):
val = apply(controlsc.FXSpinner_contains,(self,) + _args, _kwargs)
return val
def getWidthForHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXSpinner_getWidthForHeight,(self,) + _args, _kwargs)
return val
def getHeightForWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXSpinner_getHeightForWidth,(self,) + _args, _kwargs)
return val
def doesSaveUnder(self, *_args, **_kwargs):
val = apply(controlsc.FXSpinner_doesSaveUnder,(self,) + _args, _kwargs)
return val
def reparent(self, *_args, **_kwargs):
val = apply(controlsc.FXSpinner_reparent,(self,) + _args, _kwargs)
return val
def setBackColor(self, *_args, **_kwargs):
val = apply(controlsc.FXSpinner_setBackColor,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FXSpinner instance at %s>" % (self.this,)
class FXSpinner(FXSpinnerPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FXSpinner,_args,_kwargs)
self.thisown = 1
FXPyRegister(self)
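# FX_Tooltip: popup tip window derived from FX_Shell rather than from an
# in-layout widget.  ID_TIP_SHOW / ID_TIP_HIDE are the timer messages that
# pop the tip up and down; tip text, font and text color are settable
# through the accessors below.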
class FX_TooltipPtr(FX_ShellPtr):
ID_TIP_SHOW = controlsc.FX_Tooltip_ID_TIP_SHOW
ID_TIP_HIDE = controlsc.FX_Tooltip_ID_TIP_HIDE
ID_LAST = controlsc.FX_Tooltip_ID_LAST
def __init__(self,this):
self.this = this
self.thisown = 0
def onPaint(self, *_args, **_kwargs):
val = apply(controlsc.FX_Tooltip_onPaint,(self,) + _args, _kwargs)
return val
def onUpdate(self, *_args, **_kwargs):
val = apply(controlsc.FX_Tooltip_onUpdate,(self,) + _args, _kwargs)
return val
def onTipShow(self, *_args, **_kwargs):
val = apply(controlsc.FX_Tooltip_onTipShow,(self,) + _args, _kwargs)
return val
def onTipHide(self, *_args, **_kwargs):
val = apply(controlsc.FX_Tooltip_onTipHide,(self,) + _args, _kwargs)
return val
def onCmdGetStringValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_Tooltip_onCmdGetStringValue,(self,) + _args, _kwargs)
return val
def onCmdSetStringValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_Tooltip_onCmdSetStringValue,(self,) + _args, _kwargs)
return val
def setText(self, *_args, **_kwargs):
val = apply(controlsc.FX_Tooltip_setText,(self,) + _args, _kwargs)
return val
def getText(self, *_args, **_kwargs):
val = apply(controlsc.FX_Tooltip_getText,(self,) + _args, _kwargs)
return val
def setFont(self, *_args, **_kwargs):
val = apply(controlsc.FX_Tooltip_setFont,(self,) + _args, _kwargs)
return val
def getFont(self, *_args, **_kwargs):
val = apply(controlsc.FX_Tooltip_getFont,(self,) + _args, _kwargs)
return val
def getTextColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_Tooltip_getTextColor,(self,) + _args, _kwargs)
return val
def setTextColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_Tooltip_setTextColor,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FX_Tooltip instance at %s>" % (self.this,)
class FX_Tooltip(FX_TooltipPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FX_Tooltip,_args,_kwargs)
self.thisown = 1
class FXTooltipPtr(FX_TooltipPtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXTooltip_onDefault,(self,) + _args, _kwargs)
return val
def create(self, *_args, **_kwargs):
val = apply(controlsc.FXTooltip_create,(self,) + _args, _kwargs)
return val
def destroy(self, *_args, **_kwargs):
val = apply(controlsc.FXTooltip_destroy,(self,) + _args, _kwargs)
return val
def detach(self, *_args, **_kwargs):
val = apply(controlsc.FXTooltip_detach,(self,) + _args, _kwargs)
return val
def resize(self, *_args, **_kwargs):
val = apply(controlsc.FXTooltip_resize,(self,) + _args, _kwargs)
return val
def getDefaultWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXTooltip_getDefaultWidth,(self,) + _args, _kwargs)
return val
def getDefaultHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXTooltip_getDefaultHeight,(self,) + _args, _kwargs)
return val
def show(self, *_args, **_kwargs):
val = apply(controlsc.FXTooltip_show,(self,) + _args, _kwargs)
return val
def hide(self, *_args, **_kwargs):
val = apply(controlsc.FXTooltip_hide,(self,) + _args, _kwargs)
return val
def enable(self, *_args, **_kwargs):
val = apply(controlsc.FXTooltip_enable,(self,) + _args, _kwargs)
return val
def disable(self, *_args, **_kwargs):
val = apply(controlsc.FXTooltip_disable,(self,) + _args, _kwargs)
return val
def canFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXTooltip_canFocus,(self,) + _args, _kwargs)
return val
def setFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXTooltip_setFocus,(self,) + _args, _kwargs)
return val
def killFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXTooltip_killFocus,(self,) + _args, _kwargs)
return val
def setDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXTooltip_setDefault,(self,) + _args, _kwargs)
return val
def recalc(self, *_args, **_kwargs):
val = apply(controlsc.FXTooltip_recalc,(self,) + _args, _kwargs)
return val
def layout(self, *_args, **_kwargs):
val = apply(controlsc.FXTooltip_layout,(self,) + _args, _kwargs)
return val
def lower(self, *_args, **_kwargs):
val = apply(controlsc.FXTooltip_lower,(self,) + _args, _kwargs)
return val
def move(self, *_args, **_kwargs):
val = apply(controlsc.FXTooltip_move,(self,) + _args, _kwargs)
return val
def position(self, *_args, **_kwargs):
val = apply(controlsc.FXTooltip_position,(self,) + _args, _kwargs)
return val
def isComposite(self, *_args, **_kwargs):
val = apply(controlsc.FXTooltip_isComposite,(self,) + _args, _kwargs)
return val
def contains(self, *_args, **_kwargs):
val = apply(controlsc.FXTooltip_contains,(self,) + _args, _kwargs)
return val
def getWidthForHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXTooltip_getWidthForHeight,(self,) + _args, _kwargs)
return val
def getHeightForWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXTooltip_getHeightForWidth,(self,) + _args, _kwargs)
return val
def doesSaveUnder(self, *_args, **_kwargs):
val = apply(controlsc.FXTooltip_doesSaveUnder,(self,) + _args, _kwargs)
return val
def reparent(self, *_args, **_kwargs):
val = apply(controlsc.FXTooltip_reparent,(self,) + _args, _kwargs)
return val
def setBackColor(self, *_args, **_kwargs):
val = apply(controlsc.FXTooltip_setBackColor,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FXTooltip instance at %s>" % (self.this,)
class FXTooltip(FXTooltipPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FXTooltip,_args,_kwargs)
self.thisown = 1
FXPyRegister(self)
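# FX_Option: one selectable entry inside an option menu.  It derives from
# FX_Label and only adds the paint, enter/leave, button, key and hot-key
# handlers needed to behave like a menu entry.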
class FX_OptionPtr(FX_LabelPtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onPaint(self, *_args, **_kwargs):
val = apply(controlsc.FX_Option_onPaint,(self,) + _args, _kwargs)
return val
def onEnter(self, *_args, **_kwargs):
val = apply(controlsc.FX_Option_onEnter,(self,) + _args, _kwargs)
return val
def onLeave(self, *_args, **_kwargs):
val = apply(controlsc.FX_Option_onLeave,(self,) + _args, _kwargs)
return val
def onLeftBtnPress(self, *_args, **_kwargs):
val = apply(controlsc.FX_Option_onLeftBtnPress,(self,) + _args, _kwargs)
return val
def onLeftBtnRelease(self, *_args, **_kwargs):
val = apply(controlsc.FX_Option_onLeftBtnRelease,(self,) + _args, _kwargs)
return val
def onKeyPress(self, *_args, **_kwargs):
val = apply(controlsc.FX_Option_onKeyPress,(self,) + _args, _kwargs)
return val
def onKeyRelease(self, *_args, **_kwargs):
val = apply(controlsc.FX_Option_onKeyRelease,(self,) + _args, _kwargs)
return val
def onHotKeyPress(self, *_args, **_kwargs):
val = apply(controlsc.FX_Option_onHotKeyPress,(self,) + _args, _kwargs)
return val
def onHotKeyRelease(self, *_args, **_kwargs):
val = apply(controlsc.FX_Option_onHotKeyRelease,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FX_Option instance at %s>" % (self.this,)
class FX_Option(FX_OptionPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FX_Option,_args,_kwargs)
self.thisown = 1
class FXOptionPtr(FX_OptionPtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXOption_onDefault,(self,) + _args, _kwargs)
return val
def create(self, *_args, **_kwargs):
val = apply(controlsc.FXOption_create,(self,) + _args, _kwargs)
return val
def destroy(self, *_args, **_kwargs):
val = apply(controlsc.FXOption_destroy,(self,) + _args, _kwargs)
return val
def detach(self, *_args, **_kwargs):
val = apply(controlsc.FXOption_detach,(self,) + _args, _kwargs)
return val
def resize(self, *_args, **_kwargs):
val = apply(controlsc.FXOption_resize,(self,) + _args, _kwargs)
return val
def getDefaultWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXOption_getDefaultWidth,(self,) + _args, _kwargs)
return val
def getDefaultHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXOption_getDefaultHeight,(self,) + _args, _kwargs)
return val
def show(self, *_args, **_kwargs):
val = apply(controlsc.FXOption_show,(self,) + _args, _kwargs)
return val
def hide(self, *_args, **_kwargs):
val = apply(controlsc.FXOption_hide,(self,) + _args, _kwargs)
return val
def enable(self, *_args, **_kwargs):
val = apply(controlsc.FXOption_enable,(self,) + _args, _kwargs)
return val
def disable(self, *_args, **_kwargs):
val = apply(controlsc.FXOption_disable,(self,) + _args, _kwargs)
return val
def canFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXOption_canFocus,(self,) + _args, _kwargs)
return val
def setFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXOption_setFocus,(self,) + _args, _kwargs)
return val
def killFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXOption_killFocus,(self,) + _args, _kwargs)
return val
def setDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXOption_setDefault,(self,) + _args, _kwargs)
return val
def recalc(self, *_args, **_kwargs):
val = apply(controlsc.FXOption_recalc,(self,) + _args, _kwargs)
return val
def layout(self, *_args, **_kwargs):
val = apply(controlsc.FXOption_layout,(self,) + _args, _kwargs)
return val
def lower(self, *_args, **_kwargs):
val = apply(controlsc.FXOption_lower,(self,) + _args, _kwargs)
return val
def move(self, *_args, **_kwargs):
val = apply(controlsc.FXOption_move,(self,) + _args, _kwargs)
return val
def position(self, *_args, **_kwargs):
val = apply(controlsc.FXOption_position,(self,) + _args, _kwargs)
return val
def isComposite(self, *_args, **_kwargs):
val = apply(controlsc.FXOption_isComposite,(self,) + _args, _kwargs)
return val
def contains(self, *_args, **_kwargs):
val = apply(controlsc.FXOption_contains,(self,) + _args, _kwargs)
return val
def getWidthForHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXOption_getWidthForHeight,(self,) + _args, _kwargs)
return val
def getHeightForWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXOption_getHeightForWidth,(self,) + _args, _kwargs)
return val
def doesSaveUnder(self, *_args, **_kwargs):
val = apply(controlsc.FXOption_doesSaveUnder,(self,) + _args, _kwargs)
return val
def reparent(self, *_args, **_kwargs):
val = apply(controlsc.FXOption_reparent,(self,) + _args, _kwargs)
return val
def setBackColor(self, *_args, **_kwargs):
val = apply(controlsc.FXOption_setBackColor,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FXOption instance at %s>" % (self.this,)
class FXOption(FXOptionPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FXOption,_args,_kwargs)
self.thisown = 1
FXPyRegister(self)
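# FX_OptionMenu: the button that posts a pane of FX_Option entries.
# setCurrent/getCurrent select by item, setCurrentNo/getCurrentNo by index;
# setPopup/getPopup manage the pane and isPopped reports whether it is up.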
class FX_OptionMenuPtr(FX_LabelPtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onPaint(self, *_args, **_kwargs):
val = apply(controlsc.FX_OptionMenu_onPaint,(self,) + _args, _kwargs)
return val
def onLeftBtnPress(self, *_args, **_kwargs):
val = apply(controlsc.FX_OptionMenu_onLeftBtnPress,(self,) + _args, _kwargs)
return val
def onLeftBtnRelease(self, *_args, **_kwargs):
val = apply(controlsc.FX_OptionMenu_onLeftBtnRelease,(self,) + _args, _kwargs)
return val
def onFocusIn(self, *_args, **_kwargs):
val = apply(controlsc.FX_OptionMenu_onFocusIn,(self,) + _args, _kwargs)
return val
def onFocusOut(self, *_args, **_kwargs):
val = apply(controlsc.FX_OptionMenu_onFocusOut,(self,) + _args, _kwargs)
return val
def onMotion(self, *_args, **_kwargs):
val = apply(controlsc.FX_OptionMenu_onMotion,(self,) + _args, _kwargs)
return val
def onKeyPress(self, *_args, **_kwargs):
val = apply(controlsc.FX_OptionMenu_onKeyPress,(self,) + _args, _kwargs)
return val
def onKeyRelease(self, *_args, **_kwargs):
val = apply(controlsc.FX_OptionMenu_onKeyRelease,(self,) + _args, _kwargs)
return val
def onCmdPost(self, *_args, **_kwargs):
val = apply(controlsc.FX_OptionMenu_onCmdPost,(self,) + _args, _kwargs)
return val
def onCmdUnpost(self, *_args, **_kwargs):
val = apply(controlsc.FX_OptionMenu_onCmdUnpost,(self,) + _args, _kwargs)
return val
def onQueryHelp(self, *_args, **_kwargs):
val = apply(controlsc.FX_OptionMenu_onQueryHelp,(self,) + _args, _kwargs)
return val
def onQueryTip(self, *_args, **_kwargs):
val = apply(controlsc.FX_OptionMenu_onQueryTip,(self,) + _args, _kwargs)
return val
def onCmdSetValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_OptionMenu_onCmdSetValue,(self,) + _args, _kwargs)
return val
def onCmdSetIntValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_OptionMenu_onCmdSetIntValue,(self,) + _args, _kwargs)
return val
def onCmdGetIntValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_OptionMenu_onCmdGetIntValue,(self,) + _args, _kwargs)
return val
def setCurrent(self, *_args, **_kwargs):
val = apply(controlsc.FX_OptionMenu_setCurrent,(self,) + _args, _kwargs)
return val
def getCurrent(self, *_args, **_kwargs):
val = apply(controlsc.FX_OptionMenu_getCurrent,(self,) + _args, _kwargs)
return val
def setCurrentNo(self, *_args, **_kwargs):
val = apply(controlsc.FX_OptionMenu_setCurrentNo,(self,) + _args, _kwargs)
return val
def getCurrentNo(self, *_args, **_kwargs):
val = apply(controlsc.FX_OptionMenu_getCurrentNo,(self,) + _args, _kwargs)
return val
def setPopup(self, *_args, **_kwargs):
val = apply(controlsc.FX_OptionMenu_setPopup,(self,) + _args, _kwargs)
return val
def getPopup(self, *_args, **_kwargs):
val = apply(controlsc.FX_OptionMenu_getPopup,(self,) + _args, _kwargs)
return val
def isPopped(self, *_args, **_kwargs):
val = apply(controlsc.FX_OptionMenu_isPopped,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FX_OptionMenu instance at %s>" % (self.this,)
class FX_OptionMenu(FX_OptionMenuPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FX_OptionMenu,_args,_kwargs)
self.thisown = 1
class FXOptionMenuPtr(FX_OptionMenuPtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXOptionMenu_onDefault,(self,) + _args, _kwargs)
return val
def create(self, *_args, **_kwargs):
val = apply(controlsc.FXOptionMenu_create,(self,) + _args, _kwargs)
return val
def destroy(self, *_args, **_kwargs):
val = apply(controlsc.FXOptionMenu_destroy,(self,) + _args, _kwargs)
return val
def detach(self, *_args, **_kwargs):
val = apply(controlsc.FXOptionMenu_detach,(self,) + _args, _kwargs)
return val
def resize(self, *_args, **_kwargs):
val = apply(controlsc.FXOptionMenu_resize,(self,) + _args, _kwargs)
return val
def getDefaultWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXOptionMenu_getDefaultWidth,(self,) + _args, _kwargs)
return val
def getDefaultHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXOptionMenu_getDefaultHeight,(self,) + _args, _kwargs)
return val
def show(self, *_args, **_kwargs):
val = apply(controlsc.FXOptionMenu_show,(self,) + _args, _kwargs)
return val
def hide(self, *_args, **_kwargs):
val = apply(controlsc.FXOptionMenu_hide,(self,) + _args, _kwargs)
return val
def enable(self, *_args, **_kwargs):
val = apply(controlsc.FXOptionMenu_enable,(self,) + _args, _kwargs)
return val
def disable(self, *_args, **_kwargs):
val = apply(controlsc.FXOptionMenu_disable,(self,) + _args, _kwargs)
return val
def canFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXOptionMenu_canFocus,(self,) + _args, _kwargs)
return val
def setFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXOptionMenu_setFocus,(self,) + _args, _kwargs)
return val
def killFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXOptionMenu_killFocus,(self,) + _args, _kwargs)
return val
def setDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXOptionMenu_setDefault,(self,) + _args, _kwargs)
return val
def recalc(self, *_args, **_kwargs):
val = apply(controlsc.FXOptionMenu_recalc,(self,) + _args, _kwargs)
return val
def layout(self, *_args, **_kwargs):
val = apply(controlsc.FXOptionMenu_layout,(self,) + _args, _kwargs)
return val
def lower(self, *_args, **_kwargs):
val = apply(controlsc.FXOptionMenu_lower,(self,) + _args, _kwargs)
return val
def move(self, *_args, **_kwargs):
val = apply(controlsc.FXOptionMenu_move,(self,) + _args, _kwargs)
return val
def position(self, *_args, **_kwargs):
val = apply(controlsc.FXOptionMenu_position,(self,) + _args, _kwargs)
return val
def isComposite(self, *_args, **_kwargs):
val = apply(controlsc.FXOptionMenu_isComposite,(self,) + _args, _kwargs)
return val
def contains(self, *_args, **_kwargs):
val = apply(controlsc.FXOptionMenu_contains,(self,) + _args, _kwargs)
return val
def getWidthForHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXOptionMenu_getWidthForHeight,(self,) + _args, _kwargs)
return val
def getHeightForWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXOptionMenu_getHeightForWidth,(self,) + _args, _kwargs)
return val
def doesSaveUnder(self, *_args, **_kwargs):
val = apply(controlsc.FXOptionMenu_doesSaveUnder,(self,) + _args, _kwargs)
return val
def reparent(self, *_args, **_kwargs):
val = apply(controlsc.FXOptionMenu_reparent,(self,) + _args, _kwargs)
return val
def setBackColor(self, *_args, **_kwargs):
val = apply(controlsc.FXOptionMenu_setBackColor,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FXOptionMenu instance at %s>" % (self.this,)
class FXOptionMenu(FXOptionMenuPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FXOptionMenu,_args,_kwargs)
self.thisown = 1
FXPyRegister(self)
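# FX_TabBar: row of tab items.  ID_OPEN_FIRST .. ID_OPEN_LAST form a message
# range for switching tabs by index (the "NINETH" spelling follows the
# underlying FOX constant); ID_OPEN_ITEM switches to the sending tab.  The
# current tab and the tab style are exposed through the accessors below.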
class FX_TabBarPtr(FX_PackerPtr):
ID_OPEN_ITEM = controlsc.FX_TabBar_ID_OPEN_ITEM
ID_OPEN_FIRST = controlsc.FX_TabBar_ID_OPEN_FIRST
ID_OPEN_SECOND = controlsc.FX_TabBar_ID_OPEN_SECOND
ID_OPEN_THIRD = controlsc.FX_TabBar_ID_OPEN_THIRD
ID_OPEN_FOURTH = controlsc.FX_TabBar_ID_OPEN_FOURTH
ID_OPEN_FIFTH = controlsc.FX_TabBar_ID_OPEN_FIFTH
ID_OPEN_SIXTH = controlsc.FX_TabBar_ID_OPEN_SIXTH
ID_OPEN_SEVENTH = controlsc.FX_TabBar_ID_OPEN_SEVENTH
ID_OPEN_EIGHTH = controlsc.FX_TabBar_ID_OPEN_EIGHTH
ID_OPEN_NINETH = controlsc.FX_TabBar_ID_OPEN_NINETH
ID_OPEN_TENTH = controlsc.FX_TabBar_ID_OPEN_TENTH
ID_OPEN_LAST = controlsc.FX_TabBar_ID_OPEN_LAST
ID_LAST = controlsc.FX_TabBar_ID_LAST
def __init__(self,this):
self.this = this
self.thisown = 0
def onPaint(self, *_args, **_kwargs):
val = apply(controlsc.FX_TabBar_onPaint,(self,) + _args, _kwargs)
return val
def onFocusNext(self, *_args, **_kwargs):
val = apply(controlsc.FX_TabBar_onFocusNext,(self,) + _args, _kwargs)
return val
def onFocusPrev(self, *_args, **_kwargs):
val = apply(controlsc.FX_TabBar_onFocusPrev,(self,) + _args, _kwargs)
return val
def onFocusUp(self, *_args, **_kwargs):
val = apply(controlsc.FX_TabBar_onFocusUp,(self,) + _args, _kwargs)
return val
def onFocusDown(self, *_args, **_kwargs):
val = apply(controlsc.FX_TabBar_onFocusDown,(self,) + _args, _kwargs)
return val
def onFocusLeft(self, *_args, **_kwargs):
val = apply(controlsc.FX_TabBar_onFocusLeft,(self,) + _args, _kwargs)
return val
def onFocusRight(self, *_args, **_kwargs):
val = apply(controlsc.FX_TabBar_onFocusRight,(self,) + _args, _kwargs)
return val
def onCmdOpenItem(self, *_args, **_kwargs):
val = apply(controlsc.FX_TabBar_onCmdOpenItem,(self,) + _args, _kwargs)
return val
def onCmdSetValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_TabBar_onCmdSetValue,(self,) + _args, _kwargs)
return val
def onCmdSetIntValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_TabBar_onCmdSetIntValue,(self,) + _args, _kwargs)
return val
def onCmdGetIntValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_TabBar_onCmdGetIntValue,(self,) + _args, _kwargs)
return val
def onCmdOpen(self, *_args, **_kwargs):
val = apply(controlsc.FX_TabBar_onCmdOpen,(self,) + _args, _kwargs)
return val
def onUpdOpen(self, *_args, **_kwargs):
val = apply(controlsc.FX_TabBar_onUpdOpen,(self,) + _args, _kwargs)
return val
def setCurrent(self, *_args, **_kwargs):
val = apply(controlsc.FX_TabBar_setCurrent,(self,) + _args, _kwargs)
return val
def getCurrent(self, *_args, **_kwargs):
val = apply(controlsc.FX_TabBar_getCurrent,(self,) + _args, _kwargs)
return val
def getTabStyle(self, *_args, **_kwargs):
val = apply(controlsc.FX_TabBar_getTabStyle,(self,) + _args, _kwargs)
return val
def setTabStyle(self, *_args, **_kwargs):
val = apply(controlsc.FX_TabBar_setTabStyle,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FX_TabBar instance at %s>" % (self.this,)
class FX_TabBar(FX_TabBarPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FX_TabBar,_args,_kwargs)
self.thisown = 1
class FXTabBarPtr(FX_TabBarPtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXTabBar_onDefault,(self,) + _args, _kwargs)
return val
def create(self, *_args, **_kwargs):
val = apply(controlsc.FXTabBar_create,(self,) + _args, _kwargs)
return val
def destroy(self, *_args, **_kwargs):
val = apply(controlsc.FXTabBar_destroy,(self,) + _args, _kwargs)
return val
def detach(self, *_args, **_kwargs):
val = apply(controlsc.FXTabBar_detach,(self,) + _args, _kwargs)
return val
def resize(self, *_args, **_kwargs):
val = apply(controlsc.FXTabBar_resize,(self,) + _args, _kwargs)
return val
def getDefaultWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXTabBar_getDefaultWidth,(self,) + _args, _kwargs)
return val
def getDefaultHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXTabBar_getDefaultHeight,(self,) + _args, _kwargs)
return val
def show(self, *_args, **_kwargs):
val = apply(controlsc.FXTabBar_show,(self,) + _args, _kwargs)
return val
def hide(self, *_args, **_kwargs):
val = apply(controlsc.FXTabBar_hide,(self,) + _args, _kwargs)
return val
def enable(self, *_args, **_kwargs):
val = apply(controlsc.FXTabBar_enable,(self,) + _args, _kwargs)
return val
def disable(self, *_args, **_kwargs):
val = apply(controlsc.FXTabBar_disable,(self,) + _args, _kwargs)
return val
def canFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXTabBar_canFocus,(self,) + _args, _kwargs)
return val
def setFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXTabBar_setFocus,(self,) + _args, _kwargs)
return val
def killFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXTabBar_killFocus,(self,) + _args, _kwargs)
return val
def setDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXTabBar_setDefault,(self,) + _args, _kwargs)
return val
def recalc(self, *_args, **_kwargs):
val = apply(controlsc.FXTabBar_recalc,(self,) + _args, _kwargs)
return val
def layout(self, *_args, **_kwargs):
val = apply(controlsc.FXTabBar_layout,(self,) + _args, _kwargs)
return val
def lower(self, *_args, **_kwargs):
val = apply(controlsc.FXTabBar_lower,(self,) + _args, _kwargs)
return val
def move(self, *_args, **_kwargs):
val = apply(controlsc.FXTabBar_move,(self,) + _args, _kwargs)
return val
def position(self, *_args, **_kwargs):
val = apply(controlsc.FXTabBar_position,(self,) + _args, _kwargs)
return val
def isComposite(self, *_args, **_kwargs):
val = apply(controlsc.FXTabBar_isComposite,(self,) + _args, _kwargs)
return val
def contains(self, *_args, **_kwargs):
val = apply(controlsc.FXTabBar_contains,(self,) + _args, _kwargs)
return val
def getWidthForHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXTabBar_getWidthForHeight,(self,) + _args, _kwargs)
return val
def getHeightForWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXTabBar_getHeightForWidth,(self,) + _args, _kwargs)
return val
def doesSaveUnder(self, *_args, **_kwargs):
val = apply(controlsc.FXTabBar_doesSaveUnder,(self,) + _args, _kwargs)
return val
def reparent(self, *_args, **_kwargs):
val = apply(controlsc.FXTabBar_reparent,(self,) + _args, _kwargs)
return val
def setBackColor(self, *_args, **_kwargs):
val = apply(controlsc.FXTabBar_setBackColor,(self,) + _args, _kwargs)
return val
def setCurrent(self, *_args, **_kwargs):
val = apply(controlsc.FXTabBar_setCurrent,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FXTabBar instance at %s>" % (self.this,)
class FXTabBar(FXTabBarPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FXTabBar,_args,_kwargs)
self.thisown = 1
FXPyRegister(self)
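# FX_TabItem: the clickable header of one tab, derived from FX_Label.
# getTabOrientation/setTabOrientation control which edge the tab text
# runs along.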
class FX_TabItemPtr(FX_LabelPtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onPaint(self, *_args, **_kwargs):
val = apply(controlsc.FX_TabItem_onPaint,(self,) + _args, _kwargs)
return val
def onFocusIn(self, *_args, **_kwargs):
val = apply(controlsc.FX_TabItem_onFocusIn,(self,) + _args, _kwargs)
return val
def onFocusOut(self, *_args, **_kwargs):
val = apply(controlsc.FX_TabItem_onFocusOut,(self,) + _args, _kwargs)
return val
def onUngrabbed(self, *_args, **_kwargs):
val = apply(controlsc.FX_TabItem_onUngrabbed,(self,) + _args, _kwargs)
return val
def onLeftBtnPress(self, *_args, **_kwargs):
val = apply(controlsc.FX_TabItem_onLeftBtnPress,(self,) + _args, _kwargs)
return val
def onLeftBtnRelease(self, *_args, **_kwargs):
val = apply(controlsc.FX_TabItem_onLeftBtnRelease,(self,) + _args, _kwargs)
return val
def onKeyPress(self, *_args, **_kwargs):
val = apply(controlsc.FX_TabItem_onKeyPress,(self,) + _args, _kwargs)
return val
def onKeyRelease(self, *_args, **_kwargs):
val = apply(controlsc.FX_TabItem_onKeyRelease,(self,) + _args, _kwargs)
return val
def onHotKeyPress(self, *_args, **_kwargs):
val = apply(controlsc.FX_TabItem_onHotKeyPress,(self,) + _args, _kwargs)
return val
def onHotKeyRelease(self, *_args, **_kwargs):
val = apply(controlsc.FX_TabItem_onHotKeyRelease,(self,) + _args, _kwargs)
return val
def getTabOrientation(self, *_args, **_kwargs):
val = apply(controlsc.FX_TabItem_getTabOrientation,(self,) + _args, _kwargs)
return val
def setTabOrientation(self, *_args, **_kwargs):
val = apply(controlsc.FX_TabItem_setTabOrientation,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FX_TabItem instance at %s>" % (self.this,)
class FX_TabItem(FX_TabItemPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FX_TabItem,_args,_kwargs)
self.thisown = 1
class FXTabItemPtr(FX_TabItemPtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXTabItem_onDefault,(self,) + _args, _kwargs)
return val
def create(self, *_args, **_kwargs):
val = apply(controlsc.FXTabItem_create,(self,) + _args, _kwargs)
return val
def destroy(self, *_args, **_kwargs):
val = apply(controlsc.FXTabItem_destroy,(self,) + _args, _kwargs)
return val
def detach(self, *_args, **_kwargs):
val = apply(controlsc.FXTabItem_detach,(self,) + _args, _kwargs)
return val
def resize(self, *_args, **_kwargs):
val = apply(controlsc.FXTabItem_resize,(self,) + _args, _kwargs)
return val
def getDefaultWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXTabItem_getDefaultWidth,(self,) + _args, _kwargs)
return val
def getDefaultHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXTabItem_getDefaultHeight,(self,) + _args, _kwargs)
return val
def show(self, *_args, **_kwargs):
val = apply(controlsc.FXTabItem_show,(self,) + _args, _kwargs)
return val
def hide(self, *_args, **_kwargs):
val = apply(controlsc.FXTabItem_hide,(self,) + _args, _kwargs)
return val
def enable(self, *_args, **_kwargs):
val = apply(controlsc.FXTabItem_enable,(self,) + _args, _kwargs)
return val
def disable(self, *_args, **_kwargs):
val = apply(controlsc.FXTabItem_disable,(self,) + _args, _kwargs)
return val
def canFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXTabItem_canFocus,(self,) + _args, _kwargs)
return val
def setFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXTabItem_setFocus,(self,) + _args, _kwargs)
return val
def killFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXTabItem_killFocus,(self,) + _args, _kwargs)
return val
def setDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXTabItem_setDefault,(self,) + _args, _kwargs)
return val
def recalc(self, *_args, **_kwargs):
val = apply(controlsc.FXTabItem_recalc,(self,) + _args, _kwargs)
return val
def layout(self, *_args, **_kwargs):
val = apply(controlsc.FXTabItem_layout,(self,) + _args, _kwargs)
return val
def lower(self, *_args, **_kwargs):
val = apply(controlsc.FXTabItem_lower,(self,) + _args, _kwargs)
return val
def move(self, *_args, **_kwargs):
val = apply(controlsc.FXTabItem_move,(self,) + _args, _kwargs)
return val
def position(self, *_args, **_kwargs):
val = apply(controlsc.FXTabItem_position,(self,) + _args, _kwargs)
return val
def isComposite(self, *_args, **_kwargs):
val = apply(controlsc.FXTabItem_isComposite,(self,) + _args, _kwargs)
return val
def contains(self, *_args, **_kwargs):
val = apply(controlsc.FXTabItem_contains,(self,) + _args, _kwargs)
return val
def getWidthForHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXTabItem_getWidthForHeight,(self,) + _args, _kwargs)
return val
def getHeightForWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXTabItem_getHeightForWidth,(self,) + _args, _kwargs)
return val
def doesSaveUnder(self, *_args, **_kwargs):
val = apply(controlsc.FXTabItem_doesSaveUnder,(self,) + _args, _kwargs)
return val
def reparent(self, *_args, **_kwargs):
val = apply(controlsc.FXTabItem_reparent,(self,) + _args, _kwargs)
return val
def setBackColor(self, *_args, **_kwargs):
val = apply(controlsc.FXTabItem_setBackColor,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FXTabItem instance at %s>" % (self.this,)
class FXTabItem(FXTabItemPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FXTabItem,_args,_kwargs)
self.thisown = 1
FXPyRegister(self)
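# FX_TabBook: a tab bar that also manages the panels behind the tabs (in
# FOX the children conventionally alternate tab item, panel, tab item,
# panel, ...).  It only re-implements painting and focus navigation on top
# of FX_TabBar.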
class FX_TabBookPtr(FX_TabBarPtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onPaint(self, *_args, **_kwargs):
val = apply(controlsc.FX_TabBook_onPaint,(self,) + _args, _kwargs)
return val
def onFocusNext(self, *_args, **_kwargs):
val = apply(controlsc.FX_TabBook_onFocusNext,(self,) + _args, _kwargs)
return val
def onFocusPrev(self, *_args, **_kwargs):
val = apply(controlsc.FX_TabBook_onFocusPrev,(self,) + _args, _kwargs)
return val
def onFocusUp(self, *_args, **_kwargs):
val = apply(controlsc.FX_TabBook_onFocusUp,(self,) + _args, _kwargs)
return val
def onFocusDown(self, *_args, **_kwargs):
val = apply(controlsc.FX_TabBook_onFocusDown,(self,) + _args, _kwargs)
return val
def onFocusLeft(self, *_args, **_kwargs):
val = apply(controlsc.FX_TabBook_onFocusLeft,(self,) + _args, _kwargs)
return val
def onFocusRight(self, *_args, **_kwargs):
val = apply(controlsc.FX_TabBook_onFocusRight,(self,) + _args, _kwargs)
return val
def onCmdOpenItem(self, *_args, **_kwargs):
val = apply(controlsc.FX_TabBook_onCmdOpenItem,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FX_TabBook instance at %s>" % (self.this,)
class FX_TabBook(FX_TabBookPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FX_TabBook,_args,_kwargs)
self.thisown = 1
class FXTabBookPtr(FX_TabBookPtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXTabBook_onDefault,(self,) + _args, _kwargs)
return val
def create(self, *_args, **_kwargs):
val = apply(controlsc.FXTabBook_create,(self,) + _args, _kwargs)
return val
def destroy(self, *_args, **_kwargs):
val = apply(controlsc.FXTabBook_destroy,(self,) + _args, _kwargs)
return val
def detach(self, *_args, **_kwargs):
val = apply(controlsc.FXTabBook_detach,(self,) + _args, _kwargs)
return val
def resize(self, *_args, **_kwargs):
val = apply(controlsc.FXTabBook_resize,(self,) + _args, _kwargs)
return val
def getDefaultWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXTabBook_getDefaultWidth,(self,) + _args, _kwargs)
return val
def getDefaultHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXTabBook_getDefaultHeight,(self,) + _args, _kwargs)
return val
def show(self, *_args, **_kwargs):
val = apply(controlsc.FXTabBook_show,(self,) + _args, _kwargs)
return val
def hide(self, *_args, **_kwargs):
val = apply(controlsc.FXTabBook_hide,(self,) + _args, _kwargs)
return val
def enable(self, *_args, **_kwargs):
val = apply(controlsc.FXTabBook_enable,(self,) + _args, _kwargs)
return val
def disable(self, *_args, **_kwargs):
val = apply(controlsc.FXTabBook_disable,(self,) + _args, _kwargs)
return val
def canFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXTabBook_canFocus,(self,) + _args, _kwargs)
return val
def setFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXTabBook_setFocus,(self,) + _args, _kwargs)
return val
def killFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXTabBook_killFocus,(self,) + _args, _kwargs)
return val
def setDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXTabBook_setDefault,(self,) + _args, _kwargs)
return val
def recalc(self, *_args, **_kwargs):
val = apply(controlsc.FXTabBook_recalc,(self,) + _args, _kwargs)
return val
def layout(self, *_args, **_kwargs):
val = apply(controlsc.FXTabBook_layout,(self,) + _args, _kwargs)
return val
def lower(self, *_args, **_kwargs):
val = apply(controlsc.FXTabBook_lower,(self,) + _args, _kwargs)
return val
def move(self, *_args, **_kwargs):
val = apply(controlsc.FXTabBook_move,(self,) + _args, _kwargs)
return val
def position(self, *_args, **_kwargs):
val = apply(controlsc.FXTabBook_position,(self,) + _args, _kwargs)
return val
def isComposite(self, *_args, **_kwargs):
val = apply(controlsc.FXTabBook_isComposite,(self,) + _args, _kwargs)
return val
def contains(self, *_args, **_kwargs):
val = apply(controlsc.FXTabBook_contains,(self,) + _args, _kwargs)
return val
def getWidthForHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXTabBook_getWidthForHeight,(self,) + _args, _kwargs)
return val
def getHeightForWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXTabBook_getHeightForWidth,(self,) + _args, _kwargs)
return val
def doesSaveUnder(self, *_args, **_kwargs):
val = apply(controlsc.FXTabBook_doesSaveUnder,(self,) + _args, _kwargs)
return val
def reparent(self, *_args, **_kwargs):
val = apply(controlsc.FXTabBook_reparent,(self,) + _args, _kwargs)
return val
def setBackColor(self, *_args, **_kwargs):
val = apply(controlsc.FXTabBook_setBackColor,(self,) + _args, _kwargs)
return val
def setCurrent(self, *_args, **_kwargs):
val = apply(controlsc.FXTabBook_setCurrent,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FXTabBook instance at %s>" % (self.this,)
class FXTabBook(FXTabBookPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FXTabBook,_args,_kwargs)
self.thisown = 1
FXPyRegister(self)
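# FX_Scrollbar: ID_AUTOINC_*/ID_AUTODEC_* and ID_TIMEWHEEL are the timer
# messages behind auto-repeat scrolling by pixel, line and page.  Geometry
# is controlled through setRange/setPage/setLine/setPosition; hilite,
# shadow and border colors are settable independently of the back color.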
class FX_ScrollbarPtr(FX_WindowPtr):
ID_TIMEWHEEL = controlsc.FX_Scrollbar_ID_TIMEWHEEL
ID_AUTOINC_LINE = controlsc.FX_Scrollbar_ID_AUTOINC_LINE
ID_AUTODEC_LINE = controlsc.FX_Scrollbar_ID_AUTODEC_LINE
ID_AUTOINC_PAGE = controlsc.FX_Scrollbar_ID_AUTOINC_PAGE
ID_AUTODEC_PAGE = controlsc.FX_Scrollbar_ID_AUTODEC_PAGE
ID_AUTOINC_PIX = controlsc.FX_Scrollbar_ID_AUTOINC_PIX
ID_AUTODEC_PIX = controlsc.FX_Scrollbar_ID_AUTODEC_PIX
ID_LAST = controlsc.FX_Scrollbar_ID_LAST
def __init__(self,this):
self.this = this
self.thisown = 0
def onPaint(self, *_args, **_kwargs):
val = apply(controlsc.FX_Scrollbar_onPaint,(self,) + _args, _kwargs)
return val
def onMotion(self, *_args, **_kwargs):
val = apply(controlsc.FX_Scrollbar_onMotion,(self,) + _args, _kwargs)
return val
def onMouseWheel(self, *_args, **_kwargs):
val = apply(controlsc.FX_Scrollbar_onMouseWheel,(self,) + _args, _kwargs)
return val
def onLeftBtnPress(self, *_args, **_kwargs):
val = apply(controlsc.FX_Scrollbar_onLeftBtnPress,(self,) + _args, _kwargs)
return val
def onLeftBtnRelease(self, *_args, **_kwargs):
val = apply(controlsc.FX_Scrollbar_onLeftBtnRelease,(self,) + _args, _kwargs)
return val
def onMiddleBtnPress(self, *_args, **_kwargs):
val = apply(controlsc.FX_Scrollbar_onMiddleBtnPress,(self,) + _args, _kwargs)
return val
def onMiddleBtnRelease(self, *_args, **_kwargs):
val = apply(controlsc.FX_Scrollbar_onMiddleBtnRelease,(self,) + _args, _kwargs)
return val
def onRightBtnPress(self, *_args, **_kwargs):
val = apply(controlsc.FX_Scrollbar_onRightBtnPress,(self,) + _args, _kwargs)
return val
def onRightBtnRelease(self, *_args, **_kwargs):
val = apply(controlsc.FX_Scrollbar_onRightBtnRelease,(self,) + _args, _kwargs)
return val
def onUngrabbed(self, *_args, **_kwargs):
val = apply(controlsc.FX_Scrollbar_onUngrabbed,(self,) + _args, _kwargs)
return val
def onTimeIncPix(self, *_args, **_kwargs):
val = apply(controlsc.FX_Scrollbar_onTimeIncPix,(self,) + _args, _kwargs)
return val
def onTimeIncLine(self, *_args, **_kwargs):
val = apply(controlsc.FX_Scrollbar_onTimeIncLine,(self,) + _args, _kwargs)
return val
def onTimeIncPage(self, *_args, **_kwargs):
val = apply(controlsc.FX_Scrollbar_onTimeIncPage,(self,) + _args, _kwargs)
return val
def onTimeDecPix(self, *_args, **_kwargs):
val = apply(controlsc.FX_Scrollbar_onTimeDecPix,(self,) + _args, _kwargs)
return val
def onTimeDecLine(self, *_args, **_kwargs):
val = apply(controlsc.FX_Scrollbar_onTimeDecLine,(self,) + _args, _kwargs)
return val
def onTimeDecPage(self, *_args, **_kwargs):
val = apply(controlsc.FX_Scrollbar_onTimeDecPage,(self,) + _args, _kwargs)
return val
def onTimeWheel(self, *_args, **_kwargs):
val = apply(controlsc.FX_Scrollbar_onTimeWheel,(self,) + _args, _kwargs)
return val
def onCmdSetValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_Scrollbar_onCmdSetValue,(self,) + _args, _kwargs)
return val
def onCmdSetIntValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_Scrollbar_onCmdSetIntValue,(self,) + _args, _kwargs)
return val
def onCmdGetIntValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_Scrollbar_onCmdGetIntValue,(self,) + _args, _kwargs)
return val
def onCmdSetIntRange(self, *_args, **_kwargs):
val = apply(controlsc.FX_Scrollbar_onCmdSetIntRange,(self,) + _args, _kwargs)
return val
def onCmdGetIntRange(self, *_args, **_kwargs):
val = apply(controlsc.FX_Scrollbar_onCmdGetIntRange,(self,) + _args, _kwargs)
return val
def setRange(self, *_args, **_kwargs):
val = apply(controlsc.FX_Scrollbar_setRange,(self,) + _args, _kwargs)
return val
def getRange(self, *_args, **_kwargs):
val = apply(controlsc.FX_Scrollbar_getRange,(self,) + _args, _kwargs)
return val
def setPage(self, *_args, **_kwargs):
val = apply(controlsc.FX_Scrollbar_setPage,(self,) + _args, _kwargs)
return val
def getPage(self, *_args, **_kwargs):
val = apply(controlsc.FX_Scrollbar_getPage,(self,) + _args, _kwargs)
return val
def setLine(self, *_args, **_kwargs):
val = apply(controlsc.FX_Scrollbar_setLine,(self,) + _args, _kwargs)
return val
def getLine(self, *_args, **_kwargs):
val = apply(controlsc.FX_Scrollbar_getLine,(self,) + _args, _kwargs)
return val
def setPosition(self, *_args, **_kwargs):
val = apply(controlsc.FX_Scrollbar_setPosition,(self,) + _args, _kwargs)
return val
def getPosition(self, *_args, **_kwargs):
val = apply(controlsc.FX_Scrollbar_getPosition,(self,) + _args, _kwargs)
return val
def setHiliteColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_Scrollbar_setHiliteColor,(self,) + _args, _kwargs)
return val
def getHiliteColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_Scrollbar_getHiliteColor,(self,) + _args, _kwargs)
return val
def setShadowColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_Scrollbar_setShadowColor,(self,) + _args, _kwargs)
return val
def getShadowColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_Scrollbar_getShadowColor,(self,) + _args, _kwargs)
return val
def getBorderColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_Scrollbar_getBorderColor,(self,) + _args, _kwargs)
return val
def setBorderColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_Scrollbar_setBorderColor,(self,) + _args, _kwargs)
return val
def getScrollbarStyle(self, *_args, **_kwargs):
val = apply(controlsc.FX_Scrollbar_getScrollbarStyle,(self,) + _args, _kwargs)
return val
def setScrollbarStyle(self, *_args, **_kwargs):
val = apply(controlsc.FX_Scrollbar_setScrollbarStyle,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FX_Scrollbar instance at %s>" % (self.this,)
class FX_Scrollbar(FX_ScrollbarPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FX_Scrollbar,_args,_kwargs)
self.thisown = 1
class FXScrollbarPtr(FX_ScrollbarPtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXScrollbar_onDefault,(self,) + _args, _kwargs)
return val
def create(self, *_args, **_kwargs):
val = apply(controlsc.FXScrollbar_create,(self,) + _args, _kwargs)
return val
def destroy(self, *_args, **_kwargs):
val = apply(controlsc.FXScrollbar_destroy,(self,) + _args, _kwargs)
return val
def detach(self, *_args, **_kwargs):
val = apply(controlsc.FXScrollbar_detach,(self,) + _args, _kwargs)
return val
def resize(self, *_args, **_kwargs):
val = apply(controlsc.FXScrollbar_resize,(self,) + _args, _kwargs)
return val
def getDefaultWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXScrollbar_getDefaultWidth,(self,) + _args, _kwargs)
return val
def getDefaultHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXScrollbar_getDefaultHeight,(self,) + _args, _kwargs)
return val
def show(self, *_args, **_kwargs):
val = apply(controlsc.FXScrollbar_show,(self,) + _args, _kwargs)
return val
def hide(self, *_args, **_kwargs):
val = apply(controlsc.FXScrollbar_hide,(self,) + _args, _kwargs)
return val
def enable(self, *_args, **_kwargs):
val = apply(controlsc.FXScrollbar_enable,(self,) + _args, _kwargs)
return val
def disable(self, *_args, **_kwargs):
val = apply(controlsc.FXScrollbar_disable,(self,) + _args, _kwargs)
return val
def canFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXScrollbar_canFocus,(self,) + _args, _kwargs)
return val
def setFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXScrollbar_setFocus,(self,) + _args, _kwargs)
return val
def killFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXScrollbar_killFocus,(self,) + _args, _kwargs)
return val
def setDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXScrollbar_setDefault,(self,) + _args, _kwargs)
return val
def recalc(self, *_args, **_kwargs):
val = apply(controlsc.FXScrollbar_recalc,(self,) + _args, _kwargs)
return val
def layout(self, *_args, **_kwargs):
val = apply(controlsc.FXScrollbar_layout,(self,) + _args, _kwargs)
return val
def lower(self, *_args, **_kwargs):
val = apply(controlsc.FXScrollbar_lower,(self,) + _args, _kwargs)
return val
def move(self, *_args, **_kwargs):
val = apply(controlsc.FXScrollbar_move,(self,) + _args, _kwargs)
return val
def position(self, *_args, **_kwargs):
val = apply(controlsc.FXScrollbar_position,(self,) + _args, _kwargs)
return val
def isComposite(self, *_args, **_kwargs):
val = apply(controlsc.FXScrollbar_isComposite,(self,) + _args, _kwargs)
return val
def contains(self, *_args, **_kwargs):
val = apply(controlsc.FXScrollbar_contains,(self,) + _args, _kwargs)
return val
def getWidthForHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXScrollbar_getWidthForHeight,(self,) + _args, _kwargs)
return val
def getHeightForWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXScrollbar_getHeightForWidth,(self,) + _args, _kwargs)
return val
def doesSaveUnder(self, *_args, **_kwargs):
val = apply(controlsc.FXScrollbar_doesSaveUnder,(self,) + _args, _kwargs)
return val
def reparent(self, *_args, **_kwargs):
val = apply(controlsc.FXScrollbar_reparent,(self,) + _args, _kwargs)
return val
def setBackColor(self, *_args, **_kwargs):
val = apply(controlsc.FXScrollbar_setBackColor,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FXScrollbar instance at %s>" % (self.this,)
class FXScrollbar(FXScrollbarPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FXScrollbar,_args,_kwargs)
self.thisown = 1
FXPyRegister(self)
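# FX_ScrollCorner: the small filler widget drawn where a horizontal and a
# vertical scrollbar meet; beyond FX_Window it only implements onPaint.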
class FX_ScrollCornerPtr(FX_WindowPtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onPaint(self, *_args, **_kwargs):
val = apply(controlsc.FX_ScrollCorner_onPaint,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FX_ScrollCorner instance at %s>" % (self.this,)
class FX_ScrollCorner(FX_ScrollCornerPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FX_ScrollCorner,_args,_kwargs)
self.thisown = 1
class FXScrollCornerPtr(FX_ScrollCornerPtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXScrollCorner_onDefault,(self,) + _args, _kwargs)
return val
def create(self, *_args, **_kwargs):
val = apply(controlsc.FXScrollCorner_create,(self,) + _args, _kwargs)
return val
def destroy(self, *_args, **_kwargs):
val = apply(controlsc.FXScrollCorner_destroy,(self,) + _args, _kwargs)
return val
def detach(self, *_args, **_kwargs):
val = apply(controlsc.FXScrollCorner_detach,(self,) + _args, _kwargs)
return val
def resize(self, *_args, **_kwargs):
val = apply(controlsc.FXScrollCorner_resize,(self,) + _args, _kwargs)
return val
def getDefaultWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXScrollCorner_getDefaultWidth,(self,) + _args, _kwargs)
return val
def getDefaultHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXScrollCorner_getDefaultHeight,(self,) + _args, _kwargs)
return val
def show(self, *_args, **_kwargs):
val = apply(controlsc.FXScrollCorner_show,(self,) + _args, _kwargs)
return val
def hide(self, *_args, **_kwargs):
val = apply(controlsc.FXScrollCorner_hide,(self,) + _args, _kwargs)
return val
def enable(self, *_args, **_kwargs):
val = apply(controlsc.FXScrollCorner_enable,(self,) + _args, _kwargs)
return val
def disable(self, *_args, **_kwargs):
val = apply(controlsc.FXScrollCorner_disable,(self,) + _args, _kwargs)
return val
def canFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXScrollCorner_canFocus,(self,) + _args, _kwargs)
return val
def setFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXScrollCorner_setFocus,(self,) + _args, _kwargs)
return val
def killFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXScrollCorner_killFocus,(self,) + _args, _kwargs)
return val
def setDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXScrollCorner_setDefault,(self,) + _args, _kwargs)
return val
def recalc(self, *_args, **_kwargs):
val = apply(controlsc.FXScrollCorner_recalc,(self,) + _args, _kwargs)
return val
def layout(self, *_args, **_kwargs):
val = apply(controlsc.FXScrollCorner_layout,(self,) + _args, _kwargs)
return val
def lower(self, *_args, **_kwargs):
val = apply(controlsc.FXScrollCorner_lower,(self,) + _args, _kwargs)
return val
def move(self, *_args, **_kwargs):
val = apply(controlsc.FXScrollCorner_move,(self,) + _args, _kwargs)
return val
def position(self, *_args, **_kwargs):
val = apply(controlsc.FXScrollCorner_position,(self,) + _args, _kwargs)
return val
def isComposite(self, *_args, **_kwargs):
val = apply(controlsc.FXScrollCorner_isComposite,(self,) + _args, _kwargs)
return val
def contains(self, *_args, **_kwargs):
val = apply(controlsc.FXScrollCorner_contains,(self,) + _args, _kwargs)
return val
def getWidthForHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXScrollCorner_getWidthForHeight,(self,) + _args, _kwargs)
return val
def getHeightForWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXScrollCorner_getHeightForWidth,(self,) + _args, _kwargs)
return val
def doesSaveUnder(self, *_args, **_kwargs):
val = apply(controlsc.FXScrollCorner_doesSaveUnder,(self,) + _args, _kwargs)
return val
def reparent(self, *_args, **_kwargs):
val = apply(controlsc.FXScrollCorner_reparent,(self,) + _args, _kwargs)
return val
def setBackColor(self, *_args, **_kwargs):
val = apply(controlsc.FXScrollCorner_setBackColor,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FXScrollCorner instance at %s>" % (self.this,)
class FXScrollCorner(FXScrollCornerPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FXScrollCorner,_args,_kwargs)
self.thisown = 1
FXPyRegister(self)
class FX_ListItemPtr(FX_ObjectPtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def getText(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListItem_getText,(self,) + _args, _kwargs)
return val
def setText(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListItem_setText,(self,) + _args, _kwargs)
return val
def getIcon(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListItem_getIcon,(self,) + _args, _kwargs)
return val
def setIcon(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListItem_setIcon,(self,) + _args, _kwargs)
return val
def setData(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListItem_setData,(self,) + _args, _kwargs)
return val
def getData(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListItem_getData,(self,) + _args, _kwargs)
return val
def hasFocus(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListItem_hasFocus,(self,) + _args, _kwargs)
return val
def setFocus(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListItem_setFocus,(self,) + _args, _kwargs)
return val
def isSelected(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListItem_isSelected,(self,) + _args, _kwargs)
return val
def setSelected(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListItem_setSelected,(self,) + _args, _kwargs)
return val
def isEnabled(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListItem_isEnabled,(self,) + _args, _kwargs)
return val
def setEnabled(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListItem_setEnabled,(self,) + _args, _kwargs)
return val
def isDraggable(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListItem_isDraggable,(self,) + _args, _kwargs)
return val
def setDraggable(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListItem_setDraggable,(self,) + _args, _kwargs)
return val
def isIconOwned(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListItem_isIconOwned,(self,) + _args, _kwargs)
return val
def setIconOwned(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListItem_setIconOwned,(self,) + _args, _kwargs)
return val
def getWidth(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListItem_getWidth,(self,) + _args, _kwargs)
return val
def getHeight(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListItem_getHeight,(self,) + _args, _kwargs)
return val
def create(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListItem_create,(self,) + _args, _kwargs)
return val
def detach(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListItem_detach,(self,) + _args, _kwargs)
return val
def destroy(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListItem_destroy,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FX_ListItem instance at %s>" % (self.this,)
class FX_ListItem(FX_ListItemPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FX_ListItem,_args,_kwargs)
self.thisown = 1
class FXListItemPtr(FX_ListItemPtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXListItem_onDefault,(self,) + _args, _kwargs)
return val
def setText(self, *_args, **_kwargs):
val = apply(controlsc.FXListItem_setText,(self,) + _args, _kwargs)
return val
def setIcon(self, *_args, **_kwargs):
val = apply(controlsc.FXListItem_setIcon,(self,) + _args, _kwargs)
return val
def setFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXListItem_setFocus,(self,) + _args, _kwargs)
return val
def setSelected(self, *_args, **_kwargs):
val = apply(controlsc.FXListItem_setSelected,(self,) + _args, _kwargs)
return val
def setEnabled(self, *_args, **_kwargs):
val = apply(controlsc.FXListItem_setEnabled,(self,) + _args, _kwargs)
return val
def setDraggable(self, *_args, **_kwargs):
val = apply(controlsc.FXListItem_setDraggable,(self,) + _args, _kwargs)
return val
def setIconOwned(self, *_args, **_kwargs):
val = apply(controlsc.FXListItem_setIconOwned,(self,) + _args, _kwargs)
return val
def getWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXListItem_getWidth,(self,) + _args, _kwargs)
return val
def getHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXListItem_getHeight,(self,) + _args, _kwargs)
return val
def create(self, *_args, **_kwargs):
val = apply(controlsc.FXListItem_create,(self,) + _args, _kwargs)
return val
def detach(self, *_args, **_kwargs):
val = apply(controlsc.FXListItem_detach,(self,) + _args, _kwargs)
return val
def destroy(self, *_args, **_kwargs):
val = apply(controlsc.FXListItem_destroy,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FXListItem instance at %s>" % (self.this,)
class FXListItem(FXListItemPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FXListItem,_args,_kwargs)
self.thisown = 1
FXPyRegister(self)
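# Hedged usage sketch for FXListItem. The text-only constructor signature is
# an assumption based on FOX's FXListItem(text, icon, userData) convention;
# only the methods wrapped above are guaranteed by this generated module.
def _exampleListItem():
    item = FXListItem("Alpha")   # assumed: text-only construction is accepted
    item.setDraggable(1)         # allow the item to take part in drag-and-drop
    return item.getText()        # expected to return "Alpha"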
class FX_ListPtr(FX_ScrollAreaPtr):
    # Message identifiers, exposed from the underlying C++ enum.
    ID_TIPTIMER = controlsc.FX_List_ID_TIPTIMER
    ID_LOOKUPTIMER = controlsc.FX_List_ID_LOOKUPTIMER
    ID_LAST = controlsc.FX_List_ID_LAST
def __init__(self,this):
self.this = this
self.thisown = 0
def onPaint(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_onPaint,(self,) + _args, _kwargs)
return val
def onEnter(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_onEnter,(self,) + _args, _kwargs)
return val
def onLeave(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_onLeave,(self,) + _args, _kwargs)
return val
def onUngrabbed(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_onUngrabbed,(self,) + _args, _kwargs)
return val
def onKeyPress(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_onKeyPress,(self,) + _args, _kwargs)
return val
def onKeyRelease(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_onKeyRelease,(self,) + _args, _kwargs)
return val
def onLeftBtnPress(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_onLeftBtnPress,(self,) + _args, _kwargs)
return val
def onLeftBtnRelease(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_onLeftBtnRelease,(self,) + _args, _kwargs)
return val
def onRightBtnPress(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_onRightBtnPress,(self,) + _args, _kwargs)
return val
def onRightBtnRelease(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_onRightBtnRelease,(self,) + _args, _kwargs)
return val
def onMotion(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_onMotion,(self,) + _args, _kwargs)
return val
def onFocusIn(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_onFocusIn,(self,) + _args, _kwargs)
return val
def onFocusOut(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_onFocusOut,(self,) + _args, _kwargs)
return val
def onAutoScroll(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_onAutoScroll,(self,) + _args, _kwargs)
return val
def onClicked(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_onClicked,(self,) + _args, _kwargs)
return val
def onDoubleClicked(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_onDoubleClicked,(self,) + _args, _kwargs)
return val
def onTripleClicked(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_onTripleClicked,(self,) + _args, _kwargs)
return val
def onQueryTip(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_onQueryTip,(self,) + _args, _kwargs)
return val
def onQueryHelp(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_onQueryHelp,(self,) + _args, _kwargs)
return val
def onCommand(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_onCommand,(self,) + _args, _kwargs)
return val
def onTipTimer(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_onTipTimer,(self,) + _args, _kwargs)
return val
def onLookupTimer(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_onLookupTimer,(self,) + _args, _kwargs)
return val
def onCmdSetValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_onCmdSetValue,(self,) + _args, _kwargs)
return val
def onCmdGetIntValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_onCmdGetIntValue,(self,) + _args, _kwargs)
return val
def onCmdSetIntValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_onCmdSetIntValue,(self,) + _args, _kwargs)
return val
def getNumItems(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_getNumItems,(self,) + _args, _kwargs)
return val
def getNumVisible(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_getNumVisible,(self,) + _args, _kwargs)
return val
def setNumVisible(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_setNumVisible,(self,) + _args, _kwargs)
return val
    def retrieveItem(self, *_args, **_kwargs):
        val = apply(controlsc.FX_List_retrieveItem,(self,) + _args, _kwargs)
        if val: val = FX_ListItemPtr(val)  # wrap the raw SWIG pointer in its shadow class
        return val
    def insertItem(self, *_args, **_kwargs):
        # Try the typed (item-object) variant first; fall back to the
        # string-based variant when the extension rejects the argument types.
        try:
            val = apply(controlsc.FX_List_insertItem,(self,) + _args, _kwargs)
            return val
        except:
            val = apply(controlsc.FX_List_insertItemStr,(self,) + _args, _kwargs)
            return val
    def insertItemStr(self, *_args, **_kwargs):
        val = apply(controlsc.FX_List_insertItemStr,(self,) + _args, _kwargs)
        return val
    def replaceItem(self, *_args, **_kwargs):
        # Same dispatch: item-object form first, then the string form.
        try:
            val = apply(controlsc.FX_List_replaceItem,(self,) + _args, _kwargs)
            return val
        except:
            val = apply(controlsc.FX_List_replaceItemStr,(self,) + _args, _kwargs)
            return val
    def replaceItemStr(self, *_args, **_kwargs):
        val = apply(controlsc.FX_List_replaceItemStr,(self,) + _args, _kwargs)
        return val
    def appendItem(self, *_args, **_kwargs):
        # Same dispatch: item-object form first, then the string form.
        try:
            val = apply(controlsc.FX_List_appendItem,(self,) + _args, _kwargs)
            return val
        except:
            val = apply(controlsc.FX_List_appendItemStr,(self,) + _args, _kwargs)
            return val
    def appendItemStr(self, *_args, **_kwargs):
        val = apply(controlsc.FX_List_appendItemStr,(self,) + _args, _kwargs)
        return val
    def prependItem(self, *_args, **_kwargs):
        # Same dispatch: item-object form first, then the string form.
        try:
            val = apply(controlsc.FX_List_prependItem,(self,) + _args, _kwargs)
            return val
        except:
            val = apply(controlsc.FX_List_prependItemStr,(self,) + _args, _kwargs)
            return val
    def prependItemStr(self, *_args, **_kwargs):
        val = apply(controlsc.FX_List_prependItemStr,(self,) + _args, _kwargs)
        return val
def removeItem(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_removeItem,(self,) + _args, _kwargs)
return val
def clearItems(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_clearItems,(self,) + _args, _kwargs)
return val
def getItemWidth(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_getItemWidth,(self,) + _args, _kwargs)
return val
def getItemHeight(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_getItemHeight,(self,) + _args, _kwargs)
return val
def getItemAt(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_getItemAt,(self,) + _args, _kwargs)
return val
def hitItem(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_hitItem,(self,) + _args, _kwargs)
return val
def findItem(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_findItem,(self,) + _args, _kwargs)
return val
def makeItemVisible(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_makeItemVisible,(self,) + _args, _kwargs)
return val
def updateItem(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_updateItem,(self,) + _args, _kwargs)
return val
def setItemText(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_setItemText,(self,) + _args, _kwargs)
return val
def getItemText(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_getItemText,(self,) + _args, _kwargs)
return val
def setItemIcon(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_setItemIcon,(self,) + _args, _kwargs)
return val
def getItemIcon(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_getItemIcon,(self,) + _args, _kwargs)
return val
def setItemData(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_setItemData,(self,) + _args, _kwargs)
return val
def getItemData(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_getItemData,(self,) + _args, _kwargs)
return val
def isItemSelected(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_isItemSelected,(self,) + _args, _kwargs)
return val
def isItemCurrent(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_isItemCurrent,(self,) + _args, _kwargs)
return val
def isItemVisible(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_isItemVisible,(self,) + _args, _kwargs)
return val
def isItemEnabled(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_isItemEnabled,(self,) + _args, _kwargs)
return val
def enableItem(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_enableItem,(self,) + _args, _kwargs)
return val
def disableItem(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_disableItem,(self,) + _args, _kwargs)
return val
def selectItem(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_selectItem,(self,) + _args, _kwargs)
return val
def deselectItem(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_deselectItem,(self,) + _args, _kwargs)
return val
def toggleItem(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_toggleItem,(self,) + _args, _kwargs)
return val
def setCurrentItem(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_setCurrentItem,(self,) + _args, _kwargs)
return val
def getCurrentItem(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_getCurrentItem,(self,) + _args, _kwargs)
return val
def setAnchorItem(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_setAnchorItem,(self,) + _args, _kwargs)
return val
def getAnchorItem(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_getAnchorItem,(self,) + _args, _kwargs)
return val
def getCursorItem(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_getCursorItem,(self,) + _args, _kwargs)
return val
def extendSelection(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_extendSelection,(self,) + _args, _kwargs)
return val
def killSelection(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_killSelection,(self,) + _args, _kwargs)
return val
def setFont(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_setFont,(self,) + _args, _kwargs)
return val
def getFont(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_getFont,(self,) + _args, _kwargs)
return val
def getTextColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_getTextColor,(self,) + _args, _kwargs)
return val
def setTextColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_setTextColor,(self,) + _args, _kwargs)
return val
def getSelBackColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_getSelBackColor,(self,) + _args, _kwargs)
return val
def setSelBackColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_setSelBackColor,(self,) + _args, _kwargs)
return val
def getSelTextColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_getSelTextColor,(self,) + _args, _kwargs)
return val
def setSelTextColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_setSelTextColor,(self,) + _args, _kwargs)
return val
def getSortFunc(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_getSortFunc,(self,) + _args, _kwargs)
return val
def setSortFunc(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_setSortFunc,(self,) + _args, _kwargs)
return val
def getListStyle(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_getListStyle,(self,) + _args, _kwargs)
return val
def setListStyle(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_setListStyle,(self,) + _args, _kwargs)
return val
def setHelpText(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_setHelpText,(self,) + _args, _kwargs)
return val
def getHelpText(self, *_args, **_kwargs):
val = apply(controlsc.FX_List_getHelpText,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FX_List instance at %s>" % (self.this,)
class FX_List(FX_ListPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FX_List,_args,_kwargs)
self.thisown = 1
class FXListPtr(FX_ListPtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXList_onDefault,(self,) + _args, _kwargs)
return val
def create(self, *_args, **_kwargs):
val = apply(controlsc.FXList_create,(self,) + _args, _kwargs)
return val
def destroy(self, *_args, **_kwargs):
val = apply(controlsc.FXList_destroy,(self,) + _args, _kwargs)
return val
def detach(self, *_args, **_kwargs):
val = apply(controlsc.FXList_detach,(self,) + _args, _kwargs)
return val
def resize(self, *_args, **_kwargs):
val = apply(controlsc.FXList_resize,(self,) + _args, _kwargs)
return val
def getDefaultWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXList_getDefaultWidth,(self,) + _args, _kwargs)
return val
def getDefaultHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXList_getDefaultHeight,(self,) + _args, _kwargs)
return val
def show(self, *_args, **_kwargs):
val = apply(controlsc.FXList_show,(self,) + _args, _kwargs)
return val
def hide(self, *_args, **_kwargs):
val = apply(controlsc.FXList_hide,(self,) + _args, _kwargs)
return val
def enable(self, *_args, **_kwargs):
val = apply(controlsc.FXList_enable,(self,) + _args, _kwargs)
return val
def disable(self, *_args, **_kwargs):
val = apply(controlsc.FXList_disable,(self,) + _args, _kwargs)
return val
def canFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXList_canFocus,(self,) + _args, _kwargs)
return val
def setFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXList_setFocus,(self,) + _args, _kwargs)
return val
def killFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXList_killFocus,(self,) + _args, _kwargs)
return val
def setDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXList_setDefault,(self,) + _args, _kwargs)
return val
def recalc(self, *_args, **_kwargs):
val = apply(controlsc.FXList_recalc,(self,) + _args, _kwargs)
return val
def layout(self, *_args, **_kwargs):
val = apply(controlsc.FXList_layout,(self,) + _args, _kwargs)
return val
def lower(self, *_args, **_kwargs):
val = apply(controlsc.FXList_lower,(self,) + _args, _kwargs)
return val
def move(self, *_args, **_kwargs):
val = apply(controlsc.FXList_move,(self,) + _args, _kwargs)
return val
def position(self, *_args, **_kwargs):
val = apply(controlsc.FXList_position,(self,) + _args, _kwargs)
return val
def isComposite(self, *_args, **_kwargs):
val = apply(controlsc.FXList_isComposite,(self,) + _args, _kwargs)
return val
def contains(self, *_args, **_kwargs):
val = apply(controlsc.FXList_contains,(self,) + _args, _kwargs)
return val
def getWidthForHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXList_getWidthForHeight,(self,) + _args, _kwargs)
return val
def getHeightForWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXList_getHeightForWidth,(self,) + _args, _kwargs)
return val
def doesSaveUnder(self, *_args, **_kwargs):
val = apply(controlsc.FXList_doesSaveUnder,(self,) + _args, _kwargs)
return val
def reparent(self, *_args, **_kwargs):
val = apply(controlsc.FXList_reparent,(self,) + _args, _kwargs)
return val
def setBackColor(self, *_args, **_kwargs):
val = apply(controlsc.FXList_setBackColor,(self,) + _args, _kwargs)
return val
def getContentWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXList_getContentWidth,(self,) + _args, _kwargs)
return val
def getContentHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXList_getContentHeight,(self,) + _args, _kwargs)
return val
def getViewportWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXList_getViewportWidth,(self,) + _args, _kwargs)
return val
def getViewportHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXList_getViewportHeight,(self,) + _args, _kwargs)
return val
def moveContents(self, *_args, **_kwargs):
val = apply(controlsc.FXList_moveContents,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FXList instance at %s>" % (self.this,)
class FXList(FXListPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FXList,_args,_kwargs)
self.thisown = 1
FXPyRegister(self)
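# Hedged usage sketch for FXList: the constructor is assumed to follow FOX's
# FXList(parent, numVisible, ...) shape, which this generated file does not
# document. appendItem transparently falls back to the string variant (see
# the try/except dispatch above), so plain strings work here.
def _exampleBuildList(parent):
    lst = FXList(parent, 4)      # assumption: (parent, numVisible) suffice
    for name in ("Alpha", "Beta", "Gamma"):
        lst.appendItem(name)     # string form dispatches to appendItemStr
    lst.setCurrentItem(1)        # FOX item indices are zero-based
    return lst.getNumItems()     # expected: 3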
class FX_ComboBoxPtr(FX_PackerPtr):
    # Message identifiers for the embedded list pane and text field.
    ID_LIST = controlsc.FX_ComboBox_ID_LIST
    ID_TEXT = controlsc.FX_ComboBox_ID_TEXT
    ID_LAST = controlsc.FX_ComboBox_ID_LAST
def __init__(self,this):
self.this = this
self.thisown = 0
def onFocusUp(self, *_args, **_kwargs):
val = apply(controlsc.FX_ComboBox_onFocusUp,(self,) + _args, _kwargs)
return val
def onFocusDown(self, *_args, **_kwargs):
val = apply(controlsc.FX_ComboBox_onFocusDown,(self,) + _args, _kwargs)
return val
def onTextButton(self, *_args, **_kwargs):
val = apply(controlsc.FX_ComboBox_onTextButton,(self,) + _args, _kwargs)
return val
def onTextChanged(self, *_args, **_kwargs):
val = apply(controlsc.FX_ComboBox_onTextChanged,(self,) + _args, _kwargs)
return val
def onTextCommand(self, *_args, **_kwargs):
val = apply(controlsc.FX_ComboBox_onTextCommand,(self,) + _args, _kwargs)
return val
def onListClicked(self, *_args, **_kwargs):
val = apply(controlsc.FX_ComboBox_onListClicked,(self,) + _args, _kwargs)
return val
def onFwdToText(self, *_args, **_kwargs):
val = apply(controlsc.FX_ComboBox_onFwdToText,(self,) + _args, _kwargs)
return val
def onUpdFmText(self, *_args, **_kwargs):
val = apply(controlsc.FX_ComboBox_onUpdFmText,(self,) + _args, _kwargs)
return val
def isEditable(self, *_args, **_kwargs):
val = apply(controlsc.FX_ComboBox_isEditable,(self,) + _args, _kwargs)
return val
def setEditable(self, *_args, **_kwargs):
val = apply(controlsc.FX_ComboBox_setEditable,(self,) + _args, _kwargs)
return val
def setText(self, *_args, **_kwargs):
val = apply(controlsc.FX_ComboBox_setText,(self,) + _args, _kwargs)
return val
def getText(self, *_args, **_kwargs):
val = apply(controlsc.FX_ComboBox_getText,(self,) + _args, _kwargs)
return val
def setNumColumns(self, *_args, **_kwargs):
val = apply(controlsc.FX_ComboBox_setNumColumns,(self,) + _args, _kwargs)
return val
def getNumColumns(self, *_args, **_kwargs):
val = apply(controlsc.FX_ComboBox_getNumColumns,(self,) + _args, _kwargs)
return val
def getNumItems(self, *_args, **_kwargs):
val = apply(controlsc.FX_ComboBox_getNumItems,(self,) + _args, _kwargs)
return val
def getNumVisible(self, *_args, **_kwargs):
val = apply(controlsc.FX_ComboBox_getNumVisible,(self,) + _args, _kwargs)
return val
def setNumVisible(self, *_args, **_kwargs):
val = apply(controlsc.FX_ComboBox_setNumVisible,(self,) + _args, _kwargs)
return val
def isItemCurrent(self, *_args, **_kwargs):
val = apply(controlsc.FX_ComboBox_isItemCurrent,(self,) + _args, _kwargs)
return val
def setCurrentItem(self, *_args, **_kwargs):
val = apply(controlsc.FX_ComboBox_setCurrentItem,(self,) + _args, _kwargs)
return val
def getCurrentItem(self, *_args, **_kwargs):
val = apply(controlsc.FX_ComboBox_getCurrentItem,(self,) + _args, _kwargs)
return val
def retrieveItem(self, *_args, **_kwargs):
val = apply(controlsc.FX_ComboBox_retrieveItem,(self,) + _args, _kwargs)
return val
def replaceItem(self, *_args, **_kwargs):
val = apply(controlsc.FX_ComboBox_replaceItem,(self,) + _args, _kwargs)
return val
def insertItem(self, *_args, **_kwargs):
val = apply(controlsc.FX_ComboBox_insertItem,(self,) + _args, _kwargs)
return val
def appendItem(self, *_args, **_kwargs):
val = apply(controlsc.FX_ComboBox_appendItem,(self,) + _args, _kwargs)
return val
def prependItem(self, *_args, **_kwargs):
val = apply(controlsc.FX_ComboBox_prependItem,(self,) + _args, _kwargs)
return val
def removeItem(self, *_args, **_kwargs):
val = apply(controlsc.FX_ComboBox_removeItem,(self,) + _args, _kwargs)
return val
def clearItems(self, *_args, **_kwargs):
val = apply(controlsc.FX_ComboBox_clearItems,(self,) + _args, _kwargs)
return val
def setItemText(self, *_args, **_kwargs):
val = apply(controlsc.FX_ComboBox_setItemText,(self,) + _args, _kwargs)
return val
def getItemText(self, *_args, **_kwargs):
val = apply(controlsc.FX_ComboBox_getItemText,(self,) + _args, _kwargs)
return val
def setItemData(self, *_args, **_kwargs):
val = apply(controlsc.FX_ComboBox_setItemData,(self,) + _args, _kwargs)
return val
def getItemData(self, *_args, **_kwargs):
val = apply(controlsc.FX_ComboBox_getItemData,(self,) + _args, _kwargs)
return val
def isPaneShown(self, *_args, **_kwargs):
val = apply(controlsc.FX_ComboBox_isPaneShown,(self,) + _args, _kwargs)
return val
def sortItems(self, *_args, **_kwargs):
val = apply(controlsc.FX_ComboBox_sortItems,(self,) + _args, _kwargs)
return val
def setFont(self, *_args, **_kwargs):
val = apply(controlsc.FX_ComboBox_setFont,(self,) + _args, _kwargs)
return val
def getFont(self, *_args, **_kwargs):
val = apply(controlsc.FX_ComboBox_getFont,(self,) + _args, _kwargs)
return val
def setComboStyle(self, *_args, **_kwargs):
val = apply(controlsc.FX_ComboBox_setComboStyle,(self,) + _args, _kwargs)
return val
def getComboStyle(self, *_args, **_kwargs):
val = apply(controlsc.FX_ComboBox_getComboStyle,(self,) + _args, _kwargs)
return val
def setBackColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_ComboBox_setBackColor,(self,) + _args, _kwargs)
return val
def getBackColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_ComboBox_getBackColor,(self,) + _args, _kwargs)
return val
def setTextColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_ComboBox_setTextColor,(self,) + _args, _kwargs)
return val
def getTextColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_ComboBox_getTextColor,(self,) + _args, _kwargs)
return val
def setSelBackColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_ComboBox_setSelBackColor,(self,) + _args, _kwargs)
return val
def getSelBackColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_ComboBox_getSelBackColor,(self,) + _args, _kwargs)
return val
def setSelTextColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_ComboBox_setSelTextColor,(self,) + _args, _kwargs)
return val
def getSelTextColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_ComboBox_getSelTextColor,(self,) + _args, _kwargs)
return val
def getSortFunc(self, *_args, **_kwargs):
val = apply(controlsc.FX_ComboBox_getSortFunc,(self,) + _args, _kwargs)
return val
def setSortFunc(self, *_args, **_kwargs):
val = apply(controlsc.FX_ComboBox_setSortFunc,(self,) + _args, _kwargs)
return val
def setHelpText(self, *_args, **_kwargs):
val = apply(controlsc.FX_ComboBox_setHelpText,(self,) + _args, _kwargs)
return val
def getHelpText(self, *_args, **_kwargs):
val = apply(controlsc.FX_ComboBox_getHelpText,(self,) + _args, _kwargs)
return val
def setTipText(self, *_args, **_kwargs):
val = apply(controlsc.FX_ComboBox_setTipText,(self,) + _args, _kwargs)
return val
def getTipText(self, *_args, **_kwargs):
val = apply(controlsc.FX_ComboBox_getTipText,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FX_ComboBox instance at %s>" % (self.this,)
class FX_ComboBox(FX_ComboBoxPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FX_ComboBox,_args,_kwargs)
self.thisown = 1
class FXComboBoxPtr(FX_ComboBoxPtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXComboBox_onDefault,(self,) + _args, _kwargs)
return val
def create(self, *_args, **_kwargs):
val = apply(controlsc.FXComboBox_create,(self,) + _args, _kwargs)
return val
def destroy(self, *_args, **_kwargs):
val = apply(controlsc.FXComboBox_destroy,(self,) + _args, _kwargs)
return val
def detach(self, *_args, **_kwargs):
val = apply(controlsc.FXComboBox_detach,(self,) + _args, _kwargs)
return val
def resize(self, *_args, **_kwargs):
val = apply(controlsc.FXComboBox_resize,(self,) + _args, _kwargs)
return val
def getDefaultWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXComboBox_getDefaultWidth,(self,) + _args, _kwargs)
return val
def getDefaultHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXComboBox_getDefaultHeight,(self,) + _args, _kwargs)
return val
def show(self, *_args, **_kwargs):
val = apply(controlsc.FXComboBox_show,(self,) + _args, _kwargs)
return val
def hide(self, *_args, **_kwargs):
val = apply(controlsc.FXComboBox_hide,(self,) + _args, _kwargs)
return val
def enable(self, *_args, **_kwargs):
val = apply(controlsc.FXComboBox_enable,(self,) + _args, _kwargs)
return val
def disable(self, *_args, **_kwargs):
val = apply(controlsc.FXComboBox_disable,(self,) + _args, _kwargs)
return val
def canFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXComboBox_canFocus,(self,) + _args, _kwargs)
return val
def setFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXComboBox_setFocus,(self,) + _args, _kwargs)
return val
def killFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXComboBox_killFocus,(self,) + _args, _kwargs)
return val
def setDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXComboBox_setDefault,(self,) + _args, _kwargs)
return val
def recalc(self, *_args, **_kwargs):
val = apply(controlsc.FXComboBox_recalc,(self,) + _args, _kwargs)
return val
def layout(self, *_args, **_kwargs):
val = apply(controlsc.FXComboBox_layout,(self,) + _args, _kwargs)
return val
def lower(self, *_args, **_kwargs):
val = apply(controlsc.FXComboBox_lower,(self,) + _args, _kwargs)
return val
def move(self, *_args, **_kwargs):
val = apply(controlsc.FXComboBox_move,(self,) + _args, _kwargs)
return val
def position(self, *_args, **_kwargs):
val = apply(controlsc.FXComboBox_position,(self,) + _args, _kwargs)
return val
def isComposite(self, *_args, **_kwargs):
val = apply(controlsc.FXComboBox_isComposite,(self,) + _args, _kwargs)
return val
def contains(self, *_args, **_kwargs):
val = apply(controlsc.FXComboBox_contains,(self,) + _args, _kwargs)
return val
def getWidthForHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXComboBox_getWidthForHeight,(self,) + _args, _kwargs)
return val
def getHeightForWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXComboBox_getHeightForWidth,(self,) + _args, _kwargs)
return val
def doesSaveUnder(self, *_args, **_kwargs):
val = apply(controlsc.FXComboBox_doesSaveUnder,(self,) + _args, _kwargs)
return val
def reparent(self, *_args, **_kwargs):
val = apply(controlsc.FXComboBox_reparent,(self,) + _args, _kwargs)
return val
def setBackColor(self, *_args, **_kwargs):
val = apply(controlsc.FXComboBox_setBackColor,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FXComboBox instance at %s>" % (self.this,)
class FXComboBox(FXComboBoxPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FXComboBox,_args,_kwargs)
self.thisown = 1
FXPyRegister(self)
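# Hedged usage sketch for FXComboBox; the (parent, columns, visibleRows)
# constructor arguments are an assumption taken from FOX's C++ API and are
# not documented in this generated file.
def _exampleBuildCombo(parent):
    combo = FXComboBox(parent, 12, 4)  # assumed: (parent, columns, visibleRows)
    combo.appendItem("Red")
    combo.appendItem("Green")
    combo.setCurrentItem(0)            # should also update the text field
    return combo.getText()             # expected: "Red"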
class FX_DragCornerPtr(FX_WindowPtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onPaint(self, *_args, **_kwargs):
val = apply(controlsc.FX_DragCorner_onPaint,(self,) + _args, _kwargs)
return val
def onLeftBtnPress(self, *_args, **_kwargs):
val = apply(controlsc.FX_DragCorner_onLeftBtnPress,(self,) + _args, _kwargs)
return val
def onLeftBtnRelease(self, *_args, **_kwargs):
val = apply(controlsc.FX_DragCorner_onLeftBtnRelease,(self,) + _args, _kwargs)
return val
def onMotion(self, *_args, **_kwargs):
val = apply(controlsc.FX_DragCorner_onMotion,(self,) + _args, _kwargs)
return val
def getHiliteColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_DragCorner_getHiliteColor,(self,) + _args, _kwargs)
return val
def setHiliteColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_DragCorner_setHiliteColor,(self,) + _args, _kwargs)
return val
def getShadowColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_DragCorner_getShadowColor,(self,) + _args, _kwargs)
return val
def setShadowColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_DragCorner_setShadowColor,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FX_DragCorner instance at %s>" % (self.this,)
class FX_DragCorner(FX_DragCornerPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FX_DragCorner,_args,_kwargs)
self.thisown = 1
class FXDragCornerPtr(FX_DragCornerPtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXDragCorner_onDefault,(self,) + _args, _kwargs)
return val
def create(self, *_args, **_kwargs):
val = apply(controlsc.FXDragCorner_create,(self,) + _args, _kwargs)
return val
def destroy(self, *_args, **_kwargs):
val = apply(controlsc.FXDragCorner_destroy,(self,) + _args, _kwargs)
return val
def detach(self, *_args, **_kwargs):
val = apply(controlsc.FXDragCorner_detach,(self,) + _args, _kwargs)
return val
def resize(self, *_args, **_kwargs):
val = apply(controlsc.FXDragCorner_resize,(self,) + _args, _kwargs)
return val
def getDefaultWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXDragCorner_getDefaultWidth,(self,) + _args, _kwargs)
return val
def getDefaultHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXDragCorner_getDefaultHeight,(self,) + _args, _kwargs)
return val
def show(self, *_args, **_kwargs):
val = apply(controlsc.FXDragCorner_show,(self,) + _args, _kwargs)
return val
def hide(self, *_args, **_kwargs):
val = apply(controlsc.FXDragCorner_hide,(self,) + _args, _kwargs)
return val
def enable(self, *_args, **_kwargs):
val = apply(controlsc.FXDragCorner_enable,(self,) + _args, _kwargs)
return val
def disable(self, *_args, **_kwargs):
val = apply(controlsc.FXDragCorner_disable,(self,) + _args, _kwargs)
return val
def canFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXDragCorner_canFocus,(self,) + _args, _kwargs)
return val
def setFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXDragCorner_setFocus,(self,) + _args, _kwargs)
return val
def killFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXDragCorner_killFocus,(self,) + _args, _kwargs)
return val
def setDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXDragCorner_setDefault,(self,) + _args, _kwargs)
return val
def recalc(self, *_args, **_kwargs):
val = apply(controlsc.FXDragCorner_recalc,(self,) + _args, _kwargs)
return val
def layout(self, *_args, **_kwargs):
val = apply(controlsc.FXDragCorner_layout,(self,) + _args, _kwargs)
return val
def lower(self, *_args, **_kwargs):
val = apply(controlsc.FXDragCorner_lower,(self,) + _args, _kwargs)
return val
def move(self, *_args, **_kwargs):
val = apply(controlsc.FXDragCorner_move,(self,) + _args, _kwargs)
return val
def position(self, *_args, **_kwargs):
val = apply(controlsc.FXDragCorner_position,(self,) + _args, _kwargs)
return val
def isComposite(self, *_args, **_kwargs):
val = apply(controlsc.FXDragCorner_isComposite,(self,) + _args, _kwargs)
return val
def contains(self, *_args, **_kwargs):
val = apply(controlsc.FXDragCorner_contains,(self,) + _args, _kwargs)
return val
def getWidthForHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXDragCorner_getWidthForHeight,(self,) + _args, _kwargs)
return val
def getHeightForWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXDragCorner_getHeightForWidth,(self,) + _args, _kwargs)
return val
def doesSaveUnder(self, *_args, **_kwargs):
val = apply(controlsc.FXDragCorner_doesSaveUnder,(self,) + _args, _kwargs)
return val
def reparent(self, *_args, **_kwargs):
val = apply(controlsc.FXDragCorner_reparent,(self,) + _args, _kwargs)
return val
def setBackColor(self, *_args, **_kwargs):
val = apply(controlsc.FXDragCorner_setBackColor,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FXDragCorner instance at %s>" % (self.this,)
class FXDragCorner(FXDragCornerPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FXDragCorner,_args,_kwargs)
self.thisown = 1
FXPyRegister(self)
class FX_StatuslinePtr(FX_FramePtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onPaint(self, *_args, **_kwargs):
val = apply(controlsc.FX_Statusline_onPaint,(self,) + _args, _kwargs)
return val
def onUpdate(self, *_args, **_kwargs):
val = apply(controlsc.FX_Statusline_onUpdate,(self,) + _args, _kwargs)
return val
def onCmdGetStringValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_Statusline_onCmdGetStringValue,(self,) + _args, _kwargs)
return val
def onCmdSetStringValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_Statusline_onCmdSetStringValue,(self,) + _args, _kwargs)
return val
def setText(self, *_args, **_kwargs):
val = apply(controlsc.FX_Statusline_setText,(self,) + _args, _kwargs)
return val
def setNormalText(self, *_args, **_kwargs):
val = apply(controlsc.FX_Statusline_setNormalText,(self,) + _args, _kwargs)
return val
def getNormalText(self, *_args, **_kwargs):
val = apply(controlsc.FX_Statusline_getNormalText,(self,) + _args, _kwargs)
return val
def getText(self, *_args, **_kwargs):
val = apply(controlsc.FX_Statusline_getText,(self,) + _args, _kwargs)
return val
def setFont(self, *_args, **_kwargs):
val = apply(controlsc.FX_Statusline_setFont,(self,) + _args, _kwargs)
return val
def getFont(self, *_args, **_kwargs):
val = apply(controlsc.FX_Statusline_getFont,(self,) + _args, _kwargs)
return val
def getTextColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_Statusline_getTextColor,(self,) + _args, _kwargs)
return val
def setTextColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_Statusline_setTextColor,(self,) + _args, _kwargs)
return val
def getTextHighlightColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_Statusline_getTextHighlightColor,(self,) + _args, _kwargs)
return val
def setTextHighlightColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_Statusline_setTextHighlightColor,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FX_Statusline instance at %s>" % (self.this,)
class FX_Statusline(FX_StatuslinePtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FX_Statusline,_args,_kwargs)
self.thisown = 1
class FXStatuslinePtr(FX_StatuslinePtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXStatusline_onDefault,(self,) + _args, _kwargs)
return val
def create(self, *_args, **_kwargs):
val = apply(controlsc.FXStatusline_create,(self,) + _args, _kwargs)
return val
def destroy(self, *_args, **_kwargs):
val = apply(controlsc.FXStatusline_destroy,(self,) + _args, _kwargs)
return val
def detach(self, *_args, **_kwargs):
val = apply(controlsc.FXStatusline_detach,(self,) + _args, _kwargs)
return val
def resize(self, *_args, **_kwargs):
val = apply(controlsc.FXStatusline_resize,(self,) + _args, _kwargs)
return val
def getDefaultWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXStatusline_getDefaultWidth,(self,) + _args, _kwargs)
return val
def getDefaultHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXStatusline_getDefaultHeight,(self,) + _args, _kwargs)
return val
def show(self, *_args, **_kwargs):
val = apply(controlsc.FXStatusline_show,(self,) + _args, _kwargs)
return val
def hide(self, *_args, **_kwargs):
val = apply(controlsc.FXStatusline_hide,(self,) + _args, _kwargs)
return val
def enable(self, *_args, **_kwargs):
val = apply(controlsc.FXStatusline_enable,(self,) + _args, _kwargs)
return val
def disable(self, *_args, **_kwargs):
val = apply(controlsc.FXStatusline_disable,(self,) + _args, _kwargs)
return val
def canFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXStatusline_canFocus,(self,) + _args, _kwargs)
return val
def setFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXStatusline_setFocus,(self,) + _args, _kwargs)
return val
def killFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXStatusline_killFocus,(self,) + _args, _kwargs)
return val
def setDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXStatusline_setDefault,(self,) + _args, _kwargs)
return val
def recalc(self, *_args, **_kwargs):
val = apply(controlsc.FXStatusline_recalc,(self,) + _args, _kwargs)
return val
def layout(self, *_args, **_kwargs):
val = apply(controlsc.FXStatusline_layout,(self,) + _args, _kwargs)
return val
def lower(self, *_args, **_kwargs):
val = apply(controlsc.FXStatusline_lower,(self,) + _args, _kwargs)
return val
def move(self, *_args, **_kwargs):
val = apply(controlsc.FXStatusline_move,(self,) + _args, _kwargs)
return val
def position(self, *_args, **_kwargs):
val = apply(controlsc.FXStatusline_position,(self,) + _args, _kwargs)
return val
def isComposite(self, *_args, **_kwargs):
val = apply(controlsc.FXStatusline_isComposite,(self,) + _args, _kwargs)
return val
def contains(self, *_args, **_kwargs):
val = apply(controlsc.FXStatusline_contains,(self,) + _args, _kwargs)
return val
def getWidthForHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXStatusline_getWidthForHeight,(self,) + _args, _kwargs)
return val
def getHeightForWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXStatusline_getHeightForWidth,(self,) + _args, _kwargs)
return val
def doesSaveUnder(self, *_args, **_kwargs):
val = apply(controlsc.FXStatusline_doesSaveUnder,(self,) + _args, _kwargs)
return val
def reparent(self, *_args, **_kwargs):
val = apply(controlsc.FXStatusline_reparent,(self,) + _args, _kwargs)
return val
def setBackColor(self, *_args, **_kwargs):
val = apply(controlsc.FXStatusline_setBackColor,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FXStatusline instance at %s>" % (self.this,)
class FXStatusline(FXStatuslinePtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FXStatusline,_args,_kwargs)
self.thisown = 1
FXPyRegister(self)
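# Hedged usage sketch for FXStatusline: setNormalText sets the idle message,
# setText a transient one. Parent-only construction is an assumption.
def _exampleStatusline(parent):
    line = FXStatusline(parent)    # assumed: parent-only construction
    line.setNormalText("Ready.")   # shown whenever no transient text is active
    line.setText("Working...")     # transient status message
    return line.getText()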
class FX_StatusbarPtr(FX_HorizontalFramePtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def setCornerStyle(self, *_args, **_kwargs):
val = apply(controlsc.FX_Statusbar_setCornerStyle,(self,) + _args, _kwargs)
return val
def getCornerStyle(self, *_args, **_kwargs):
val = apply(controlsc.FX_Statusbar_getCornerStyle,(self,) + _args, _kwargs)
return val
def getStatusline(self, *_args, **_kwargs):
val = apply(controlsc.FX_Statusbar_getStatusline,(self,) + _args, _kwargs)
return val
def getDragCorner(self, *_args, **_kwargs):
val = apply(controlsc.FX_Statusbar_getDragCorner,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FX_Statusbar instance at %s>" % (self.this,)
class FX_Statusbar(FX_StatusbarPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FX_Statusbar,_args,_kwargs)
self.thisown = 1
class FXStatusbarPtr(FX_StatusbarPtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXStatusbar_onDefault,(self,) + _args, _kwargs)
return val
def create(self, *_args, **_kwargs):
val = apply(controlsc.FXStatusbar_create,(self,) + _args, _kwargs)
return val
def destroy(self, *_args, **_kwargs):
val = apply(controlsc.FXStatusbar_destroy,(self,) + _args, _kwargs)
return val
def detach(self, *_args, **_kwargs):
val = apply(controlsc.FXStatusbar_detach,(self,) + _args, _kwargs)
return val
def resize(self, *_args, **_kwargs):
val = apply(controlsc.FXStatusbar_resize,(self,) + _args, _kwargs)
return val
def getDefaultWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXStatusbar_getDefaultWidth,(self,) + _args, _kwargs)
return val
def getDefaultHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXStatusbar_getDefaultHeight,(self,) + _args, _kwargs)
return val
def show(self, *_args, **_kwargs):
val = apply(controlsc.FXStatusbar_show,(self,) + _args, _kwargs)
return val
def hide(self, *_args, **_kwargs):
val = apply(controlsc.FXStatusbar_hide,(self,) + _args, _kwargs)
return val
def enable(self, *_args, **_kwargs):
val = apply(controlsc.FXStatusbar_enable,(self,) + _args, _kwargs)
return val
def disable(self, *_args, **_kwargs):
val = apply(controlsc.FXStatusbar_disable,(self,) + _args, _kwargs)
return val
def canFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXStatusbar_canFocus,(self,) + _args, _kwargs)
return val
def setFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXStatusbar_setFocus,(self,) + _args, _kwargs)
return val
def killFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXStatusbar_killFocus,(self,) + _args, _kwargs)
return val
def setDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXStatusbar_setDefault,(self,) + _args, _kwargs)
return val
def recalc(self, *_args, **_kwargs):
val = apply(controlsc.FXStatusbar_recalc,(self,) + _args, _kwargs)
return val
def layout(self, *_args, **_kwargs):
val = apply(controlsc.FXStatusbar_layout,(self,) + _args, _kwargs)
return val
def lower(self, *_args, **_kwargs):
val = apply(controlsc.FXStatusbar_lower,(self,) + _args, _kwargs)
return val
def move(self, *_args, **_kwargs):
val = apply(controlsc.FXStatusbar_move,(self,) + _args, _kwargs)
return val
def position(self, *_args, **_kwargs):
val = apply(controlsc.FXStatusbar_position,(self,) + _args, _kwargs)
return val
def isComposite(self, *_args, **_kwargs):
val = apply(controlsc.FXStatusbar_isComposite,(self,) + _args, _kwargs)
return val
def contains(self, *_args, **_kwargs):
val = apply(controlsc.FXStatusbar_contains,(self,) + _args, _kwargs)
return val
def getWidthForHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXStatusbar_getWidthForHeight,(self,) + _args, _kwargs)
return val
def getHeightForWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXStatusbar_getHeightForWidth,(self,) + _args, _kwargs)
return val
def doesSaveUnder(self, *_args, **_kwargs):
val = apply(controlsc.FXStatusbar_doesSaveUnder,(self,) + _args, _kwargs)
return val
def reparent(self, *_args, **_kwargs):
val = apply(controlsc.FXStatusbar_reparent,(self,) + _args, _kwargs)
return val
def setBackColor(self, *_args, **_kwargs):
val = apply(controlsc.FXStatusbar_setBackColor,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FXStatusbar instance at %s>" % (self.this,)
class FXStatusbar(FXStatusbarPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FXStatusbar,_args,_kwargs)
self.thisown = 1
FXPyRegister(self)
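# Hedged usage sketch for FXStatusbar. getStatusline is not shadow-wrapped
# above, so the raw SWIG pointer is wrapped by hand here, mirroring the
# explicit wrap in FX_List.retrieveItem; newer typemaps may make this step
# unnecessary.
def _exampleStatusbar(parent):
    bar = FXStatusbar(parent)                      # assumed: parent-only construction
    line = FX_StatuslinePtr(bar.getStatusline())   # hand-wrap the raw pointer
    line.setNormalText("Ready.")
    return bar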
class FX_SliderPtr(FX_FramePtr):
    # Auto-repeat message identifiers, exposed from the underlying C++ enum.
    ID_AUTOINC = controlsc.FX_Slider_ID_AUTOINC
    ID_AUTODEC = controlsc.FX_Slider_ID_AUTODEC
    ID_LAST = controlsc.FX_Slider_ID_LAST
def __init__(self,this):
self.this = this
self.thisown = 0
def onPaint(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_onPaint,(self,) + _args, _kwargs)
return val
def onLeftBtnPress(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_onLeftBtnPress,(self,) + _args, _kwargs)
return val
def onLeftBtnRelease(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_onLeftBtnRelease,(self,) + _args, _kwargs)
return val
def onMiddleBtnPress(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_onMiddleBtnPress,(self,) + _args, _kwargs)
return val
def onMiddleBtnRelease(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_onMiddleBtnRelease,(self,) + _args, _kwargs)
return val
def onUngrabbed(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_onUngrabbed,(self,) + _args, _kwargs)
return val
def onMotion(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_onMotion,(self,) + _args, _kwargs)
return val
def onTimeInc(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_onTimeInc,(self,) + _args, _kwargs)
return val
def onTimeDec(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_onTimeDec,(self,) + _args, _kwargs)
return val
def onCmdSetValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_onCmdSetValue,(self,) + _args, _kwargs)
return val
def onCmdSetIntValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_onCmdSetIntValue,(self,) + _args, _kwargs)
return val
def onCmdGetIntValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_onCmdGetIntValue,(self,) + _args, _kwargs)
return val
def onCmdSetRealValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_onCmdSetRealValue,(self,) + _args, _kwargs)
return val
def onCmdGetRealValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_onCmdGetRealValue,(self,) + _args, _kwargs)
return val
def onCmdSetIntRange(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_onCmdSetIntRange,(self,) + _args, _kwargs)
return val
def onCmdGetIntRange(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_onCmdGetIntRange,(self,) + _args, _kwargs)
return val
def onCmdSetRealRange(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_onCmdSetRealRange,(self,) + _args, _kwargs)
return val
def onCmdGetRealRange(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_onCmdGetRealRange,(self,) + _args, _kwargs)
return val
def onQueryHelp(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_onQueryHelp,(self,) + _args, _kwargs)
return val
def onQueryTip(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_onQueryTip,(self,) + _args, _kwargs)
return val
def setRange(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_setRange,(self,) + _args, _kwargs)
return val
def getRange(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_getRange,(self,) + _args, _kwargs)
return val
def setValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_setValue,(self,) + _args, _kwargs)
return val
def getValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_getValue,(self,) + _args, _kwargs)
return val
def getSliderStyle(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_getSliderStyle,(self,) + _args, _kwargs)
return val
def setSliderStyle(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_setSliderStyle,(self,) + _args, _kwargs)
return val
def getHeadSize(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_getHeadSize,(self,) + _args, _kwargs)
return val
def setHeadSize(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_setHeadSize,(self,) + _args, _kwargs)
return val
def getSlotSize(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_getSlotSize,(self,) + _args, _kwargs)
return val
def setSlotSize(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_setSlotSize,(self,) + _args, _kwargs)
return val
def getIncrement(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_getIncrement,(self,) + _args, _kwargs)
return val
def setIncrement(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_setIncrement,(self,) + _args, _kwargs)
return val
def setTickDelta(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_setTickDelta,(self,) + _args, _kwargs)
return val
def getTickDelta(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_getTickDelta,(self,) + _args, _kwargs)
return val
def getSlotColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_getSlotColor,(self,) + _args, _kwargs)
return val
def setSlotColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_setSlotColor,(self,) + _args, _kwargs)
return val
def getHiliteColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_getHiliteColor,(self,) + _args, _kwargs)
return val
def setHiliteColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_setHiliteColor,(self,) + _args, _kwargs)
return val
def getShadowColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_getShadowColor,(self,) + _args, _kwargs)
return val
def setShadowColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_setShadowColor,(self,) + _args, _kwargs)
return val
def getBorderColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_getBorderColor,(self,) + _args, _kwargs)
return val
def setBorderColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_setBorderColor,(self,) + _args, _kwargs)
return val
def getBaseColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_getBaseColor,(self,) + _args, _kwargs)
return val
def setBaseColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_setBaseColor,(self,) + _args, _kwargs)
return val
def getHelpText(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_getHelpText,(self,) + _args, _kwargs)
return val
def setHelpText(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_setHelpText,(self,) + _args, _kwargs)
return val
def getTipText(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_getTipText,(self,) + _args, _kwargs)
return val
def setTipText(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_setTipText,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FX_Slider instance at %s>" % (self.this,)
class FX_Slider(FX_SliderPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FX_Slider,_args,_kwargs)
self.thisown = 1
class FXSliderPtr(FX_SliderPtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXSlider_onDefault,(self,) + _args, _kwargs)
return val
def create(self, *_args, **_kwargs):
val = apply(controlsc.FXSlider_create,(self,) + _args, _kwargs)
return val
def destroy(self, *_args, **_kwargs):
val = apply(controlsc.FXSlider_destroy,(self,) + _args, _kwargs)
return val
def detach(self, *_args, **_kwargs):
val = apply(controlsc.FXSlider_detach,(self,) + _args, _kwargs)
return val
def resize(self, *_args, **_kwargs):
val = apply(controlsc.FXSlider_resize,(self,) + _args, _kwargs)
return val
def getDefaultWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXSlider_getDefaultWidth,(self,) + _args, _kwargs)
return val
def getDefaultHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXSlider_getDefaultHeight,(self,) + _args, _kwargs)
return val
def show(self, *_args, **_kwargs):
val = apply(controlsc.FXSlider_show,(self,) + _args, _kwargs)
return val
def hide(self, *_args, **_kwargs):
val = apply(controlsc.FXSlider_hide,(self,) + _args, _kwargs)
return val
def enable(self, *_args, **_kwargs):
val = apply(controlsc.FXSlider_enable,(self,) + _args, _kwargs)
return val
def disable(self, *_args, **_kwargs):
val = apply(controlsc.FXSlider_disable,(self,) + _args, _kwargs)
return val
def canFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXSlider_canFocus,(self,) + _args, _kwargs)
return val
def setFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXSlider_setFocus,(self,) + _args, _kwargs)
return val
def killFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXSlider_killFocus,(self,) + _args, _kwargs)
return val
def setDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXSlider_setDefault,(self,) + _args, _kwargs)
return val
def recalc(self, *_args, **_kwargs):
val = apply(controlsc.FXSlider_recalc,(self,) + _args, _kwargs)
return val
def layout(self, *_args, **_kwargs):
val = apply(controlsc.FXSlider_layout,(self,) + _args, _kwargs)
return val
def lower(self, *_args, **_kwargs):
val = apply(controlsc.FXSlider_lower,(self,) + _args, _kwargs)
return val
def move(self, *_args, **_kwargs):
val = apply(controlsc.FXSlider_move,(self,) + _args, _kwargs)
return val
def position(self, *_args, **_kwargs):
val = apply(controlsc.FXSlider_position,(self,) + _args, _kwargs)
return val
def isComposite(self, *_args, **_kwargs):
val = apply(controlsc.FXSlider_isComposite,(self,) + _args, _kwargs)
return val
def contains(self, *_args, **_kwargs):
val = apply(controlsc.FXSlider_contains,(self,) + _args, _kwargs)
return val
def getWidthForHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXSlider_getWidthForHeight,(self,) + _args, _kwargs)
return val
def getHeightForWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXSlider_getHeightForWidth,(self,) + _args, _kwargs)
return val
def doesSaveUnder(self, *_args, **_kwargs):
val = apply(controlsc.FXSlider_doesSaveUnder,(self,) + _args, _kwargs)
return val
def reparent(self, *_args, **_kwargs):
val = apply(controlsc.FXSlider_reparent,(self,) + _args, _kwargs)
return val
def setBackColor(self, *_args, **_kwargs):
val = apply(controlsc.FXSlider_setBackColor,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FXSlider instance at %s>" % (self.this,)
class FXSlider(FXSliderPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FXSlider,_args,_kwargs)
self.thisown = 1
FXPyRegister(self)
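# Hedged usage sketch for FXSlider; parent-only construction and the
# (low, high) argument order of setRange are assumptions from FOX's C++ API.
def _exampleSlider(parent):
    slider = FXSlider(parent)   # assumed: parent-only construction
    slider.setRange(0, 100)     # assumed argument order: (low, high)
    slider.setIncrement(5)      # step used by the auto-inc/dec messages
    slider.setValue(50)
    return slider.getValue()    # expected: 50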
class FX_HeaderItemPtr(FX_ObjectPtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def getText(self, *_args, **_kwargs):
val = apply(controlsc.FX_HeaderItem_getText,(self,) + _args, _kwargs)
return val
def setText(self, *_args, **_kwargs):
val = apply(controlsc.FX_HeaderItem_setText,(self,) + _args, _kwargs)
return val
def getIcon(self, *_args, **_kwargs):
val = apply(controlsc.FX_HeaderItem_getIcon,(self,) + _args, _kwargs)
return val
def setIcon(self, *_args, **_kwargs):
val = apply(controlsc.FX_HeaderItem_setIcon,(self,) + _args, _kwargs)
return val
def setData(self, *_args, **_kwargs):
val = apply(controlsc.FX_HeaderItem_setData,(self,) + _args, _kwargs)
return val
def getData(self, *_args, **_kwargs):
val = apply(controlsc.FX_HeaderItem_getData,(self,) + _args, _kwargs)
return val
def setSize(self, *_args, **_kwargs):
val = apply(controlsc.FX_HeaderItem_setSize,(self,) + _args, _kwargs)
return val
def getSize(self, *_args, **_kwargs):
val = apply(controlsc.FX_HeaderItem_getSize,(self,) + _args, _kwargs)
return val
def setArrowDir(self, *_args, **_kwargs):
val = apply(controlsc.FX_HeaderItem_setArrowDir,(self,) + _args, _kwargs)
return val
def getArrowDir(self, *_args, **_kwargs):
val = apply(controlsc.FX_HeaderItem_getArrowDir,(self,) + _args, _kwargs)
return val
def getWidth(self, *_args, **_kwargs):
val = apply(controlsc.FX_HeaderItem_getWidth,(self,) + _args, _kwargs)
return val
def getHeight(self, *_args, **_kwargs):
val = apply(controlsc.FX_HeaderItem_getHeight,(self,) + _args, _kwargs)
return val
def create(self, *_args, **_kwargs):
val = apply(controlsc.FX_HeaderItem_create,(self,) + _args, _kwargs)
return val
def detach(self, *_args, **_kwargs):
val = apply(controlsc.FX_HeaderItem_detach,(self,) + _args, _kwargs)
return val
def destroy(self, *_args, **_kwargs):
val = apply(controlsc.FX_HeaderItem_destroy,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FX_HeaderItem instance at %s>" % (self.this,)
class FX_HeaderItem(FX_HeaderItemPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FX_HeaderItem,_args,_kwargs)
self.thisown = 1
class FXHeaderItemPtr(FX_HeaderItemPtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXHeaderItem_onDefault,(self,) + _args, _kwargs)
return val
def setText(self, *_args, **_kwargs):
val = apply(controlsc.FXHeaderItem_setText,(self,) + _args, _kwargs)
return val
def setIcon(self, *_args, **_kwargs):
val = apply(controlsc.FXHeaderItem_setIcon,(self,) + _args, _kwargs)
return val
def getWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXHeaderItem_getWidth,(self,) + _args, _kwargs)
return val
def getHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXHeaderItem_getHeight,(self,) + _args, _kwargs)
return val
def create(self, *_args, **_kwargs):
val = apply(controlsc.FXHeaderItem_create,(self,) + _args, _kwargs)
return val
def detach(self, *_args, **_kwargs):
val = apply(controlsc.FXHeaderItem_detach,(self,) + _args, _kwargs)
return val
def destroy(self, *_args, **_kwargs):
val = apply(controlsc.FXHeaderItem_destroy,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FXHeaderItem instance at %s>" % (self.this,)
class FXHeaderItem(FXHeaderItemPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FXHeaderItem,_args,_kwargs)
self.thisown = 1
FXPyRegister(self)
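# FXHeader wrappers.  A header is a row of clickable FXHeaderItem
# captions, typically placed above a list or table view; items can be
# inserted, appended, prepended, replaced and removed, and the widget
# reports per-item size and offset for column layout.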
class FX_HeaderPtr(FX_FramePtr):
ID_TIPTIMER = controlsc.FX_Header_ID_TIPTIMER
ID_LAST = controlsc.FX_Header_ID_LAST
def __init__(self,this):
self.this = this
self.thisown = 0
def onPaint(self, *_args, **_kwargs):
val = apply(controlsc.FX_Header_onPaint,(self,) + _args, _kwargs)
return val
def onLeftBtnPress(self, *_args, **_kwargs):
val = apply(controlsc.FX_Header_onLeftBtnPress,(self,) + _args, _kwargs)
return val
def onLeftBtnRelease(self, *_args, **_kwargs):
val = apply(controlsc.FX_Header_onLeftBtnRelease,(self,) + _args, _kwargs)
return val
def onUngrabbed(self, *_args, **_kwargs):
val = apply(controlsc.FX_Header_onUngrabbed,(self,) + _args, _kwargs)
return val
def onMotion(self, *_args, **_kwargs):
val = apply(controlsc.FX_Header_onMotion,(self,) + _args, _kwargs)
return val
def onTipTimer(self, *_args, **_kwargs):
val = apply(controlsc.FX_Header_onTipTimer,(self,) + _args, _kwargs)
return val
def onQueryTip(self, *_args, **_kwargs):
val = apply(controlsc.FX_Header_onQueryTip,(self,) + _args, _kwargs)
return val
def onQueryHelp(self, *_args, **_kwargs):
val = apply(controlsc.FX_Header_onQueryHelp,(self,) + _args, _kwargs)
return val
def getNumItems(self, *_args, **_kwargs):
val = apply(controlsc.FX_Header_getNumItems,(self,) + _args, _kwargs)
return val
def retrieveItem(self, *_args, **_kwargs):
val = apply(controlsc.FX_Header_retrieveItem,(self,) + _args, _kwargs)
return val
    def replaceItem(self, *_args, **_kwargs):
        # Overload dispatch: try the first wrapped form and fall back to
        # the second one when the argument types do not match.
        try:
            val = apply(controlsc.FX_Header_replaceItem,(self,) + _args, _kwargs)
            return val
        except TypeError:
            val = apply(controlsc.FX_Header_replaceItem2,(self,) + _args, _kwargs)
            return val
def replaceItem2(self, *_args, **_kwargs):
val = apply(controlsc.FX_Header_replaceItem2,(self,) + _args, _kwargs)
return val
    def insertItem(self, *_args, **_kwargs):
        # Same overload-dispatch fallback as replaceItem above.
        try:
            val = apply(controlsc.FX_Header_insertItem,(self,) + _args, _kwargs)
            return val
        except TypeError:
            val = apply(controlsc.FX_Header_insertItem2,(self,) + _args, _kwargs)
            return val
def insertItem2(self, *_args, **_kwargs):
val = apply(controlsc.FX_Header_insertItem2,(self,) + _args, _kwargs)
return val
    def appendItem(self, *_args, **_kwargs):
        # Same overload-dispatch fallback as replaceItem above.
        try:
            val = apply(controlsc.FX_Header_appendItem,(self,) + _args, _kwargs)
            return val
        except TypeError:
            val = apply(controlsc.FX_Header_appendItem2,(self,) + _args, _kwargs)
            return val
def appendItem2(self, *_args, **_kwargs):
val = apply(controlsc.FX_Header_appendItem2,(self,) + _args, _kwargs)
return val
    def prependItem(self, *_args, **_kwargs):
        # Same overload-dispatch fallback as replaceItem above.
        try:
            val = apply(controlsc.FX_Header_prependItem,(self,) + _args, _kwargs)
            return val
        except TypeError:
            val = apply(controlsc.FX_Header_prependItem2,(self,) + _args, _kwargs)
            return val
def prependItem2(self, *_args, **_kwargs):
val = apply(controlsc.FX_Header_prependItem2,(self,) + _args, _kwargs)
return val
def removeItem(self, *_args, **_kwargs):
val = apply(controlsc.FX_Header_removeItem,(self,) + _args, _kwargs)
return val
def clearItems(self, *_args, **_kwargs):
val = apply(controlsc.FX_Header_clearItems,(self,) + _args, _kwargs)
return val
def getItemAt(self, *_args, **_kwargs):
val = apply(controlsc.FX_Header_getItemAt,(self,) + _args, _kwargs)
return val
def setItemText(self, *_args, **_kwargs):
val = apply(controlsc.FX_Header_setItemText,(self,) + _args, _kwargs)
return val
def getItemText(self, *_args, **_kwargs):
val = apply(controlsc.FX_Header_getItemText,(self,) + _args, _kwargs)
return val
def setItemIcon(self, *_args, **_kwargs):
val = apply(controlsc.FX_Header_setItemIcon,(self,) + _args, _kwargs)
return val
def getItemIcon(self, *_args, **_kwargs):
val = apply(controlsc.FX_Header_getItemIcon,(self,) + _args, _kwargs)
return val
def setItemSize(self, *_args, **_kwargs):
val = apply(controlsc.FX_Header_setItemSize,(self,) + _args, _kwargs)
return val
def getItemSize(self, *_args, **_kwargs):
val = apply(controlsc.FX_Header_getItemSize,(self,) + _args, _kwargs)
return val
def getItemOffset(self, *_args, **_kwargs):
val = apply(controlsc.FX_Header_getItemOffset,(self,) + _args, _kwargs)
return val
def setItemData(self, *_args, **_kwargs):
val = apply(controlsc.FX_Header_setItemData,(self,) + _args, _kwargs)
return val
def getItemData(self, *_args, **_kwargs):
val = apply(controlsc.FX_Header_getItemData,(self,) + _args, _kwargs)
return val
def setFont(self, *_args, **_kwargs):
val = apply(controlsc.FX_Header_setFont,(self,) + _args, _kwargs)
return val
def getFont(self, *_args, **_kwargs):
val = apply(controlsc.FX_Header_getFont,(self,) + _args, _kwargs)
return val
def getTextColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_Header_getTextColor,(self,) + _args, _kwargs)
return val
def setTextColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_Header_setTextColor,(self,) + _args, _kwargs)
return val
def setHeaderStyle(self, *_args, **_kwargs):
val = apply(controlsc.FX_Header_setHeaderStyle,(self,) + _args, _kwargs)
return val
def getHeaderStyle(self, *_args, **_kwargs):
val = apply(controlsc.FX_Header_getHeaderStyle,(self,) + _args, _kwargs)
return val
def setHelpText(self, *_args, **_kwargs):
val = apply(controlsc.FX_Header_setHelpText,(self,) + _args, _kwargs)
return val
def getHelpText(self, *_args, **_kwargs):
val = apply(controlsc.FX_Header_getHelpText,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FX_Header instance at %s>" % (self.this,)
class FX_Header(FX_HeaderPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FX_Header,_args,_kwargs)
self.thisown = 1
class FXHeaderPtr(FX_HeaderPtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXHeader_onDefault,(self,) + _args, _kwargs)
return val
def create(self, *_args, **_kwargs):
val = apply(controlsc.FXHeader_create,(self,) + _args, _kwargs)
return val
def destroy(self, *_args, **_kwargs):
val = apply(controlsc.FXHeader_destroy,(self,) + _args, _kwargs)
return val
def detach(self, *_args, **_kwargs):
val = apply(controlsc.FXHeader_detach,(self,) + _args, _kwargs)
return val
def resize(self, *_args, **_kwargs):
val = apply(controlsc.FXHeader_resize,(self,) + _args, _kwargs)
return val
def getDefaultWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXHeader_getDefaultWidth,(self,) + _args, _kwargs)
return val
def getDefaultHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXHeader_getDefaultHeight,(self,) + _args, _kwargs)
return val
def show(self, *_args, **_kwargs):
val = apply(controlsc.FXHeader_show,(self,) + _args, _kwargs)
return val
def hide(self, *_args, **_kwargs):
val = apply(controlsc.FXHeader_hide,(self,) + _args, _kwargs)
return val
def enable(self, *_args, **_kwargs):
val = apply(controlsc.FXHeader_enable,(self,) + _args, _kwargs)
return val
def disable(self, *_args, **_kwargs):
val = apply(controlsc.FXHeader_disable,(self,) + _args, _kwargs)
return val
def canFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXHeader_canFocus,(self,) + _args, _kwargs)
return val
def setFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXHeader_setFocus,(self,) + _args, _kwargs)
return val
def killFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXHeader_killFocus,(self,) + _args, _kwargs)
return val
def setDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXHeader_setDefault,(self,) + _args, _kwargs)
return val
def recalc(self, *_args, **_kwargs):
val = apply(controlsc.FXHeader_recalc,(self,) + _args, _kwargs)
return val
def layout(self, *_args, **_kwargs):
val = apply(controlsc.FXHeader_layout,(self,) + _args, _kwargs)
return val
def lower(self, *_args, **_kwargs):
val = apply(controlsc.FXHeader_lower,(self,) + _args, _kwargs)
return val
def move(self, *_args, **_kwargs):
val = apply(controlsc.FXHeader_move,(self,) + _args, _kwargs)
return val
def position(self, *_args, **_kwargs):
val = apply(controlsc.FXHeader_position,(self,) + _args, _kwargs)
return val
def isComposite(self, *_args, **_kwargs):
val = apply(controlsc.FXHeader_isComposite,(self,) + _args, _kwargs)
return val
def contains(self, *_args, **_kwargs):
val = apply(controlsc.FXHeader_contains,(self,) + _args, _kwargs)
return val
def getWidthForHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXHeader_getWidthForHeight,(self,) + _args, _kwargs)
return val
def getHeightForWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXHeader_getHeightForWidth,(self,) + _args, _kwargs)
return val
def doesSaveUnder(self, *_args, **_kwargs):
val = apply(controlsc.FXHeader_doesSaveUnder,(self,) + _args, _kwargs)
return val
def reparent(self, *_args, **_kwargs):
val = apply(controlsc.FXHeader_reparent,(self,) + _args, _kwargs)
return val
def setBackColor(self, *_args, **_kwargs):
val = apply(controlsc.FXHeader_setBackColor,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FXHeader instance at %s>" % (self.this,)
class FXHeader(FXHeaderPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FXHeader,_args,_kwargs)
self.thisown = 1
FXPyRegister(self)
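# FXProgressBar wrappers.  The bar shows completion as progress out of
# total.  A usage sketch using only methods wrapped below (construction
# arguments follow the underlying FOX constructor and are not shown in
# this file):
#   bar.setTotal(100)    # expected number of work units
#   bar.setProgress(42)  # units completed so far
#   bar.increment(1)     # advance by one unit
#   bar.showNumber()     # display the numeric percentage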
class FX_ProgressBarPtr(FX_FramePtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onPaint(self, *_args, **_kwargs):
val = apply(controlsc.FX_ProgressBar_onPaint,(self,) + _args, _kwargs)
return val
def onCmdSetValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_ProgressBar_onCmdSetValue,(self,) + _args, _kwargs)
return val
def onCmdSetIntValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_ProgressBar_onCmdSetIntValue,(self,) + _args, _kwargs)
return val
def onCmdGetIntValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_ProgressBar_onCmdGetIntValue,(self,) + _args, _kwargs)
return val
def setProgress(self, *_args, **_kwargs):
val = apply(controlsc.FX_ProgressBar_setProgress,(self,) + _args, _kwargs)
return val
def getProgress(self, *_args, **_kwargs):
val = apply(controlsc.FX_ProgressBar_getProgress,(self,) + _args, _kwargs)
return val
def setTotal(self, *_args, **_kwargs):
val = apply(controlsc.FX_ProgressBar_setTotal,(self,) + _args, _kwargs)
return val
def getTotal(self, *_args, **_kwargs):
val = apply(controlsc.FX_ProgressBar_getTotal,(self,) + _args, _kwargs)
return val
def increment(self, *_args, **_kwargs):
val = apply(controlsc.FX_ProgressBar_increment,(self,) + _args, _kwargs)
return val
def hideNumber(self, *_args, **_kwargs):
val = apply(controlsc.FX_ProgressBar_hideNumber,(self,) + _args, _kwargs)
return val
def showNumber(self, *_args, **_kwargs):
val = apply(controlsc.FX_ProgressBar_showNumber,(self,) + _args, _kwargs)
return val
def setBarSize(self, *_args, **_kwargs):
val = apply(controlsc.FX_ProgressBar_setBarSize,(self,) + _args, _kwargs)
return val
def getBarSize(self, *_args, **_kwargs):
val = apply(controlsc.FX_ProgressBar_getBarSize,(self,) + _args, _kwargs)
return val
def setBarBGColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_ProgressBar_setBarBGColor,(self,) + _args, _kwargs)
return val
def getBarBGColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_ProgressBar_getBarBGColor,(self,) + _args, _kwargs)
return val
def setBarColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_ProgressBar_setBarColor,(self,) + _args, _kwargs)
return val
def getBarColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_ProgressBar_getBarColor,(self,) + _args, _kwargs)
return val
def setTextColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_ProgressBar_setTextColor,(self,) + _args, _kwargs)
return val
def getTextColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_ProgressBar_getTextColor,(self,) + _args, _kwargs)
return val
def setTextAltColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_ProgressBar_setTextAltColor,(self,) + _args, _kwargs)
return val
def getTextAltColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_ProgressBar_getTextAltColor,(self,) + _args, _kwargs)
return val
def setFont(self, *_args, **_kwargs):
val = apply(controlsc.FX_ProgressBar_setFont,(self,) + _args, _kwargs)
return val
def getFont(self, *_args, **_kwargs):
val = apply(controlsc.FX_ProgressBar_getFont,(self,) + _args, _kwargs)
return val
def setBarStyle(self, *_args, **_kwargs):
val = apply(controlsc.FX_ProgressBar_setBarStyle,(self,) + _args, _kwargs)
return val
def getBarStyle(self, *_args, **_kwargs):
val = apply(controlsc.FX_ProgressBar_getBarStyle,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FX_ProgressBar instance at %s>" % (self.this,)
class FX_ProgressBar(FX_ProgressBarPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FX_ProgressBar,_args,_kwargs)
self.thisown = 1
class FXProgressBarPtr(FX_ProgressBarPtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXProgressBar_onDefault,(self,) + _args, _kwargs)
return val
def create(self, *_args, **_kwargs):
val = apply(controlsc.FXProgressBar_create,(self,) + _args, _kwargs)
return val
def destroy(self, *_args, **_kwargs):
val = apply(controlsc.FXProgressBar_destroy,(self,) + _args, _kwargs)
return val
def detach(self, *_args, **_kwargs):
val = apply(controlsc.FXProgressBar_detach,(self,) + _args, _kwargs)
return val
def resize(self, *_args, **_kwargs):
val = apply(controlsc.FXProgressBar_resize,(self,) + _args, _kwargs)
return val
def getDefaultWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXProgressBar_getDefaultWidth,(self,) + _args, _kwargs)
return val
def getDefaultHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXProgressBar_getDefaultHeight,(self,) + _args, _kwargs)
return val
def show(self, *_args, **_kwargs):
val = apply(controlsc.FXProgressBar_show,(self,) + _args, _kwargs)
return val
def hide(self, *_args, **_kwargs):
val = apply(controlsc.FXProgressBar_hide,(self,) + _args, _kwargs)
return val
def enable(self, *_args, **_kwargs):
val = apply(controlsc.FXProgressBar_enable,(self,) + _args, _kwargs)
return val
def disable(self, *_args, **_kwargs):
val = apply(controlsc.FXProgressBar_disable,(self,) + _args, _kwargs)
return val
def canFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXProgressBar_canFocus,(self,) + _args, _kwargs)
return val
def setFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXProgressBar_setFocus,(self,) + _args, _kwargs)
return val
def killFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXProgressBar_killFocus,(self,) + _args, _kwargs)
return val
def setDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXProgressBar_setDefault,(self,) + _args, _kwargs)
return val
def recalc(self, *_args, **_kwargs):
val = apply(controlsc.FXProgressBar_recalc,(self,) + _args, _kwargs)
return val
def layout(self, *_args, **_kwargs):
val = apply(controlsc.FXProgressBar_layout,(self,) + _args, _kwargs)
return val
def lower(self, *_args, **_kwargs):
val = apply(controlsc.FXProgressBar_lower,(self,) + _args, _kwargs)
return val
def move(self, *_args, **_kwargs):
val = apply(controlsc.FXProgressBar_move,(self,) + _args, _kwargs)
return val
def position(self, *_args, **_kwargs):
val = apply(controlsc.FXProgressBar_position,(self,) + _args, _kwargs)
return val
def isComposite(self, *_args, **_kwargs):
val = apply(controlsc.FXProgressBar_isComposite,(self,) + _args, _kwargs)
return val
def contains(self, *_args, **_kwargs):
val = apply(controlsc.FXProgressBar_contains,(self,) + _args, _kwargs)
return val
def getWidthForHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXProgressBar_getWidthForHeight,(self,) + _args, _kwargs)
return val
def getHeightForWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXProgressBar_getHeightForWidth,(self,) + _args, _kwargs)
return val
def doesSaveUnder(self, *_args, **_kwargs):
val = apply(controlsc.FXProgressBar_doesSaveUnder,(self,) + _args, _kwargs)
return val
def reparent(self, *_args, **_kwargs):
val = apply(controlsc.FXProgressBar_reparent,(self,) + _args, _kwargs)
return val
def setBackColor(self, *_args, **_kwargs):
val = apply(controlsc.FXProgressBar_setBackColor,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FXProgressBar instance at %s>" % (self.this,)
class FXProgressBar(FXProgressBarPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FXProgressBar,_args,_kwargs)
self.thisown = 1
FXPyRegister(self)
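# FXToolbarTab wrappers.  The tab is the small button that collapses or
# uncollapses an adjacent toolbar; collapse()/isCollapsed() drive the
# state and ID_COLLAPSE/ID_UNCOLLAPSE are the matching message IDs.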
class FX_ToolbarTabPtr(FX_FramePtr):
ID_COLLAPSE = controlsc.FX_ToolbarTab_ID_COLLAPSE
ID_UNCOLLAPSE = controlsc.FX_ToolbarTab_ID_UNCOLLAPSE
ID_LAST = controlsc.FX_ToolbarTab_ID_LAST
def __init__(self,this):
self.this = this
self.thisown = 0
def onPaint(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToolbarTab_onPaint,(self,) + _args, _kwargs)
return val
def onUpdate(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToolbarTab_onUpdate,(self,) + _args, _kwargs)
return val
def onEnter(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToolbarTab_onEnter,(self,) + _args, _kwargs)
return val
def onLeave(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToolbarTab_onLeave,(self,) + _args, _kwargs)
return val
def onUngrabbed(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToolbarTab_onUngrabbed,(self,) + _args, _kwargs)
return val
def onLeftBtnPress(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToolbarTab_onLeftBtnPress,(self,) + _args, _kwargs)
return val
def onLeftBtnRelease(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToolbarTab_onLeftBtnRelease,(self,) + _args, _kwargs)
return val
def onKeyPress(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToolbarTab_onKeyPress,(self,) + _args, _kwargs)
return val
def onKeyRelease(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToolbarTab_onKeyRelease,(self,) + _args, _kwargs)
return val
def onCmdCollapse(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToolbarTab_onCmdCollapse,(self,) + _args, _kwargs)
return val
def onUpdCollapse(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToolbarTab_onUpdCollapse,(self,) + _args, _kwargs)
return val
def onCmdUncollapse(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToolbarTab_onCmdUncollapse,(self,) + _args, _kwargs)
return val
def onUpdUncollapse(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToolbarTab_onUpdUncollapse,(self,) + _args, _kwargs)
return val
def collapse(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToolbarTab_collapse,(self,) + _args, _kwargs)
return val
def isCollapsed(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToolbarTab_isCollapsed,(self,) + _args, _kwargs)
return val
def setTabStyle(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToolbarTab_setTabStyle,(self,) + _args, _kwargs)
return val
def getTabStyle(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToolbarTab_getTabStyle,(self,) + _args, _kwargs)
return val
def getActiveColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToolbarTab_getActiveColor,(self,) + _args, _kwargs)
return val
def setActiveColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToolbarTab_setActiveColor,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FX_ToolbarTab instance at %s>" % (self.this,)
class FX_ToolbarTab(FX_ToolbarTabPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FX_ToolbarTab,_args,_kwargs)
self.thisown = 1
class FXToolbarTabPtr(FX_ToolbarTabPtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarTab_onDefault,(self,) + _args, _kwargs)
return val
def create(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarTab_create,(self,) + _args, _kwargs)
return val
def destroy(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarTab_destroy,(self,) + _args, _kwargs)
return val
def detach(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarTab_detach,(self,) + _args, _kwargs)
return val
def resize(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarTab_resize,(self,) + _args, _kwargs)
return val
def getDefaultWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarTab_getDefaultWidth,(self,) + _args, _kwargs)
return val
def getDefaultHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarTab_getDefaultHeight,(self,) + _args, _kwargs)
return val
def show(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarTab_show,(self,) + _args, _kwargs)
return val
def hide(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarTab_hide,(self,) + _args, _kwargs)
return val
def enable(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarTab_enable,(self,) + _args, _kwargs)
return val
def disable(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarTab_disable,(self,) + _args, _kwargs)
return val
def canFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarTab_canFocus,(self,) + _args, _kwargs)
return val
def setFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarTab_setFocus,(self,) + _args, _kwargs)
return val
def killFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarTab_killFocus,(self,) + _args, _kwargs)
return val
def setDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarTab_setDefault,(self,) + _args, _kwargs)
return val
def recalc(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarTab_recalc,(self,) + _args, _kwargs)
return val
def layout(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarTab_layout,(self,) + _args, _kwargs)
return val
def lower(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarTab_lower,(self,) + _args, _kwargs)
return val
def move(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarTab_move,(self,) + _args, _kwargs)
return val
def position(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarTab_position,(self,) + _args, _kwargs)
return val
def isComposite(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarTab_isComposite,(self,) + _args, _kwargs)
return val
def contains(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarTab_contains,(self,) + _args, _kwargs)
return val
def getWidthForHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarTab_getWidthForHeight,(self,) + _args, _kwargs)
return val
def getHeightForWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarTab_getHeightForWidth,(self,) + _args, _kwargs)
return val
def doesSaveUnder(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarTab_doesSaveUnder,(self,) + _args, _kwargs)
return val
def reparent(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarTab_reparent,(self,) + _args, _kwargs)
return val
def setBackColor(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarTab_setBackColor,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FXToolbarTab instance at %s>" % (self.this,)
class FXToolbarTab(FXToolbarTabPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FXToolbarTab,_args,_kwargs)
self.thisown = 1
FXPyRegister(self)
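# FXToolbar wrappers.  A toolbar can be docked inside the main window
# (its "dry dock") or float in a separate shell (its "wet dock");
# dock()/undock() move it between the two and the ID_DOCK_* messages
# select a docking side.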
class FX_ToolbarPtr(FX_PackerPtr):
ID_UNDOCK = controlsc.FX_Toolbar_ID_UNDOCK
ID_DOCK_TOP = controlsc.FX_Toolbar_ID_DOCK_TOP
ID_DOCK_BOTTOM = controlsc.FX_Toolbar_ID_DOCK_BOTTOM
ID_DOCK_LEFT = controlsc.FX_Toolbar_ID_DOCK_LEFT
ID_DOCK_RIGHT = controlsc.FX_Toolbar_ID_DOCK_RIGHT
ID_TOOLBARGRIP = controlsc.FX_Toolbar_ID_TOOLBARGRIP
ID_LAST = controlsc.FX_Toolbar_ID_LAST
def __init__(self,this):
self.this = this
self.thisown = 0
def onCmdUndock(self, *_args, **_kwargs):
val = apply(controlsc.FX_Toolbar_onCmdUndock,(self,) + _args, _kwargs)
return val
def onUpdUndock(self, *_args, **_kwargs):
val = apply(controlsc.FX_Toolbar_onUpdUndock,(self,) + _args, _kwargs)
return val
def onCmdDockTop(self, *_args, **_kwargs):
val = apply(controlsc.FX_Toolbar_onCmdDockTop,(self,) + _args, _kwargs)
return val
def onUpdDockTop(self, *_args, **_kwargs):
val = apply(controlsc.FX_Toolbar_onUpdDockTop,(self,) + _args, _kwargs)
return val
def onCmdDockBottom(self, *_args, **_kwargs):
val = apply(controlsc.FX_Toolbar_onCmdDockBottom,(self,) + _args, _kwargs)
return val
def onUpdDockBottom(self, *_args, **_kwargs):
val = apply(controlsc.FX_Toolbar_onUpdDockBottom,(self,) + _args, _kwargs)
return val
def onCmdDockLeft(self, *_args, **_kwargs):
val = apply(controlsc.FX_Toolbar_onCmdDockLeft,(self,) + _args, _kwargs)
return val
def onUpdDockLeft(self, *_args, **_kwargs):
val = apply(controlsc.FX_Toolbar_onUpdDockLeft,(self,) + _args, _kwargs)
return val
def onCmdDockRight(self, *_args, **_kwargs):
val = apply(controlsc.FX_Toolbar_onCmdDockRight,(self,) + _args, _kwargs)
return val
def onUpdDockRight(self, *_args, **_kwargs):
val = apply(controlsc.FX_Toolbar_onUpdDockRight,(self,) + _args, _kwargs)
return val
def onBeginDragGrip(self, *_args, **_kwargs):
val = apply(controlsc.FX_Toolbar_onBeginDragGrip,(self,) + _args, _kwargs)
return val
def onEndDragGrip(self, *_args, **_kwargs):
val = apply(controlsc.FX_Toolbar_onEndDragGrip,(self,) + _args, _kwargs)
return val
def onDraggedGrip(self, *_args, **_kwargs):
val = apply(controlsc.FX_Toolbar_onDraggedGrip,(self,) + _args, _kwargs)
return val
def setDryDock(self, *_args, **_kwargs):
val = apply(controlsc.FX_Toolbar_setDryDock,(self,) + _args, _kwargs)
return val
def setWetDock(self, *_args, **_kwargs):
val = apply(controlsc.FX_Toolbar_setWetDock,(self,) + _args, _kwargs)
return val
def getDryDock(self, *_args, **_kwargs):
val = apply(controlsc.FX_Toolbar_getDryDock,(self,) + _args, _kwargs)
return val
def getWetDock(self, *_args, **_kwargs):
val = apply(controlsc.FX_Toolbar_getWetDock,(self,) + _args, _kwargs)
return val
def isDocked(self, *_args, **_kwargs):
val = apply(controlsc.FX_Toolbar_isDocked,(self,) + _args, _kwargs)
return val
def dock(self, *_args, **_kwargs):
val = apply(controlsc.FX_Toolbar_dock,(self,) + _args, _kwargs)
return val
def undock(self, *_args, **_kwargs):
val = apply(controlsc.FX_Toolbar_undock,(self,) + _args, _kwargs)
return val
def setDockingSide(self, *_args, **_kwargs):
val = apply(controlsc.FX_Toolbar_setDockingSide,(self,) + _args, _kwargs)
return val
def getDockingSide(self, *_args, **_kwargs):
val = apply(controlsc.FX_Toolbar_getDockingSide,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FX_Toolbar instance at %s>" % (self.this,)
class FX_Toolbar(FX_ToolbarPtr):
    def __init__(self,this):
        self.this = this
        self.thisown = 0  # ownership stays with the C++ side, matching FX_ToolbarPtr
class FXToolbarPtr(FX_ToolbarPtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbar_onDefault,(self,) + _args, _kwargs)
return val
def create(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbar_create,(self,) + _args, _kwargs)
return val
def destroy(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbar_destroy,(self,) + _args, _kwargs)
return val
def detach(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbar_detach,(self,) + _args, _kwargs)
return val
def resize(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbar_resize,(self,) + _args, _kwargs)
return val
def getDefaultWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbar_getDefaultWidth,(self,) + _args, _kwargs)
return val
def getDefaultHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbar_getDefaultHeight,(self,) + _args, _kwargs)
return val
def show(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbar_show,(self,) + _args, _kwargs)
return val
def hide(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbar_hide,(self,) + _args, _kwargs)
return val
def enable(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbar_enable,(self,) + _args, _kwargs)
return val
def disable(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbar_disable,(self,) + _args, _kwargs)
return val
def canFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbar_canFocus,(self,) + _args, _kwargs)
return val
def setFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbar_setFocus,(self,) + _args, _kwargs)
return val
def killFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbar_killFocus,(self,) + _args, _kwargs)
return val
def setDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbar_setDefault,(self,) + _args, _kwargs)
return val
def recalc(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbar_recalc,(self,) + _args, _kwargs)
return val
def layout(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbar_layout,(self,) + _args, _kwargs)
return val
def lower(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbar_lower,(self,) + _args, _kwargs)
return val
def move(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbar_move,(self,) + _args, _kwargs)
return val
def position(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbar_position,(self,) + _args, _kwargs)
return val
def isComposite(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbar_isComposite,(self,) + _args, _kwargs)
return val
def contains(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbar_contains,(self,) + _args, _kwargs)
return val
def getWidthForHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbar_getWidthForHeight,(self,) + _args, _kwargs)
return val
def getHeightForWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbar_getHeightForWidth,(self,) + _args, _kwargs)
return val
def doesSaveUnder(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbar_doesSaveUnder,(self,) + _args, _kwargs)
return val
def reparent(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbar_reparent,(self,) + _args, _kwargs)
return val
def setBackColor(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbar_setBackColor,(self,) + _args, _kwargs)
return val
def dock(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbar_dock,(self,) + _args, _kwargs)
return val
def undock(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbar_undock,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FXToolbar instance at %s>" % (self.this,)
class FXToolbar(FXToolbarPtr):
    def __init__(self,*_args,**_kwargs):
        # Two wrapped constructors exist; try the floating variant first
        # and fall back to the non-floating one when the argument types
        # do not match.  A failure of both forms propagates to the caller.
        try:
            self.this = apply(controlsc.CreateFloatingToolbar,_args,_kwargs)
        except TypeError:
            self.this = apply(controlsc.CreateNonFloatingToolbar,_args,_kwargs)
        self.thisown = 1
        FXPyRegister(self)
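# FXToolbarShell wrappers.  The shell is the borderless top-level window
# that hosts a toolbar while it floats; it draws its own frame via
# setFrameStyle and the border/hilite/shadow/base colors below.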
class FX_ToolbarShellPtr(FX_TopWindowPtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onPaint(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToolbarShell_onPaint,(self,) + _args, _kwargs)
return val
def setFrameStyle(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToolbarShell_setFrameStyle,(self,) + _args, _kwargs)
return val
def getFrameStyle(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToolbarShell_getFrameStyle,(self,) + _args, _kwargs)
return val
def getBorderWidth(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToolbarShell_getBorderWidth,(self,) + _args, _kwargs)
return val
def setHiliteColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToolbarShell_setHiliteColor,(self,) + _args, _kwargs)
return val
def getHiliteColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToolbarShell_getHiliteColor,(self,) + _args, _kwargs)
return val
def setShadowColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToolbarShell_setShadowColor,(self,) + _args, _kwargs)
return val
def getShadowColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToolbarShell_getShadowColor,(self,) + _args, _kwargs)
return val
def setBorderColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToolbarShell_setBorderColor,(self,) + _args, _kwargs)
return val
def getBorderColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToolbarShell_getBorderColor,(self,) + _args, _kwargs)
return val
def setBaseColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToolbarShell_setBaseColor,(self,) + _args, _kwargs)
return val
def getBaseColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToolbarShell_getBaseColor,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FX_ToolbarShell instance at %s>" % (self.this,)
class FX_ToolbarShell(FX_ToolbarShellPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FX_ToolbarShell,_args,_kwargs)
self.thisown = 1
class FXToolbarShellPtr(FX_ToolbarShellPtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarShell_onDefault,(self,) + _args, _kwargs)
return val
def create(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarShell_create,(self,) + _args, _kwargs)
return val
def destroy(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarShell_destroy,(self,) + _args, _kwargs)
return val
def detach(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarShell_detach,(self,) + _args, _kwargs)
return val
def resize(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarShell_resize,(self,) + _args, _kwargs)
return val
def getDefaultWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarShell_getDefaultWidth,(self,) + _args, _kwargs)
return val
def getDefaultHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarShell_getDefaultHeight,(self,) + _args, _kwargs)
return val
    def show(self, *_args, **_kwargs):
        # show() is overloaded; fall back to the second wrapped form
        # (show2) when the argument types do not match.
        try:
            val = apply(controlsc.FXToolbarShell_show,(self,) + _args, _kwargs)
            return val
        except TypeError:
            val = apply(controlsc.FXToolbarShell_show2,(self,) + _args, _kwargs)
            return val
def hide(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarShell_hide,(self,) + _args, _kwargs)
return val
def enable(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarShell_enable,(self,) + _args, _kwargs)
return val
def disable(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarShell_disable,(self,) + _args, _kwargs)
return val
def canFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarShell_canFocus,(self,) + _args, _kwargs)
return val
def setFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarShell_setFocus,(self,) + _args, _kwargs)
return val
def killFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarShell_killFocus,(self,) + _args, _kwargs)
return val
def setDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarShell_setDefault,(self,) + _args, _kwargs)
return val
def recalc(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarShell_recalc,(self,) + _args, _kwargs)
return val
def layout(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarShell_layout,(self,) + _args, _kwargs)
return val
def lower(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarShell_lower,(self,) + _args, _kwargs)
return val
def move(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarShell_move,(self,) + _args, _kwargs)
return val
def position(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarShell_position,(self,) + _args, _kwargs)
return val
def isComposite(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarShell_isComposite,(self,) + _args, _kwargs)
return val
def contains(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarShell_contains,(self,) + _args, _kwargs)
return val
def getWidthForHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarShell_getWidthForHeight,(self,) + _args, _kwargs)
return val
def getHeightForWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarShell_getHeightForWidth,(self,) + _args, _kwargs)
return val
def doesSaveUnder(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarShell_doesSaveUnder,(self,) + _args, _kwargs)
return val
def reparent(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarShell_reparent,(self,) + _args, _kwargs)
return val
def setBackColor(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarShell_setBackColor,(self,) + _args, _kwargs)
return val
def show2(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarShell_show2,(self,) + _args, _kwargs)
return val
def iconify(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarShell_iconify,(self,) + _args, _kwargs)
return val
def deiconify(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarShell_deiconify,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FXToolbarShell instance at %s>" % (self.this,)
class FXToolbarShell(FXToolbarShellPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FXToolbarShell,_args,_kwargs)
self.thisown = 1
FXPyRegister(self)
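# FXToolbarGrip wrappers.  The grip is the ridged handle used to drag a
# toolbar; setDoubleBar() selects the two-ridge appearance.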
class FX_ToolbarGripPtr(FX_WindowPtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onPaint(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToolbarGrip_onPaint,(self,) + _args, _kwargs)
return val
def onLeftBtnPress(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToolbarGrip_onLeftBtnPress,(self,) + _args, _kwargs)
return val
def onLeftBtnRelease(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToolbarGrip_onLeftBtnRelease,(self,) + _args, _kwargs)
return val
def onMotion(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToolbarGrip_onMotion,(self,) + _args, _kwargs)
return val
def onEnter(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToolbarGrip_onEnter,(self,) + _args, _kwargs)
return val
def onLeave(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToolbarGrip_onLeave,(self,) + _args, _kwargs)
return val
def setDoubleBar(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToolbarGrip_setDoubleBar,(self,) + _args, _kwargs)
return val
def getDoubleBar(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToolbarGrip_getDoubleBar,(self,) + _args, _kwargs)
return val
def setHiliteColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToolbarGrip_setHiliteColor,(self,) + _args, _kwargs)
return val
def getHiliteColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToolbarGrip_getHiliteColor,(self,) + _args, _kwargs)
return val
def setShadowColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToolbarGrip_setShadowColor,(self,) + _args, _kwargs)
return val
def getShadowColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToolbarGrip_getShadowColor,(self,) + _args, _kwargs)
return val
def getActiveColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_ToolbarGrip_getActiveColor,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FX_ToolbarGrip instance at %s>" % (self.this,)
class FX_ToolbarGrip(FX_ToolbarGripPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FX_ToolbarGrip,_args,_kwargs)
self.thisown = 1
class FXToolbarGripPtr(FX_ToolbarGripPtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarGrip_onDefault,(self,) + _args, _kwargs)
return val
def create(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarGrip_create,(self,) + _args, _kwargs)
return val
def destroy(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarGrip_destroy,(self,) + _args, _kwargs)
return val
def detach(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarGrip_detach,(self,) + _args, _kwargs)
return val
def resize(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarGrip_resize,(self,) + _args, _kwargs)
return val
def getDefaultWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarGrip_getDefaultWidth,(self,) + _args, _kwargs)
return val
def getDefaultHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarGrip_getDefaultHeight,(self,) + _args, _kwargs)
return val
def show(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarGrip_show,(self,) + _args, _kwargs)
return val
def hide(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarGrip_hide,(self,) + _args, _kwargs)
return val
def enable(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarGrip_enable,(self,) + _args, _kwargs)
return val
def disable(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarGrip_disable,(self,) + _args, _kwargs)
return val
def canFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarGrip_canFocus,(self,) + _args, _kwargs)
return val
def setFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarGrip_setFocus,(self,) + _args, _kwargs)
return val
def killFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarGrip_killFocus,(self,) + _args, _kwargs)
return val
def setDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarGrip_setDefault,(self,) + _args, _kwargs)
return val
def recalc(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarGrip_recalc,(self,) + _args, _kwargs)
return val
def layout(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarGrip_layout,(self,) + _args, _kwargs)
return val
def lower(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarGrip_lower,(self,) + _args, _kwargs)
return val
def move(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarGrip_move,(self,) + _args, _kwargs)
return val
def position(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarGrip_position,(self,) + _args, _kwargs)
return val
def isComposite(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarGrip_isComposite,(self,) + _args, _kwargs)
return val
def contains(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarGrip_contains,(self,) + _args, _kwargs)
return val
def getWidthForHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarGrip_getWidthForHeight,(self,) + _args, _kwargs)
return val
def getHeightForWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarGrip_getHeightForWidth,(self,) + _args, _kwargs)
return val
def doesSaveUnder(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarGrip_doesSaveUnder,(self,) + _args, _kwargs)
return val
def reparent(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarGrip_reparent,(self,) + _args, _kwargs)
return val
def setBackColor(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarGrip_setBackColor,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FXToolbarGrip instance at %s>" % (self.this,)
class FXToolbarGrip(FXToolbarGripPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FXToolbarGrip,_args,_kwargs)
self.thisown = 1
FXPyRegister(self)
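# FXListBox wrappers.  A list box is a drop-down list of items carrying
# text, an icon and user data.  A usage sketch using only methods
# wrapped below:
#   box.appendItem("First")
#   box.appendItem("Second")
#   box.setCurrentItem(0)
#   print box.getItemText(box.getCurrentItem())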
class FX_ListBoxPtr(FX_PackerPtr):
ID_LIST = controlsc.FX_ListBox_ID_LIST
ID_FIELD = controlsc.FX_ListBox_ID_FIELD
ID_LAST = controlsc.FX_ListBox_ID_LAST
def __init__(self,this):
self.this = this
self.thisown = 0
def onFocusUp(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListBox_onFocusUp,(self,) + _args, _kwargs)
return val
def onFocusDown(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListBox_onFocusDown,(self,) + _args, _kwargs)
return val
def onFieldButton(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListBox_onFieldButton,(self,) + _args, _kwargs)
return val
def onListUpdate(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListBox_onListUpdate,(self,) + _args, _kwargs)
return val
def onListChanged(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListBox_onListChanged,(self,) + _args, _kwargs)
return val
def onListClicked(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListBox_onListClicked,(self,) + _args, _kwargs)
return val
def onCmdSetValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListBox_onCmdSetValue,(self,) + _args, _kwargs)
return val
def onCmdGetIntValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListBox_onCmdGetIntValue,(self,) + _args, _kwargs)
return val
def onCmdSetIntValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListBox_onCmdSetIntValue,(self,) + _args, _kwargs)
return val
def getNumItems(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListBox_getNumItems,(self,) + _args, _kwargs)
return val
def getNumVisible(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListBox_getNumVisible,(self,) + _args, _kwargs)
return val
def setNumVisible(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListBox_setNumVisible,(self,) + _args, _kwargs)
return val
def isItemCurrent(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListBox_isItemCurrent,(self,) + _args, _kwargs)
return val
def setCurrentItem(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListBox_setCurrentItem,(self,) + _args, _kwargs)
return val
def getCurrentItem(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListBox_getCurrentItem,(self,) + _args, _kwargs)
return val
def retrieveItem(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListBox_retrieveItem,(self,) + _args, _kwargs)
return val
def replaceItem(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListBox_replaceItem,(self,) + _args, _kwargs)
return val
def insertItem(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListBox_insertItem,(self,) + _args, _kwargs)
return val
def appendItem(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListBox_appendItem,(self,) + _args, _kwargs)
return val
def prependItem(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListBox_prependItem,(self,) + _args, _kwargs)
return val
def removeItem(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListBox_removeItem,(self,) + _args, _kwargs)
return val
def clearItems(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListBox_clearItems,(self,) + _args, _kwargs)
return val
def findItem(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListBox_findItem,(self,) + _args, _kwargs)
return val
def setItemText(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListBox_setItemText,(self,) + _args, _kwargs)
return val
def getItemText(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListBox_getItemText,(self,) + _args, _kwargs)
return val
def setItemIcon(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListBox_setItemIcon,(self,) + _args, _kwargs)
return val
def getItemIcon(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListBox_getItemIcon,(self,) + _args, _kwargs)
return val
def setItemData(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListBox_setItemData,(self,) + _args, _kwargs)
return val
def getItemData(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListBox_getItemData,(self,) + _args, _kwargs)
return val
def isPaneShown(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListBox_isPaneShown,(self,) + _args, _kwargs)
return val
def sortItems(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListBox_sortItems,(self,) + _args, _kwargs)
return val
def setFont(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListBox_setFont,(self,) + _args, _kwargs)
return val
def getFont(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListBox_getFont,(self,) + _args, _kwargs)
return val
def setBackColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListBox_setBackColor,(self,) + _args, _kwargs)
return val
def getBackColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListBox_getBackColor,(self,) + _args, _kwargs)
return val
def setTextColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListBox_setTextColor,(self,) + _args, _kwargs)
return val
def getTextColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListBox_getTextColor,(self,) + _args, _kwargs)
return val
def setSelBackColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListBox_setSelBackColor,(self,) + _args, _kwargs)
return val
def getSelBackColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListBox_getSelBackColor,(self,) + _args, _kwargs)
return val
def setSelTextColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListBox_setSelTextColor,(self,) + _args, _kwargs)
return val
def getSelTextColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListBox_getSelTextColor,(self,) + _args, _kwargs)
return val
def getSortFunc(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListBox_getSortFunc,(self,) + _args, _kwargs)
return val
def setSortFunc(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListBox_setSortFunc,(self,) + _args, _kwargs)
return val
def setHelpText(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListBox_setHelpText,(self,) + _args, _kwargs)
return val
def getHelpText(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListBox_getHelpText,(self,) + _args, _kwargs)
return val
def setTipText(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListBox_setTipText,(self,) + _args, _kwargs)
return val
def getTipText(self, *_args, **_kwargs):
val = apply(controlsc.FX_ListBox_getTipText,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FX_ListBox instance at %s>" % (self.this,)
class FX_ListBox(FX_ListBoxPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FX_ListBox,_args,_kwargs)
self.thisown = 1
class FXListBoxPtr(FX_ListBoxPtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXListBox_onDefault,(self,) + _args, _kwargs)
return val
def create(self, *_args, **_kwargs):
val = apply(controlsc.FXListBox_create,(self,) + _args, _kwargs)
return val
def destroy(self, *_args, **_kwargs):
val = apply(controlsc.FXListBox_destroy,(self,) + _args, _kwargs)
return val
def detach(self, *_args, **_kwargs):
val = apply(controlsc.FXListBox_detach,(self,) + _args, _kwargs)
return val
def resize(self, *_args, **_kwargs):
val = apply(controlsc.FXListBox_resize,(self,) + _args, _kwargs)
return val
def getDefaultWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXListBox_getDefaultWidth,(self,) + _args, _kwargs)
return val
def getDefaultHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXListBox_getDefaultHeight,(self,) + _args, _kwargs)
return val
def show(self, *_args, **_kwargs):
val = apply(controlsc.FXListBox_show,(self,) + _args, _kwargs)
return val
def hide(self, *_args, **_kwargs):
val = apply(controlsc.FXListBox_hide,(self,) + _args, _kwargs)
return val
def enable(self, *_args, **_kwargs):
val = apply(controlsc.FXListBox_enable,(self,) + _args, _kwargs)
return val
def disable(self, *_args, **_kwargs):
val = apply(controlsc.FXListBox_disable,(self,) + _args, _kwargs)
return val
def canFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXListBox_canFocus,(self,) + _args, _kwargs)
return val
def setFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXListBox_setFocus,(self,) + _args, _kwargs)
return val
def killFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXListBox_killFocus,(self,) + _args, _kwargs)
return val
def setDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXListBox_setDefault,(self,) + _args, _kwargs)
return val
def recalc(self, *_args, **_kwargs):
val = apply(controlsc.FXListBox_recalc,(self,) + _args, _kwargs)
return val
def layout(self, *_args, **_kwargs):
val = apply(controlsc.FXListBox_layout,(self,) + _args, _kwargs)
return val
def lower(self, *_args, **_kwargs):
val = apply(controlsc.FXListBox_lower,(self,) + _args, _kwargs)
return val
def move(self, *_args, **_kwargs):
val = apply(controlsc.FXListBox_move,(self,) + _args, _kwargs)
return val
def position(self, *_args, **_kwargs):
val = apply(controlsc.FXListBox_position,(self,) + _args, _kwargs)
return val
def isComposite(self, *_args, **_kwargs):
val = apply(controlsc.FXListBox_isComposite,(self,) + _args, _kwargs)
return val
def contains(self, *_args, **_kwargs):
val = apply(controlsc.FXListBox_contains,(self,) + _args, _kwargs)
return val
def getWidthForHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXListBox_getWidthForHeight,(self,) + _args, _kwargs)
return val
def getHeightForWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXListBox_getHeightForWidth,(self,) + _args, _kwargs)
return val
def doesSaveUnder(self, *_args, **_kwargs):
val = apply(controlsc.FXListBox_doesSaveUnder,(self,) + _args, _kwargs)
return val
def reparent(self, *_args, **_kwargs):
val = apply(controlsc.FXListBox_reparent,(self,) + _args, _kwargs)
return val
def setBackColor(self, *_args, **_kwargs):
val = apply(controlsc.FXListBox_setBackColor,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FXListBox instance at %s>" % (self.this,)
class FXListBox(FXListBoxPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FXListBox,_args,_kwargs)
self.thisown = 1
FXPyRegister(self)
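# FXDriveBox wrappers.  A drive box is a list box specialized for
# choosing a disk drive; setDrive()/getDrive() set and return the
# current drive path.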
class FX_DriveBoxPtr(FX_ListBoxPtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onListChanged(self, *_args, **_kwargs):
val = apply(controlsc.FX_DriveBox_onListChanged,(self,) + _args, _kwargs)
return val
def onListClicked(self, *_args, **_kwargs):
val = apply(controlsc.FX_DriveBox_onListClicked,(self,) + _args, _kwargs)
return val
def onCmdSetValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_DriveBox_onCmdSetValue,(self,) + _args, _kwargs)
return val
def onCmdSetStringValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_DriveBox_onCmdSetStringValue,(self,) + _args, _kwargs)
return val
def onCmdGetStringValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_DriveBox_onCmdGetStringValue,(self,) + _args, _kwargs)
return val
def setDrive(self, *_args, **_kwargs):
val = apply(controlsc.FX_DriveBox_setDrive,(self,) + _args, _kwargs)
return val
def getDrive(self, *_args, **_kwargs):
val = apply(controlsc.FX_DriveBox_getDrive,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FX_DriveBox instance at %s>" % (self.this,)
class FX_DriveBox(FX_DriveBoxPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FX_DriveBox,_args,_kwargs)
self.thisown = 1
class FXDriveBoxPtr(FX_DriveBoxPtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXDriveBox_onDefault,(self,) + _args, _kwargs)
return val
def create(self, *_args, **_kwargs):
val = apply(controlsc.FXDriveBox_create,(self,) + _args, _kwargs)
return val
def destroy(self, *_args, **_kwargs):
val = apply(controlsc.FXDriveBox_destroy,(self,) + _args, _kwargs)
return val
def detach(self, *_args, **_kwargs):
val = apply(controlsc.FXDriveBox_detach,(self,) + _args, _kwargs)
return val
def resize(self, *_args, **_kwargs):
val = apply(controlsc.FXDriveBox_resize,(self,) + _args, _kwargs)
return val
def getDefaultWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXDriveBox_getDefaultWidth,(self,) + _args, _kwargs)
return val
def getDefaultHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXDriveBox_getDefaultHeight,(self,) + _args, _kwargs)
return val
def show(self, *_args, **_kwargs):
val = apply(controlsc.FXDriveBox_show,(self,) + _args, _kwargs)
return val
def hide(self, *_args, **_kwargs):
val = apply(controlsc.FXDriveBox_hide,(self,) + _args, _kwargs)
return val
def enable(self, *_args, **_kwargs):
val = apply(controlsc.FXDriveBox_enable,(self,) + _args, _kwargs)
return val
def disable(self, *_args, **_kwargs):
val = apply(controlsc.FXDriveBox_disable,(self,) + _args, _kwargs)
return val
def canFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXDriveBox_canFocus,(self,) + _args, _kwargs)
return val
def setFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXDriveBox_setFocus,(self,) + _args, _kwargs)
return val
def killFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXDriveBox_killFocus,(self,) + _args, _kwargs)
return val
def setDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXDriveBox_setDefault,(self,) + _args, _kwargs)
return val
def recalc(self, *_args, **_kwargs):
val = apply(controlsc.FXDriveBox_recalc,(self,) + _args, _kwargs)
return val
def layout(self, *_args, **_kwargs):
val = apply(controlsc.FXDriveBox_layout,(self,) + _args, _kwargs)
return val
def lower(self, *_args, **_kwargs):
val = apply(controlsc.FXDriveBox_lower,(self,) + _args, _kwargs)
return val
def move(self, *_args, **_kwargs):
val = apply(controlsc.FXDriveBox_move,(self,) + _args, _kwargs)
return val
def position(self, *_args, **_kwargs):
val = apply(controlsc.FXDriveBox_position,(self,) + _args, _kwargs)
return val
def isComposite(self, *_args, **_kwargs):
val = apply(controlsc.FXDriveBox_isComposite,(self,) + _args, _kwargs)
return val
def contains(self, *_args, **_kwargs):
val = apply(controlsc.FXDriveBox_contains,(self,) + _args, _kwargs)
return val
def getWidthForHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXDriveBox_getWidthForHeight,(self,) + _args, _kwargs)
return val
def getHeightForWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXDriveBox_getHeightForWidth,(self,) + _args, _kwargs)
return val
def doesSaveUnder(self, *_args, **_kwargs):
val = apply(controlsc.FXDriveBox_doesSaveUnder,(self,) + _args, _kwargs)
return val
def reparent(self, *_args, **_kwargs):
val = apply(controlsc.FXDriveBox_reparent,(self,) + _args, _kwargs)
return val
def setBackColor(self, *_args, **_kwargs):
val = apply(controlsc.FXDriveBox_setBackColor,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FXDriveBox instance at %s>" % (self.this,)
class FXDriveBox(FXDriveBoxPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FXDriveBox,_args,_kwargs)
self.thisown = 1
FXPyRegister(self)
class FX_ColorBarPtr(FX_FramePtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onPaint(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorBar_onPaint,(self,) + _args, _kwargs)
return val
def onLeftBtnPress(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorBar_onLeftBtnPress,(self,) + _args, _kwargs)
return val
def onLeftBtnRelease(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorBar_onLeftBtnRelease,(self,) + _args, _kwargs)
return val
def onMotion(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorBar_onMotion,(self,) + _args, _kwargs)
return val
def onQueryHelp(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorBar_onQueryHelp,(self,) + _args, _kwargs)
return val
def onQueryTip(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorBar_onQueryTip,(self,) + _args, _kwargs)
return val
def setHue(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorBar_setHue,(self,) + _args, _kwargs)
return val
def getHue(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorBar_getHue,(self,) + _args, _kwargs)
return val
def setSat(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorBar_setSat,(self,) + _args, _kwargs)
return val
def getSat(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorBar_getSat,(self,) + _args, _kwargs)
return val
def setVal(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorBar_setVal,(self,) + _args, _kwargs)
return val
def getVal(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorBar_getVal,(self,) + _args, _kwargs)
return val
def getBarStyle(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorBar_getBarStyle,(self,) + _args, _kwargs)
return val
def setBarStyle(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorBar_setBarStyle,(self,) + _args, _kwargs)
return val
def setHelpText(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorBar_setHelpText,(self,) + _args, _kwargs)
return val
def getHelpText(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorBar_getHelpText,(self,) + _args, _kwargs)
return val
def setTipText(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorBar_setTipText,(self,) + _args, _kwargs)
return val
def getTipText(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorBar_getTipText,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FX_ColorBar instance at %s>" % (self.this,)
class FX_ColorBar(FX_ColorBarPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FX_ColorBar,_args,_kwargs)
self.thisown = 1
class FXColorBarPtr(FX_ColorBarPtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXColorBar_onDefault,(self,) + _args, _kwargs)
return val
def create(self, *_args, **_kwargs):
val = apply(controlsc.FXColorBar_create,(self,) + _args, _kwargs)
return val
def destroy(self, *_args, **_kwargs):
val = apply(controlsc.FXColorBar_destroy,(self,) + _args, _kwargs)
return val
def detach(self, *_args, **_kwargs):
val = apply(controlsc.FXColorBar_detach,(self,) + _args, _kwargs)
return val
def resize(self, *_args, **_kwargs):
val = apply(controlsc.FXColorBar_resize,(self,) + _args, _kwargs)
return val
def getDefaultWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXColorBar_getDefaultWidth,(self,) + _args, _kwargs)
return val
def getDefaultHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXColorBar_getDefaultHeight,(self,) + _args, _kwargs)
return val
def show(self, *_args, **_kwargs):
val = apply(controlsc.FXColorBar_show,(self,) + _args, _kwargs)
return val
def hide(self, *_args, **_kwargs):
val = apply(controlsc.FXColorBar_hide,(self,) + _args, _kwargs)
return val
def enable(self, *_args, **_kwargs):
val = apply(controlsc.FXColorBar_enable,(self,) + _args, _kwargs)
return val
def disable(self, *_args, **_kwargs):
val = apply(controlsc.FXColorBar_disable,(self,) + _args, _kwargs)
return val
def canFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXColorBar_canFocus,(self,) + _args, _kwargs)
return val
def setFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXColorBar_setFocus,(self,) + _args, _kwargs)
return val
def killFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXColorBar_killFocus,(self,) + _args, _kwargs)
return val
def setDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXColorBar_setDefault,(self,) + _args, _kwargs)
return val
def recalc(self, *_args, **_kwargs):
val = apply(controlsc.FXColorBar_recalc,(self,) + _args, _kwargs)
return val
def layout(self, *_args, **_kwargs):
val = apply(controlsc.FXColorBar_layout,(self,) + _args, _kwargs)
return val
def lower(self, *_args, **_kwargs):
val = apply(controlsc.FXColorBar_lower,(self,) + _args, _kwargs)
return val
def move(self, *_args, **_kwargs):
val = apply(controlsc.FXColorBar_move,(self,) + _args, _kwargs)
return val
def position(self, *_args, **_kwargs):
val = apply(controlsc.FXColorBar_position,(self,) + _args, _kwargs)
return val
def isComposite(self, *_args, **_kwargs):
val = apply(controlsc.FXColorBar_isComposite,(self,) + _args, _kwargs)
return val
def contains(self, *_args, **_kwargs):
val = apply(controlsc.FXColorBar_contains,(self,) + _args, _kwargs)
return val
def getWidthForHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXColorBar_getWidthForHeight,(self,) + _args, _kwargs)
return val
def getHeightForWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXColorBar_getHeightForWidth,(self,) + _args, _kwargs)
return val
def doesSaveUnder(self, *_args, **_kwargs):
val = apply(controlsc.FXColorBar_doesSaveUnder,(self,) + _args, _kwargs)
return val
def reparent(self, *_args, **_kwargs):
val = apply(controlsc.FXColorBar_reparent,(self,) + _args, _kwargs)
return val
def setBackColor(self, *_args, **_kwargs):
val = apply(controlsc.FXColorBar_setBackColor,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FXColorBar instance at %s>" % (self.this,)
class FXColorBar(FXColorBarPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FXColorBar,_args,_kwargs)
self.thisown = 1
FXPyRegister(self)
class FX_ColorWheelPtr(FX_FramePtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onPaint(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWheel_onPaint,(self,) + _args, _kwargs)
return val
def onLeftBtnPress(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWheel_onLeftBtnPress,(self,) + _args, _kwargs)
return val
def onLeftBtnRelease(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWheel_onLeftBtnRelease,(self,) + _args, _kwargs)
return val
def onMotion(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWheel_onMotion,(self,) + _args, _kwargs)
return val
def onQueryHelp(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWheel_onQueryHelp,(self,) + _args, _kwargs)
return val
def onQueryTip(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWheel_onQueryTip,(self,) + _args, _kwargs)
return val
def setHue(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWheel_setHue,(self,) + _args, _kwargs)
return val
def getHue(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWheel_getHue,(self,) + _args, _kwargs)
return val
def setSat(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWheel_setSat,(self,) + _args, _kwargs)
return val
def getSat(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWheel_getSat,(self,) + _args, _kwargs)
return val
def setVal(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWheel_setVal,(self,) + _args, _kwargs)
return val
def getVal(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWheel_getVal,(self,) + _args, _kwargs)
return val
def setHelpText(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWheel_setHelpText,(self,) + _args, _kwargs)
return val
def getHelpText(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWheel_getHelpText,(self,) + _args, _kwargs)
return val
def setTipText(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWheel_setTipText,(self,) + _args, _kwargs)
return val
def getTipText(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWheel_getTipText,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FX_ColorWheel instance at %s>" % (self.this,)
class FX_ColorWheel(FX_ColorWheelPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FX_ColorWheel,_args,_kwargs)
self.thisown = 1
class FXColorWheelPtr(FX_ColorWheelPtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWheel_onDefault,(self,) + _args, _kwargs)
return val
def create(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWheel_create,(self,) + _args, _kwargs)
return val
def destroy(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWheel_destroy,(self,) + _args, _kwargs)
return val
def detach(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWheel_detach,(self,) + _args, _kwargs)
return val
def resize(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWheel_resize,(self,) + _args, _kwargs)
return val
def getDefaultWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWheel_getDefaultWidth,(self,) + _args, _kwargs)
return val
def getDefaultHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWheel_getDefaultHeight,(self,) + _args, _kwargs)
return val
def show(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWheel_show,(self,) + _args, _kwargs)
return val
def hide(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWheel_hide,(self,) + _args, _kwargs)
return val
def enable(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWheel_enable,(self,) + _args, _kwargs)
return val
def disable(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWheel_disable,(self,) + _args, _kwargs)
return val
def canFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWheel_canFocus,(self,) + _args, _kwargs)
return val
def setFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWheel_setFocus,(self,) + _args, _kwargs)
return val
def killFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWheel_killFocus,(self,) + _args, _kwargs)
return val
def setDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWheel_setDefault,(self,) + _args, _kwargs)
return val
def recalc(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWheel_recalc,(self,) + _args, _kwargs)
return val
def layout(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWheel_layout,(self,) + _args, _kwargs)
return val
def lower(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWheel_lower,(self,) + _args, _kwargs)
return val
def move(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWheel_move,(self,) + _args, _kwargs)
return val
def position(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWheel_position,(self,) + _args, _kwargs)
return val
def isComposite(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWheel_isComposite,(self,) + _args, _kwargs)
return val
def contains(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWheel_contains,(self,) + _args, _kwargs)
return val
def getWidthForHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWheel_getWidthForHeight,(self,) + _args, _kwargs)
return val
def getHeightForWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWheel_getHeightForWidth,(self,) + _args, _kwargs)
return val
def doesSaveUnder(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWheel_doesSaveUnder,(self,) + _args, _kwargs)
return val
def reparent(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWheel_reparent,(self,) + _args, _kwargs)
return val
def setBackColor(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWheel_setBackColor,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FXColorWheel instance at %s>" % (self.this,)
class FXColorWheel(FXColorWheelPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FXColorWheel,_args,_kwargs)
self.thisown = 1
FXPyRegister(self)
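# Hedged usage sketch (not part of the generated wrapper): constructing one of
# the widgets above. The exact constructor arguments come from the underlying
# FOX C++ API, so the signature shown here is an assumption for illustration.
# wheel = FXColorWheel(parent, None, 0)  # parent: an existing FXComposite
# wheel.setHue(120.0)
# wheel.setSat(0.5)
# wheel.setVal(0.8)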
#-------------- FUNCTION WRAPPERS ------------------
def CreateFloatingToolbar(*_args, **_kwargs):
val = apply(controlsc.CreateFloatingToolbar,_args,_kwargs)
if val: val = FXToolbarPtr(val)
return val
def CreateNonFloatingToolbar(*_args, **_kwargs):
val = apply(controlsc.CreateNonFloatingToolbar,_args,_kwargs)
if val: val = FXToolbarPtr(val)
return val
#-------------- VARIABLE WRAPPERS ------------------
JUSTIFY_NORMAL = controlsc.JUSTIFY_NORMAL
JUSTIFY_CENTER_X = controlsc.JUSTIFY_CENTER_X
JUSTIFY_LEFT = controlsc.JUSTIFY_LEFT
JUSTIFY_RIGHT = controlsc.JUSTIFY_RIGHT
JUSTIFY_HZ_APART = controlsc.JUSTIFY_HZ_APART
JUSTIFY_CENTER_Y = controlsc.JUSTIFY_CENTER_Y
JUSTIFY_TOP = controlsc.JUSTIFY_TOP
JUSTIFY_BOTTOM = controlsc.JUSTIFY_BOTTOM
JUSTIFY_VT_APART = controlsc.JUSTIFY_VT_APART
ICON_UNDER_TEXT = controlsc.ICON_UNDER_TEXT
ICON_AFTER_TEXT = controlsc.ICON_AFTER_TEXT
ICON_BEFORE_TEXT = controlsc.ICON_BEFORE_TEXT
ICON_ABOVE_TEXT = controlsc.ICON_ABOVE_TEXT
ICON_BELOW_TEXT = controlsc.ICON_BELOW_TEXT
TEXT_OVER_ICON = controlsc.TEXT_OVER_ICON
TEXT_AFTER_ICON = controlsc.TEXT_AFTER_ICON
TEXT_BEFORE_ICON = controlsc.TEXT_BEFORE_ICON
TEXT_ABOVE_ICON = controlsc.TEXT_ABOVE_ICON
TEXT_BELOW_ICON = controlsc.TEXT_BELOW_ICON
LABEL_NORMAL = controlsc.LABEL_NORMAL
DIAL_VERTICAL = controlsc.DIAL_VERTICAL
DIAL_HORIZONTAL = controlsc.DIAL_HORIZONTAL
DIAL_CYCLIC = controlsc.DIAL_CYCLIC
DIAL_HAS_NOTCH = controlsc.DIAL_HAS_NOTCH
DIAL_NORMAL = controlsc.DIAL_NORMAL
COLORWELL_OPAQUEONLY = controlsc.COLORWELL_OPAQUEONLY
COLORWELL_SOURCEONLY = controlsc.COLORWELL_SOURCEONLY
COLORWELL_NORMAL = controlsc.COLORWELL_NORMAL
TEXTFIELD_PASSWD = controlsc.TEXTFIELD_PASSWD
TEXTFIELD_INTEGER = controlsc.TEXTFIELD_INTEGER
TEXTFIELD_REAL = controlsc.TEXTFIELD_REAL
TEXTFIELD_READONLY = controlsc.TEXTFIELD_READONLY
TEXTFIELD_ENTER_ONLY = controlsc.TEXTFIELD_ENTER_ONLY
TEXTFIELD_LIMITED = controlsc.TEXTFIELD_LIMITED
TEXTFIELD_OVERSTRIKE = controlsc.TEXTFIELD_OVERSTRIKE
TEXTFIELD_NORMAL = controlsc.TEXTFIELD_NORMAL
STATE_UP = controlsc.STATE_UP
STATE_DOWN = controlsc.STATE_DOWN
STATE_ENGAGED = controlsc.STATE_ENGAGED
STATE_UNCHECKED = controlsc.STATE_UNCHECKED
STATE_CHECKED = controlsc.STATE_CHECKED
BUTTON_AUTOGRAY = controlsc.BUTTON_AUTOGRAY
BUTTON_AUTOHIDE = controlsc.BUTTON_AUTOHIDE
BUTTON_TOOLBAR = controlsc.BUTTON_TOOLBAR
BUTTON_DEFAULT = controlsc.BUTTON_DEFAULT
BUTTON_INITIAL = controlsc.BUTTON_INITIAL
BUTTON_NORMAL = controlsc.BUTTON_NORMAL
TOGGLEBUTTON_AUTOGRAY = controlsc.TOGGLEBUTTON_AUTOGRAY
TOGGLEBUTTON_AUTOHIDE = controlsc.TOGGLEBUTTON_AUTOHIDE
TOGGLEBUTTON_TOOLBAR = controlsc.TOGGLEBUTTON_TOOLBAR
TOGGLEBUTTON_NORMAL = controlsc.TOGGLEBUTTON_NORMAL
RADIOBUTTON_AUTOGRAY = controlsc.RADIOBUTTON_AUTOGRAY
RADIOBUTTON_AUTOHIDE = controlsc.RADIOBUTTON_AUTOHIDE
RADIOBUTTON_NORMAL = controlsc.RADIOBUTTON_NORMAL
CHECKBUTTON_AUTOGRAY = controlsc.CHECKBUTTON_AUTOGRAY
CHECKBUTTON_AUTOHIDE = controlsc.CHECKBUTTON_AUTOHIDE
CHECKBUTTON_NORMAL = controlsc.CHECKBUTTON_NORMAL
ARROW_NONE = controlsc.ARROW_NONE
ARROW_UP = controlsc.ARROW_UP
ARROW_DOWN = controlsc.ARROW_DOWN
ARROW_LEFT = controlsc.ARROW_LEFT
ARROW_RIGHT = controlsc.ARROW_RIGHT
ARROW_REPEAT = controlsc.ARROW_REPEAT
ARROW_AUTOGRAY = controlsc.ARROW_AUTOGRAY
ARROW_AUTOHIDE = controlsc.ARROW_AUTOHIDE
ARROW_TOOLBAR = controlsc.ARROW_TOOLBAR
ARROW_NORMAL = controlsc.ARROW_NORMAL
SPIN_NORMAL = controlsc.SPIN_NORMAL
SPIN_CYCLIC = controlsc.SPIN_CYCLIC
SPIN_NOTEXT = controlsc.SPIN_NOTEXT
SPIN_NOMAX = controlsc.SPIN_NOMAX
SPIN_NOMIN = controlsc.SPIN_NOMIN
TOOLTIP_NORMAL = controlsc.TOOLTIP_NORMAL
TOOLTIP_PERMANENT = controlsc.TOOLTIP_PERMANENT
TOOLTIP_VARIABLE = controlsc.TOOLTIP_VARIABLE
TAB_TOP = controlsc.TAB_TOP
TAB_LEFT = controlsc.TAB_LEFT
TAB_RIGHT = controlsc.TAB_RIGHT
TAB_BOTTOM = controlsc.TAB_BOTTOM
TAB_TOP_NORMAL = controlsc.TAB_TOP_NORMAL
TAB_BOTTOM_NORMAL = controlsc.TAB_BOTTOM_NORMAL
TAB_LEFT_NORMAL = controlsc.TAB_LEFT_NORMAL
TAB_RIGHT_NORMAL = controlsc.TAB_RIGHT_NORMAL
TABBOOK_TOPTABS = controlsc.TABBOOK_TOPTABS
TABBOOK_BOTTOMTABS = controlsc.TABBOOK_BOTTOMTABS
TABBOOK_SIDEWAYS = controlsc.TABBOOK_SIDEWAYS
TABBOOK_LEFTTABS = controlsc.TABBOOK_LEFTTABS
TABBOOK_RIGHTTABS = controlsc.TABBOOK_RIGHTTABS
TABBOOK_NORMAL = controlsc.TABBOOK_NORMAL
SCROLLBAR_HORIZONTAL = controlsc.SCROLLBAR_HORIZONTAL
SCROLLBAR_VERTICAL = controlsc.SCROLLBAR_VERTICAL
LIST_EXTENDEDSELECT = controlsc.LIST_EXTENDEDSELECT
LIST_SINGLESELECT = controlsc.LIST_SINGLESELECT
LIST_BROWSESELECT = controlsc.LIST_BROWSESELECT
LIST_MULTIPLESELECT = controlsc.LIST_MULTIPLESELECT
LIST_AUTOSELECT = controlsc.LIST_AUTOSELECT
LIST_NORMAL = controlsc.LIST_NORMAL
COMBOBOX_NO_REPLACE = controlsc.COMBOBOX_NO_REPLACE
COMBOBOX_REPLACE = controlsc.COMBOBOX_REPLACE
COMBOBOX_INSERT_BEFORE = controlsc.COMBOBOX_INSERT_BEFORE
COMBOBOX_INSERT_AFTER = controlsc.COMBOBOX_INSERT_AFTER
COMBOBOX_INSERT_FIRST = controlsc.COMBOBOX_INSERT_FIRST
COMBOBOX_INSERT_LAST = controlsc.COMBOBOX_INSERT_LAST
COMBOBOX_STATIC = controlsc.COMBOBOX_STATIC
COMBOBOX_NORMAL = controlsc.COMBOBOX_NORMAL
STATUSBAR_WITH_DRAGCORNER = controlsc.STATUSBAR_WITH_DRAGCORNER
SLIDERBAR_SIZE = controlsc.SLIDERBAR_SIZE
SLIDERHEAD_SIZE = controlsc.SLIDERHEAD_SIZE
SLIDER_HORIZONTAL = controlsc.SLIDER_HORIZONTAL
SLIDER_VERTICAL = controlsc.SLIDER_VERTICAL
SLIDER_ARROW_UP = controlsc.SLIDER_ARROW_UP
SLIDER_ARROW_DOWN = controlsc.SLIDER_ARROW_DOWN
SLIDER_ARROW_LEFT = controlsc.SLIDER_ARROW_LEFT
SLIDER_ARROW_RIGHT = controlsc.SLIDER_ARROW_RIGHT
SLIDER_INSIDE_BAR = controlsc.SLIDER_INSIDE_BAR
SLIDER_TICKS_TOP = controlsc.SLIDER_TICKS_TOP
SLIDER_TICKS_BOTTOM = controlsc.SLIDER_TICKS_BOTTOM
SLIDER_TICKS_LEFT = controlsc.SLIDER_TICKS_LEFT
SLIDER_TICKS_RIGHT = controlsc.SLIDER_TICKS_RIGHT
SLIDER_NORMAL = controlsc.SLIDER_NORMAL
HEADER_BUTTON = controlsc.HEADER_BUTTON
HEADER_HORIZONTAL = controlsc.HEADER_HORIZONTAL
HEADER_VERTICAL = controlsc.HEADER_VERTICAL
HEADER_TRACKING = controlsc.HEADER_TRACKING
HEADER_NORMAL = controlsc.HEADER_NORMAL
PROGRESSBAR_HORIZONTAL = controlsc.PROGRESSBAR_HORIZONTAL
PROGRESSBAR_VERTICAL = controlsc.PROGRESSBAR_VERTICAL
PROGRESSBAR_PERCENTAGE = controlsc.PROGRESSBAR_PERCENTAGE
PROGRESSBAR_DIAL = controlsc.PROGRESSBAR_DIAL
PROGRESSBAR_NORMAL = controlsc.PROGRESSBAR_NORMAL
TOOLBARTAB_HORIZONTAL = controlsc.TOOLBARTAB_HORIZONTAL
TOOLBARTAB_VERTICAL = controlsc.TOOLBARTAB_VERTICAL
TOOLBARGRIP_SINGLE = controlsc.TOOLBARGRIP_SINGLE
TOOLBARGRIP_DOUBLE = controlsc.TOOLBARGRIP_DOUBLE
TOOLBARGRIP_SEPARATOR = controlsc.TOOLBARGRIP_SEPARATOR
LISTBOX_NORMAL = controlsc.LISTBOX_NORMAL
COLORBAR_HORIZONTAL = controlsc.COLORBAR_HORIZONTAL
COLORBAR_VERTICAL = controlsc.COLORBAR_VERTICAL
cvar = controlsc.cvar
|
lgpl-2.1
| 2,367,650,108,529,640,000
| 43.259746
| 91
| 0.615725
| false
| 3.429599
| false
| false
| false
|
veusz/veusz
|
veusz/plugins/votable.py
|
1
|
3549
|
# Copyright (C) 2012 Science and Technology Facilities Council.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
##############################################################################
import io
from urllib import request
from .importplugin import ImportPlugin, importpluginregistry
from .datasetplugin import Dataset1D, DatasetText
try:
from astropy.io.votable.table import parse
except ImportError:
parse = None
print('VO table import: astropy module not available')
class ImportPluginVoTable(ImportPlugin):
name = 'VO table import'
author = 'Graham Bell'
description = 'Reads datasets from VO tables'
def _load_votable(self, params):
if 'url' in params.field_results:
try:
buff = io.StringIO(request.urlopen(
params.field_results['url']).read())
except TypeError:
buff = io.BytesIO(request.urlopen(
params.field_results['url']).read())
return parse(buff, filename=params.filename)
else:
return parse(params.filename)
def doImport(self, params):
result = []
votable = self._load_votable(params)
for table in votable.iter_tables():
for field in table.fields:
fieldname = field.name
if field.datatype in [
'float', 'double', 'short', 'int', 'unsignedByte']:
result.append(Dataset1D(
fieldname, table.array[fieldname]))
elif field.datatype in ['char', 'string', 'unicodeChar']:
result.append(DatasetText(
fieldname, table.array[fieldname]))
elif field.datatype in ['floatComplex', 'doubleComplex']:
print(
'VO table import: skipping complex field ' +
fieldname)
elif field.datatype in ['boolean', 'bit']:
print(
'VO table import: skipping boolean field ' +
fieldname)
else:
print(
'VO table import: unknown data type ' +
field.datatype + ' for field ' + fieldname)
return result
def getPreview(self, params):
try:
votable = self._load_votable(params)
except:
return ('', False)
summary = []
for table in votable.iter_tables():
summary.append(table.name + ':')
for field in table.fields:
summary.append(
' ' + field.name +
                ' (' + field.datatype + ')')
return ('\n'.join(summary), True)
if parse is not None:
importpluginregistry += [ImportPluginVoTable]
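# Hedged standalone sketch of what this plugin wraps (assumes astropy is
# installed; 'tables.xml' is a hypothetical local VO table file):
#
#   from astropy.io.votable.table import parse
#   votable = parse('tables.xml')
#   for table in votable.iter_tables():
#       for field in table.fields:
#           print(field.name, field.datatype)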
|
gpl-2.0
| -2,412,068,919,430,953,500
| 34.49
| 78
| 0.560158
| false
| 4.738318
| false
| false
| false
|
plaufer/wikiwsd
|
wsd/build/articleinserter.py
|
1
|
1279
|
import Queue
import threading
MAX_WAIT_QUEUE_TIMEOUT = 2
class ArticleInserter(threading.Thread):
'''Thread which inserts articles into the database
'''
    def __init__(self, queue, build_view):
        '''constructor

        @param queue the queue from which the articles and redirects are read
        @param build_view the database build view used to connect to the database
        '''
        threading.Thread.__init__(self)
self._queue = queue
self._build_view = build_view
self._end = False
def run(self):
while not self._end:
try:
# fetch item from queue
item = self._queue.get(True, MAX_WAIT_QUEUE_TIMEOUT)
# insert as article or redirect respectively
if item['type'] == 'article':
self._build_view.insert_article(item['id'], item['title'])
else:
self._build_view.insert_redirect(item['title'], item['target'])
# commit and mark as done
self._build_view.commit()
self._build_view.reset_cache()
self._queue.task_done()
except Queue.Empty:
pass
def end(self):
self._end = True
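# Hedged usage sketch (not part of the original module): wiring the inserter to
# a work queue. 'build_view' is assumed to expose the insert_article /
# insert_redirect / commit / reset_cache methods used in run() above.
#
#   queue = Queue.Queue()
#   inserter = ArticleInserter(queue, build_view)
#   inserter.start()
#   queue.put({'type': 'article', 'id': 1, 'title': 'Python'})
#   queue.put({'type': 'redirect', 'title': 'CPython', 'target': 'Python'})
#   queue.join()    # wait until both items are processed
#   inserter.end()  # then ask the worker thread to stop
#   inserter.join()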
|
mit
| 5,740,311,425,071,852,000
| 29.47619
| 86
| 0.542611
| false
| 4.650909
| false
| false
| false
|
Unknowncmbk/Two-Shot
|
backend/participant_stat.py
|
1
|
2762
|
#!/usr/bin/python
# local imports
import credentials
# python modules
import MySQLdb
import urllib
import json
class ParticipantStat(object):
def __init__(self, match_id, participant_id):
self.match_id = match_id
self.participant_id = participant_id
self.kills = 0
self.deaths = 0
self.assists = 0
self.magic_damage = 0
self.magic_damage_champs = 0
self.magic_damage_taken = 0
self.champ_level = 0
self.gold_earned = 0
self.win = 0
def __setKDA__(self, kills, deaths, assists):
self.kills = kills
self.deaths = deaths
self.assists = assists
def __setDamage__(self, magic_damage, magic_damage_champs, magic_damage_taken):
self.magic_damage = magic_damage
self.magic_damage_champs = magic_damage_champs
self.magic_damage_taken = magic_damage_taken
def __setOther__(self, champ_level, gold_earned, win):
self.champ_level = champ_level
self.gold_earned = gold_earned
self.win = win
def __str__(self):
return "match_id: " + str(self.match_id) + "\nparticipant_id: " + str(self.participant_id)
def save(self):
"""
Saves this ParticipantStat to the database.
"""
# Get new database instance
db = credentials.getDatabase()
cur = db.cursor()
query = '''INSERT IGNORE INTO participant_stat (match_id, participant_id, kills, deaths, assists, magic_damage, magic_damage_champs, magic_damage_taken, champ_level, gold_earned, win)
VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);'''
data = (self.match_id, self.participant_id, self.kills, self.deaths, self.assists, self.magic_damage, self.magic_damage_champs, self.magic_damage_taken, self.champ_level, self.gold_earned, self.win)
cur.execute(query, data)
# commit query
db.commit()
db.close()
return True
def load(match_id, participant_id):
'''
Args:
match_id: The id of the match
participant_id: The id of the participant
Returns:
        A ParticipantStat object, or None if no matching row exists.
'''
# Get new database instance
db = credentials.getDatabase()
cur = db.cursor()
query = '''SELECT * FROM participant_stat WHERE match_id = %s AND participant_id = %s;'''
    cur.execute(query, (match_id, participant_id))  # params as a sequence
    pa = None
    for tup in cur:
        # the constructor only takes (match_id, participant_id); set the rest
        pa = ParticipantStat(tup[0], tup[1])
        pa.__setKDA__(tup[2], tup[3], tup[4])
        pa.__setDamage__(tup[5], tup[6], tup[7])
        pa.__setOther__(tup[8], tup[9], tup[10])
# commit query
db.commit()
db.close()
return pa
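# Hedged usage sketch (assumes credentials.getDatabase() points at a database
# with the participant_stat table):
#
#   stat = ParticipantStat(12345, 1)
#   stat.__setKDA__(5, 2, 11)
#   stat.__setDamage__(20000, 15000, 9000)
#   stat.__setOther__(18, 13000, 1)
#   stat.save()
#   loaded = load(12345, 1)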
|
mit
| -8,455,300,076,131,699,000
| 29.033708
| 206
| 0.581101
| false
| 3.405672
| false
| false
| false
|
storborg/pyweaving
|
pyweaving/generators/tartan.py
|
1
|
3330
|
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import re
from .. import Draft
color_map = {
'A': (92, 140, 168), # azure / light blue
'G': (0, 104, 24), # green
'B': (44, 44, 128), # blue
'K': (0, 0, 0), # black
'W': (224, 224, 224), # white
'Y': (232, 192, 0), # yellow
'R': (200, 0, 44), # red
'P': (120, 0, 120), # purple
'C': (208, 80, 84), # ??? light red of some kind
'LP': (180, 104, 172), # light purple
}
def tartan(sett, repeats=1):
colors = []
for piece in sett.split(', '):
        m = re.match(r'([A-Z]+)(\d+)', piece)
colors.append((
color_map[m.group(1)],
int(m.group(2)),
))
# tartan is always the same design mirrored once
colors.extend(reversed(colors))
print("Threads per repeat: %d" %
sum(count for color, count in colors))
# tartan is always 2/2 twill
# we'll need 4 shafts and 4 treadles
draft = Draft(num_shafts=4, num_treadles=4)
# do tie-up
for ii in range(4):
draft.treadles[3 - ii].shafts.add(draft.shafts[ii])
draft.treadles[3 - ii].shafts.add(draft.shafts[(ii + 1) % 4])
thread_no = 0
for ii in range(repeats):
for color, count in colors:
for jj in range(count):
draft.add_warp_thread(
color=color,
shaft=thread_no % 4,
)
draft.add_weft_thread(
color=color,
treadles=[thread_no % 4],
)
thread_no += 1
return draft
# Tartan Setts
gordon_red = ('A12, G12, R18, K12, R18, B18, W4, C16, W4, K32, A12, '
'W4, B32, W4, G36')
gordon_modern = 'B24, K4, B4, K4, B4, K24, G24, Y4, G24, K24, B24, K4, B4'
gordon_dress = ('W4, B2, W24, B4, W4, K16, B16, K4, B4, K4, B16, K16, '
'G16, K2, Y4, K2, G16, K16, W4, B4, W24, B2, W4')
gordon_old = 'B24, K4, B4, K4, B4, K24, G24, Y4, G24, K24, B24, K4, B4'
gordon_red_muted = ('A12, G12, R18, K12, R18, B18, W4, C16, W4, K32, A12, '
'W4, B32, W4, G36')
gordon_red_old_huntly = ('B28, W2, G16, W2, DG32, A12, W2, B28, W2, G28, '
'A12, G12, R16, DG12, R16, DG2')
gordon_old_ancient = 'K8, B46, K46, G44, Y6, G6, Y12'
gordon_of_abergeldie = 'G36, Y2, LP12, K2, W2, R40'
gordon_of_esselmont = 'K8, P46, K46, G44, Y6, G6, Y12'
gordon_roxburgh_district = 'B4, R2, G32, B16, W2, B2, W2, B32'
gordon_roxburgh_red = 'B6, DG52, B6, R6, B40, R6, B6, R52, DG10, W6'
gordon_roxburgh_red_muted = 'B6, DG52, B6, R6, B40, R6, B6, R52, DG10, W6'
gordon_huntly_district = ('G16, R4, G16, R24, B2, R2, B4, R2, B2, R24, B2, '
'R2, B4, R2, B2, R24, W2, R6, Y2, B24, R6, B24, '
'Y2, R6, W2, R24, G4, R6, G4, R24, G16, R4, G16')
gordon_aberdeen_district = ('W4, LG8, K32, W4, P12, A8, W4, A8, P12, W4, P6, '
'R16, LR6, W4, LR6, R16, P6, W4, K24, LG8, K24, '
'W4, P6, R16, LR6, W4, LR6, R16, P6, W4, A20, W4, '
'R12, LR6, W2, LR6, R12, W4, LG8, K32, W4, R46, '
'LR6, W4')
gordon_huntly = 'R4, MB6, FB24, K22, MG22, Y4'
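# Hedged usage sketch: building a draft from one of the setts above. Note that
# color_map only defines a subset of codes; setts that use e.g. 'DG', 'LG',
# 'MB', 'FB', 'LR' or 'MG' would raise a KeyError as written.
#
#   draft = tartan(gordon_modern, repeats=2)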
|
mit
| -5,582,420,518,683,146,000
| 31.647059
| 79
| 0.494595
| false
| 2.318942
| false
| false
| false
|
timj/scons
|
src/engine/SCons/Tool/docbook/__init__.py
|
1
|
29293
|
"""SCons.Tool.docbook
Tool-specific initialization for Docbook.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
import os
import glob
import re
import SCons.Action
import SCons.Builder
import SCons.Defaults
import SCons.Script
import SCons.Tool
import SCons.Util
# Get full path to this script
scriptpath = os.path.dirname(os.path.realpath(__file__))
# Local folder for the collection of DocBook XSLs
db_xsl_folder = 'docbook-xsl-1.76.1'
# Do we have libxml2/libxslt/lxml?
has_libxml2 = True
has_lxml = True
try:
import libxml2
import libxslt
except:
has_libxml2 = False
try:
import lxml
except:
has_lxml = False
# Set this to True, to prefer xsltproc over libxml2 and lxml
prefer_xsltproc = False
# Regexs for parsing Docbook XML sources of MAN pages
re_manvolnum = re.compile("<manvolnum>([^<]*)</manvolnum>")
re_refname = re.compile("<refname>([^<]*)</refname>")
#
# Helper functions
#
def __extend_targets_sources(target, source):
""" Prepare the lists of target and source files. """
if not SCons.Util.is_List(target):
target = [target]
if not source:
source = target[:]
elif not SCons.Util.is_List(source):
source = [source]
if len(target) < len(source):
target.extend(source[len(target):])
return target, source
def __init_xsl_stylesheet(kw, env, user_xsl_var, default_path):
if kw.get('DOCBOOK_XSL','') == '':
xsl_style = kw.get('xsl', env.subst(user_xsl_var))
if xsl_style == '':
path_args = [scriptpath, db_xsl_folder] + default_path
xsl_style = os.path.join(*path_args)
kw['DOCBOOK_XSL'] = xsl_style
def __select_builder(lxml_builder, libxml2_builder, cmdline_builder):
""" Selects a builder, based on which Python modules are present. """
if prefer_xsltproc:
return cmdline_builder
if not has_libxml2:
# At the moment we prefer libxml2 over lxml, the latter can lead
# to conflicts when installed together with libxml2.
if has_lxml:
return lxml_builder
else:
return cmdline_builder
return libxml2_builder
def __ensure_suffix(t, suffix):
""" Ensure that the target t has the given suffix. """
tpath = str(t)
if not tpath.endswith(suffix):
return tpath+suffix
return t
def __ensure_suffix_stem(t, suffix):
""" Ensure that the target t has the given suffix, and return the file's stem. """
tpath = str(t)
if not tpath.endswith(suffix):
stem = tpath
tpath += suffix
return tpath, stem
else:
stem, ext = os.path.splitext(tpath)
return t, stem
def __get_xml_text(root):
""" Return the text for the given root node (xml.dom.minidom). """
txt = ""
for e in root.childNodes:
if (e.nodeType == e.TEXT_NODE):
txt += e.data
return txt
def __create_output_dir(base_dir):
""" Ensure that the output directory base_dir exists. """
root, tail = os.path.split(base_dir)
dir = None
if tail:
if base_dir.endswith('/'):
dir = base_dir
else:
dir = root
else:
if base_dir.endswith('/'):
dir = base_dir
if dir and not os.path.isdir(dir):
os.makedirs(dir)
#
# Supported command line tools and their call "signature"
#
xsltproc_com = {'xsltproc' : '$DOCBOOK_XSLTPROC $DOCBOOK_XSLTPROCFLAGS -o $TARGET $DOCBOOK_XSL $SOURCE',
'saxon' : '$DOCBOOK_XSLTPROC $DOCBOOK_XSLTPROCFLAGS -o $TARGET $DOCBOOK_XSL $SOURCE $DOCBOOK_XSLTPROCPARAMS',
'saxon-xslt' : '$DOCBOOK_XSLTPROC $DOCBOOK_XSLTPROCFLAGS -o $TARGET $DOCBOOK_XSL $SOURCE $DOCBOOK_XSLTPROCPARAMS',
'xalan' : '$DOCBOOK_XSLTPROC $DOCBOOK_XSLTPROCFLAGS -q -out $TARGET -xsl $DOCBOOK_XSL -in $SOURCE'}
xmllint_com = {'xmllint' : '$DOCBOOK_XMLLINT $DOCBOOK_XMLLINTFLAGS --xinclude $SOURCE > $TARGET'}
fop_com = {'fop' : '$DOCBOOK_FOP $DOCBOOK_FOPFLAGS -fo $SOURCE -pdf $TARGET',
'xep' : '$DOCBOOK_FOP $DOCBOOK_FOPFLAGS -valid -fo $SOURCE -pdf $TARGET',
'jw' : '$DOCBOOK_FOP $DOCBOOK_FOPFLAGS -f docbook -b pdf $SOURCE -o $TARGET'}
def __detect_cl_tool(env, chainkey, cdict):
"""
Helper function, picks a command line tool from the list
and initializes its environment variables.
"""
if env.get(chainkey,'') == '':
clpath = ''
for cltool in cdict:
clpath = env.WhereIs(cltool)
if clpath:
env[chainkey] = clpath
if not env[chainkey + 'COM']:
env[chainkey + 'COM'] = cdict[cltool]
def _detect(env):
"""
Detect all the command line tools that we might need for creating
the requested output formats.
"""
global prefer_xsltproc
if env.get('DOCBOOK_PREFER_XSLTPROC',''):
prefer_xsltproc = True
if ((not has_libxml2 and not has_lxml) or (prefer_xsltproc)):
# Try to find the XSLT processors
__detect_cl_tool(env, 'DOCBOOK_XSLTPROC', xsltproc_com)
__detect_cl_tool(env, 'DOCBOOK_XMLLINT', xmllint_com)
__detect_cl_tool(env, 'DOCBOOK_FOP', fop_com)
#
# Scanners
#
include_re = re.compile('fileref\\s*=\\s*["|\']([^\\n]*)["|\']')
sentity_re = re.compile('<!ENTITY\\s+%*\\s*[^\\s]+\\s+SYSTEM\\s+["|\']([^\\n]*)["|\']>')
def __xml_scan(node, env, path, arg):
""" Simple XML file scanner, detecting local images and XIncludes as implicit dependencies. """
# Does the node exist yet?
if not os.path.isfile(str(node)):
return []
if env.get('DOCBOOK_SCANENT',''):
# Use simple pattern matching for system entities..., no support
# for recursion yet.
contents = node.get_text_contents()
return sentity_re.findall(contents)
xsl_file = os.path.join(scriptpath,'utils','xmldepend.xsl')
if not has_libxml2 or prefer_xsltproc:
if has_lxml and not prefer_xsltproc:
from lxml import etree
xsl_tree = etree.parse(xsl_file)
doc = etree.parse(str(node))
result = doc.xslt(xsl_tree)
depfiles = [x.strip() for x in str(result).splitlines() if x.strip() != "" and not x.startswith("<?xml ")]
return depfiles
else:
# Try to call xsltproc
xsltproc = env.subst("$DOCBOOK_XSLTPROC")
if xsltproc and xsltproc.endswith('xsltproc'):
result = env.backtick(' '.join([xsltproc, xsl_file, str(node)]))
depfiles = [x.strip() for x in str(result).splitlines() if x.strip() != "" and not x.startswith("<?xml ")]
return depfiles
else:
# Use simple pattern matching, there is currently no support
# for xi:includes...
contents = node.get_text_contents()
return include_re.findall(contents)
styledoc = libxml2.parseFile(xsl_file)
style = libxslt.parseStylesheetDoc(styledoc)
doc = libxml2.readFile(str(node), None, libxml2.XML_PARSE_NOENT)
result = style.applyStylesheet(doc, None)
depfiles = []
for x in str(result).splitlines():
if x.strip() != "" and not x.startswith("<?xml "):
depfiles.extend(x.strip().split())
style.freeStylesheet()
doc.freeDoc()
result.freeDoc()
return depfiles
# Creating the instance of our XML dependency scanner
docbook_xml_scanner = SCons.Script.Scanner(function = __xml_scan,
argument = None)
#
# Action generators
#
def __generate_xsltproc_action(source, target, env, for_signature):
cmd = env['DOCBOOK_XSLTPROCCOM']
# Does the environment have a base_dir defined?
base_dir = env.subst('$base_dir')
if base_dir:
# Yes, so replace target path by its filename
return cmd.replace('$TARGET','${TARGET.file}')
return cmd
#
# Emitters
#
def __emit_xsl_basedir(target, source, env):
# Does the environment have a base_dir defined?
base_dir = env.subst('$base_dir')
if base_dir:
# Yes, so prepend it to each target
return [os.path.join(base_dir, str(t)) for t in target], source
# No, so simply pass target and source names through
return target, source
#
# Builders
#
def __build_libxml2(target, source, env):
"""
General XSLT builder (HTML/FO), using the libxml2 module.
"""
xsl_style = env.subst('$DOCBOOK_XSL')
styledoc = libxml2.parseFile(xsl_style)
style = libxslt.parseStylesheetDoc(styledoc)
doc = libxml2.readFile(str(source[0]),None,libxml2.XML_PARSE_NOENT)
# Support for additional parameters
parampass = {}
if parampass:
result = style.applyStylesheet(doc, parampass)
else:
result = style.applyStylesheet(doc, None)
style.saveResultToFilename(str(target[0]), result, 0)
style.freeStylesheet()
doc.freeDoc()
result.freeDoc()
return None
def __build_lxml(target, source, env):
"""
General XSLT builder (HTML/FO), using the lxml module.
"""
from lxml import etree
xslt_ac = etree.XSLTAccessControl(read_file=True,
write_file=True,
create_dir=True,
read_network=False,
write_network=False)
xsl_style = env.subst('$DOCBOOK_XSL')
xsl_tree = etree.parse(xsl_style)
transform = etree.XSLT(xsl_tree, access_control=xslt_ac)
doc = etree.parse(str(source[0]))
# Support for additional parameters
parampass = {}
if parampass:
result = transform(doc, **parampass)
else:
result = transform(doc)
try:
of = open(str(target[0]), "wb")
        of.write(etree.tostring(result, pretty_print=True))
of.close()
except:
pass
return None
def __xinclude_libxml2(target, source, env):
"""
Resolving XIncludes, using the libxml2 module.
"""
doc = libxml2.readFile(str(source[0]), None, libxml2.XML_PARSE_NOENT)
doc.xincludeProcessFlags(libxml2.XML_PARSE_NOENT)
doc.saveFile(str(target[0]))
doc.freeDoc()
return None
def __xinclude_lxml(target, source, env):
"""
Resolving XIncludes, using the lxml module.
"""
from lxml import etree
doc = etree.parse(str(source[0]))
doc.xinclude()
try:
doc.write(str(target[0]), xml_declaration=True,
encoding="UTF-8", pretty_print=True)
except:
pass
return None
__libxml2_builder = SCons.Builder.Builder(
action = __build_libxml2,
src_suffix = '.xml',
source_scanner = docbook_xml_scanner,
emitter = __emit_xsl_basedir)
__lxml_builder = SCons.Builder.Builder(
action = __build_lxml,
src_suffix = '.xml',
source_scanner = docbook_xml_scanner,
emitter = __emit_xsl_basedir)
__xinclude_libxml2_builder = SCons.Builder.Builder(
action = __xinclude_libxml2,
suffix = '.xml',
src_suffix = '.xml',
source_scanner = docbook_xml_scanner)
__xinclude_lxml_builder = SCons.Builder.Builder(
action = __xinclude_lxml,
suffix = '.xml',
src_suffix = '.xml',
source_scanner = docbook_xml_scanner)
__xsltproc_builder = SCons.Builder.Builder(
action = SCons.Action.CommandGeneratorAction(__generate_xsltproc_action,
{'cmdstr' : '$DOCBOOK_XSLTPROCCOMSTR'}),
src_suffix = '.xml',
source_scanner = docbook_xml_scanner,
emitter = __emit_xsl_basedir)
__xmllint_builder = SCons.Builder.Builder(
action = SCons.Action.Action('$DOCBOOK_XMLLINTCOM','$DOCBOOK_XMLLINTCOMSTR'),
suffix = '.xml',
src_suffix = '.xml',
source_scanner = docbook_xml_scanner)
__fop_builder = SCons.Builder.Builder(
action = SCons.Action.Action('$DOCBOOK_FOPCOM','$DOCBOOK_FOPCOMSTR'),
suffix = '.pdf',
src_suffix = '.fo',
ensure_suffix=1)
def DocbookEpub(env, target, source=None, *args, **kw):
"""
A pseudo-Builder, providing a Docbook toolchain for ePub output.
"""
import zipfile
import shutil
def build_open_container(target, source, env):
"""Generate the *.epub file from intermediate outputs
Constructs the epub file according to the Open Container Format. This
function could be replaced by a call to the SCons Zip builder if support
was added for different compression formats for separate source nodes.
"""
zf = zipfile.ZipFile(str(target[0]), 'w')
mime_file = open('mimetype', 'w')
mime_file.write('application/epub+zip')
mime_file.close()
zf.write(mime_file.name, compress_type = zipfile.ZIP_STORED)
for s in source:
if os.path.isfile(str(s)):
head, tail = os.path.split(str(s))
if not head:
continue
s = head
for dirpath, dirnames, filenames in os.walk(str(s)):
for fname in filenames:
path = os.path.join(dirpath, fname)
if os.path.isfile(path):
zf.write(path, os.path.relpath(path, str(env.get('ZIPROOT', ''))),
zipfile.ZIP_DEFLATED)
zf.close()
def add_resources(target, source, env):
"""Add missing resources to the OEBPS directory
Ensure all the resources in the manifest are present in the OEBPS directory.
"""
hrefs = []
content_file = os.path.join(source[0].get_abspath(), 'content.opf')
if not os.path.isfile(content_file):
return
hrefs = []
if has_libxml2:
nsmap = {'opf' : 'http://www.idpf.org/2007/opf'}
# Read file and resolve entities
doc = libxml2.readFile(content_file, None, 0)
opf = doc.getRootElement()
# Create xpath context
xpath_context = doc.xpathNewContext()
# Register namespaces
for key, val in nsmap.iteritems():
xpath_context.xpathRegisterNs(key, val)
if hasattr(opf, 'xpathEval') and xpath_context:
# Use the xpath context
xpath_context.setContextNode(opf)
items = xpath_context.xpathEval(".//opf:item")
else:
items = opf.findall(".//{'http://www.idpf.org/2007/opf'}item")
for item in items:
if hasattr(item, 'prop'):
hrefs.append(item.prop('href'))
else:
hrefs.append(item.attrib['href'])
doc.freeDoc()
xpath_context.xpathFreeContext()
elif has_lxml:
from lxml import etree
opf = etree.parse(content_file)
# All the opf:item elements are resources
for item in opf.xpath('//opf:item',
namespaces= { 'opf': 'http://www.idpf.org/2007/opf' }):
hrefs.append(item.attrib['href'])
for href in hrefs:
# If the resource was not already created by DocBook XSL itself,
# copy it into the OEBPS folder
referenced_file = os.path.join(source[0].get_abspath(), href)
if not os.path.exists(referenced_file):
shutil.copy(href, os.path.join(source[0].get_abspath(), href))
# Init list of targets/sources
target, source = __extend_targets_sources(target, source)
# Init XSL stylesheet
__init_xsl_stylesheet(kw, env, '$DOCBOOK_DEFAULT_XSL_EPUB', ['epub','docbook.xsl'])
# Setup builder
__builder = __select_builder(__lxml_builder, __libxml2_builder, __xsltproc_builder)
# Create targets
result = []
if not env.GetOption('clean'):
# Ensure that the folders OEBPS and META-INF exist
__create_output_dir('OEBPS/')
__create_output_dir('META-INF/')
dirs = env.Dir(['OEBPS', 'META-INF'])
# Set the fixed base_dir
kw['base_dir'] = 'OEBPS/'
tocncx = __builder.__call__(env, 'toc.ncx', source[0], **kw)
cxml = env.File('META-INF/container.xml')
env.SideEffect(cxml, tocncx)
env.Depends(tocncx, kw['DOCBOOK_XSL'])
result.extend(tocncx+[cxml])
container = env.Command(__ensure_suffix(str(target[0]), '.epub'),
tocncx+[cxml], [add_resources, build_open_container])
mimetype = env.File('mimetype')
env.SideEffect(mimetype, container)
result.extend(container)
# Add supporting files for cleanup
env.Clean(tocncx, dirs)
return result
def DocbookHtml(env, target, source=None, *args, **kw):
"""
A pseudo-Builder, providing a Docbook toolchain for HTML output.
"""
# Init list of targets/sources
target, source = __extend_targets_sources(target, source)
# Init XSL stylesheet
__init_xsl_stylesheet(kw, env, '$DOCBOOK_DEFAULT_XSL_HTML', ['html','docbook.xsl'])
# Setup builder
__builder = __select_builder(__lxml_builder, __libxml2_builder, __xsltproc_builder)
# Create targets
result = []
for t,s in zip(target,source):
r = __builder.__call__(env, __ensure_suffix(t,'.html'), s, **kw)
env.Depends(r, kw['DOCBOOK_XSL'])
result.extend(r)
return result
def DocbookHtmlChunked(env, target, source=None, *args, **kw):
"""
A pseudo-Builder, providing a Docbook toolchain for chunked HTML output.
"""
# Init target/source
if not SCons.Util.is_List(target):
target = [target]
if not source:
source = target
target = ['index.html']
elif not SCons.Util.is_List(source):
source = [source]
# Init XSL stylesheet
__init_xsl_stylesheet(kw, env, '$DOCBOOK_DEFAULT_XSL_HTMLCHUNKED', ['html','chunkfast.xsl'])
# Setup builder
__builder = __select_builder(__lxml_builder, __libxml2_builder, __xsltproc_builder)
# Detect base dir
base_dir = kw.get('base_dir', '')
if base_dir:
__create_output_dir(base_dir)
# Create targets
result = []
r = __builder.__call__(env, __ensure_suffix(str(target[0]), '.html'), source[0], **kw)
env.Depends(r, kw['DOCBOOK_XSL'])
result.extend(r)
# Add supporting files for cleanup
env.Clean(r, glob.glob(os.path.join(base_dir, '*.html')))
return result
def DocbookHtmlhelp(env, target, source=None, *args, **kw):
"""
A pseudo-Builder, providing a Docbook toolchain for HTMLHELP output.
"""
# Init target/source
if not SCons.Util.is_List(target):
target = [target]
if not source:
source = target
target = ['index.html']
elif not SCons.Util.is_List(source):
source = [source]
# Init XSL stylesheet
__init_xsl_stylesheet(kw, env, '$DOCBOOK_DEFAULT_XSL_HTMLHELP', ['htmlhelp','htmlhelp.xsl'])
# Setup builder
__builder = __select_builder(__lxml_builder, __libxml2_builder, __xsltproc_builder)
# Detect base dir
base_dir = kw.get('base_dir', '')
if base_dir:
__create_output_dir(base_dir)
# Create targets
result = []
r = __builder.__call__(env, __ensure_suffix(str(target[0]), '.html'), source[0], **kw)
env.Depends(r, kw['DOCBOOK_XSL'])
result.extend(r)
# Add supporting files for cleanup
env.Clean(r, ['toc.hhc', 'htmlhelp.hhp', 'index.hhk'] +
glob.glob(os.path.join(base_dir, '[ar|bk|ch]*.html')))
return result
def DocbookPdf(env, target, source=None, *args, **kw):
"""
A pseudo-Builder, providing a Docbook toolchain for PDF output.
"""
# Init list of targets/sources
target, source = __extend_targets_sources(target, source)
# Init XSL stylesheet
__init_xsl_stylesheet(kw, env, '$DOCBOOK_DEFAULT_XSL_PDF', ['fo','docbook.xsl'])
# Setup builder
__builder = __select_builder(__lxml_builder, __libxml2_builder, __xsltproc_builder)
# Create targets
result = []
for t,s in zip(target,source):
t, stem = __ensure_suffix_stem(t, '.pdf')
xsl = __builder.__call__(env, stem+'.fo', s, **kw)
result.extend(xsl)
env.Depends(xsl, kw['DOCBOOK_XSL'])
result.extend(__fop_builder.__call__(env, t, xsl, **kw))
return result
def DocbookMan(env, target, source=None, *args, **kw):
"""
A pseudo-Builder, providing a Docbook toolchain for Man page output.
"""
# Init list of targets/sources
target, source = __extend_targets_sources(target, source)
# Init XSL stylesheet
__init_xsl_stylesheet(kw, env, '$DOCBOOK_DEFAULT_XSL_MAN', ['manpages','docbook.xsl'])
# Setup builder
__builder = __select_builder(__lxml_builder, __libxml2_builder, __xsltproc_builder)
# Create targets
result = []
for t,s in zip(target,source):
volnum = "1"
outfiles = []
srcfile = __ensure_suffix(str(s),'.xml')
if os.path.isfile(srcfile):
try:
import xml.dom.minidom
dom = xml.dom.minidom.parse(__ensure_suffix(str(s),'.xml'))
# Extract volume number, default is 1
for node in dom.getElementsByTagName('refmeta'):
for vol in node.getElementsByTagName('manvolnum'):
volnum = __get_xml_text(vol)
# Extract output filenames
for node in dom.getElementsByTagName('refnamediv'):
for ref in node.getElementsByTagName('refname'):
outfiles.append(__get_xml_text(ref)+'.'+volnum)
except:
# Use simple regex parsing
f = open(__ensure_suffix(str(s),'.xml'), 'r')
content = f.read()
f.close()
for m in re_manvolnum.finditer(content):
volnum = m.group(1)
for m in re_refname.finditer(content):
outfiles.append(m.group(1)+'.'+volnum)
if not outfiles:
# Use stem of the source file
spath = str(s)
if not spath.endswith('.xml'):
outfiles.append(spath+'.'+volnum)
else:
stem, ext = os.path.splitext(spath)
outfiles.append(stem+'.'+volnum)
else:
# We have to completely rely on the given target name
outfiles.append(t)
__builder.__call__(env, outfiles[0], s, **kw)
env.Depends(outfiles[0], kw['DOCBOOK_XSL'])
result.append(outfiles[0])
if len(outfiles) > 1:
env.Clean(outfiles[0], outfiles[1:])
return result
def DocbookSlidesPdf(env, target, source=None, *args, **kw):
"""
A pseudo-Builder, providing a Docbook toolchain for PDF slides output.
"""
# Init list of targets/sources
target, source = __extend_targets_sources(target, source)
# Init XSL stylesheet
__init_xsl_stylesheet(kw, env, '$DOCBOOK_DEFAULT_XSL_SLIDESPDF', ['slides','fo','plain.xsl'])
# Setup builder
__builder = __select_builder(__lxml_builder, __libxml2_builder, __xsltproc_builder)
# Create targets
result = []
for t,s in zip(target,source):
t, stem = __ensure_suffix_stem(t, '.pdf')
xsl = __builder.__call__(env, stem+'.fo', s, **kw)
env.Depends(xsl, kw['DOCBOOK_XSL'])
result.extend(xsl)
result.extend(__fop_builder.__call__(env, t, xsl, **kw))
return result
def DocbookSlidesHtml(env, target, source=None, *args, **kw):
"""
A pseudo-Builder, providing a Docbook toolchain for HTML slides output.
"""
# Init list of targets/sources
if not SCons.Util.is_List(target):
target = [target]
if not source:
source = target
target = ['index.html']
elif not SCons.Util.is_List(source):
source = [source]
# Init XSL stylesheet
__init_xsl_stylesheet(kw, env, '$DOCBOOK_DEFAULT_XSL_SLIDESHTML', ['slides','html','plain.xsl'])
# Setup builder
__builder = __select_builder(__lxml_builder, __libxml2_builder, __xsltproc_builder)
# Detect base dir
base_dir = kw.get('base_dir', '')
if base_dir:
__create_output_dir(base_dir)
# Create targets
result = []
r = __builder.__call__(env, __ensure_suffix(str(target[0]), '.html'), source[0], **kw)
env.Depends(r, kw['DOCBOOK_XSL'])
result.extend(r)
# Add supporting files for cleanup
env.Clean(r, [os.path.join(base_dir, 'toc.html')] +
glob.glob(os.path.join(base_dir, 'foil*.html')))
return result
def DocbookXInclude(env, target, source, *args, **kw):
"""
A pseudo-Builder, for resolving XIncludes in a separate processing step.
"""
# Init list of targets/sources
target, source = __extend_targets_sources(target, source)
# Setup builder
__builder = __select_builder(__xinclude_lxml_builder,__xinclude_libxml2_builder,__xmllint_builder)
# Create targets
result = []
for t,s in zip(target,source):
result.extend(__builder.__call__(env, t, s, **kw))
return result
def DocbookXslt(env, target, source=None, *args, **kw):
"""
A pseudo-Builder, applying a simple XSL transformation to the input file.
"""
# Init list of targets/sources
target, source = __extend_targets_sources(target, source)
# Init XSL stylesheet
kw['DOCBOOK_XSL'] = kw.get('xsl', 'transform.xsl')
# Setup builder
__builder = __select_builder(__lxml_builder, __libxml2_builder, __xsltproc_builder)
# Create targets
result = []
for t,s in zip(target,source):
r = __builder.__call__(env, t, s, **kw)
env.Depends(r, kw['DOCBOOK_XSL'])
result.extend(r)
return result
def generate(env):
"""Add Builders and construction variables for docbook to an Environment."""
env.SetDefault(
# Default names for customized XSL stylesheets
DOCBOOK_DEFAULT_XSL_EPUB = '',
DOCBOOK_DEFAULT_XSL_HTML = '',
DOCBOOK_DEFAULT_XSL_HTMLCHUNKED = '',
DOCBOOK_DEFAULT_XSL_HTMLHELP = '',
DOCBOOK_DEFAULT_XSL_PDF = '',
DOCBOOK_DEFAULT_XSL_MAN = '',
DOCBOOK_DEFAULT_XSL_SLIDESPDF = '',
DOCBOOK_DEFAULT_XSL_SLIDESHTML = '',
# Paths to the detected executables
DOCBOOK_XSLTPROC = '',
DOCBOOK_XMLLINT = '',
DOCBOOK_FOP = '',
# Additional flags for the text processors
DOCBOOK_XSLTPROCFLAGS = SCons.Util.CLVar(''),
DOCBOOK_XMLLINTFLAGS = SCons.Util.CLVar(''),
DOCBOOK_FOPFLAGS = SCons.Util.CLVar(''),
DOCBOOK_XSLTPROCPARAMS = SCons.Util.CLVar(''),
# Default command lines for the detected executables
DOCBOOK_XSLTPROCCOM = xsltproc_com['xsltproc'],
DOCBOOK_XMLLINTCOM = xmllint_com['xmllint'],
DOCBOOK_FOPCOM = fop_com['fop'],
# Screen output for the text processors
DOCBOOK_XSLTPROCCOMSTR = None,
DOCBOOK_XMLLINTCOMSTR = None,
DOCBOOK_FOPCOMSTR = None,
)
_detect(env)
env.AddMethod(DocbookEpub, "DocbookEpub")
env.AddMethod(DocbookHtml, "DocbookHtml")
env.AddMethod(DocbookHtmlChunked, "DocbookHtmlChunked")
env.AddMethod(DocbookHtmlhelp, "DocbookHtmlhelp")
env.AddMethod(DocbookPdf, "DocbookPdf")
env.AddMethod(DocbookMan, "DocbookMan")
env.AddMethod(DocbookSlidesPdf, "DocbookSlidesPdf")
env.AddMethod(DocbookSlidesHtml, "DocbookSlidesHtml")
env.AddMethod(DocbookXInclude, "DocbookXInclude")
env.AddMethod(DocbookXslt, "DocbookXslt")
def exists(env):
return 1
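# Hedged SConstruct sketch (assumes this tool directory is on the toolpath):
#
#   env = Environment(tools=['docbook'])
#   env.DocbookHtml('manual.html', 'manual.xml')
#   env.DocbookPdf('manual.pdf', 'manual.xml')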
|
mit
| 2,695,215,425,218,211,300
| 32.747696
| 130
| 0.595637
| false
| 3.621785
| false
| false
| false
|
smjhnits/Praktikum_TU_D_16-17
|
Anfängerpraktikum/Protokolle/V355_Gekoppelte_Schwingungen/LaTex-Dateien/Messungc_Plot1.py
|
1
|
2021
|
import numpy as np
from scipy.stats import sem
from uncertainties import ufloat
import uncertainties.unumpy as unp
import matplotlib.pyplot as plt
from scipy.optimize import curve_fit
L = 32.51 * 10 ** (-3)
C = 0.801 * 10 ** (-9)
Csp = 0.037 * 10 ** (-9)
R = 48
Start = np.array([30.85, 30.84, 30.83, 30.82, 30.81, 30.80, 30.79, 30.77]) * 10 ** (3)
Stop = np.array([55.05, 50, 40, 40, 40, 40, 40, 40]) * 10 ** (3)
Sweep_Zeit = 2
Zeiten = np.array([1.36, 1, 1.475, 1.125, 0.925, 0.740, 0.6, 0.5])
Nü_positiv = np.array([30.77, 30.79, 30.80, 30.81, 30.82, 30.83, 30.84, 30.85]) * 10 ** (3)
Kopplungskapazitäten = np.array([9.99, 8, 6.47, 5.02, 4.00, 3.00, 2.03, 1.01]) * 10 ** (-9)
C_K_Error = np.array([ufloat(n, 0.003*n) for n in Kopplungskapazitäten])
nu_m_theo = np.array([1 / ( 2 * np.pi * unp.sqrt( L * ( (1/C + 2/n)**(-1) + Csp) ) ) for n in C_K_Error])
nu_p_theo = 1 / ( 2 * np.pi * np.sqrt( L * ( C + Csp) ) )
nu_p_theo1 = np.array([nu_p_theo, nu_p_theo, nu_p_theo, nu_p_theo, nu_p_theo, nu_p_theo, nu_p_theo, nu_p_theo ])
nu_m_theo1 = np.array([unp.nominal_values(n) for n in nu_m_theo])
Differenzen = np.array([ Stop[i]-n for i,n in enumerate(Start)])
Zeitverhältniss = np.array([n/Sweep_Zeit for n in Zeiten])
Abstände = np.array([Differenzen[i]*n for i,n in enumerate(Zeitverhältniss)])
nu_m_expC = np.array([n + Abstände[i] for i,n in enumerate(Start)])
nu_m_expC1 = nu_m_expC[::-1]
plt.plot(Kopplungskapazitäten, unp.nominal_values(nu_m_expC1)*10**(-3), 'bx', label = r'Messung 3.3.1: $\nu_{-}$')
plt.plot(Kopplungskapazitäten, nu_m_theo1*10**(-3), 'rx', label = r'Theoriewerte: $\nu_{-}$')
plt.plot(Kopplungskapazitäten, Nü_positiv*10**(-3), 'mx', label = r'Messung 3.3.1: $\nu_{+}$')
plt.plot(Kopplungskapazitäten, nu_p_theo1*10**(-3), 'yx', label = r'Theoriewerte: $\nu_{+}$')
plt.xlabel(r'$Kopplungskapazität \,\, C_k \,\, in \,\, \mathrm{F}$')
plt.ylabel(r'$Frequenzen \,\, \nu \,\, in \,\, \mathrm{kHz}$')
plt.legend(loc = 'best')
plt.savefig('Messungc_Plot1.pdf')
plt.show()
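# Note added for clarity: the curves plotted above follow the standard
# coupled-LC-circuit results already used in the code,
#   nu_plus  = 1 / (2*pi*sqrt(L*(C + Csp)))
#   nu_minus = 1 / (2*pi*sqrt(L*((1/C + 2/C_K)**(-1) + Csp)))
# where C_K is the coupling capacitance.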
|
mit
| -687,076,574,687,488,800
| 44.636364
| 114
| 0.616036
| false
| 2.012024
| false
| false
| false
|
Southpaw-TACTIC/Team
|
src/python/Lib/site-packages/pythonwin/pywin/framework/editor/editor.py
|
1
|
16036
|
#####################################################################
#
# editor.py
#
# A general purpose text editor, built on top of the win32ui edit
# type, which is built on an MFC CEditView
#
#
# We now support reloading of externally modified documents
# (eg, presumably by some other process, such as source control or
# another editor).
# We also support auto-loading of externally modified files.
# - if the current document has not been modified in this
# editor, but has been modified on disk, then the file
# can be automatically reloaded.
#
# Note that it will _always_ prompt you if the file in the editor has been modified.
import win32ui
import win32api
import win32con
import regex
import re
import string
import sys, os
import traceback
from pywin.mfc import docview, dialog, afxres
from pywin.framework.editor import GetEditorOption, SetEditorOption, GetEditorFontOption, SetEditorFontOption, defaultCharacterFormat
patImport=regex.symcomp('import \(<name>.*\)')
patIndent=regex.compile('^\\([ \t]*[~ \t]\\)')
ID_LOCATE_FILE = 0xe200
ID_GOTO_LINE = 0xe201
MSG_CHECK_EXTERNAL_FILE = win32con.WM_USER+1999 ## WARNING: Duplicated in document.py and coloreditor.py
# Key Codes that modify the buffer when Ctrl or Alt are NOT pressed.
MODIFYING_VK_KEYS = [win32con.VK_BACK, win32con.VK_TAB, win32con.VK_RETURN, win32con.VK_SPACE, win32con.VK_DELETE]
for k in range(48, 91):
MODIFYING_VK_KEYS.append(k)
# Key Codes that modify the buffer when Ctrl is pressed.
MODIFYING_VK_KEYS_CTRL = [win32con.VK_BACK, win32con.VK_RETURN, win32con.VK_SPACE, win32con.VK_DELETE]
# Key Codes that modify the buffer when Alt is pressed.
MODIFYING_VK_KEYS_ALT = [win32con.VK_BACK, win32con.VK_RETURN, win32con.VK_SPACE, win32con.VK_DELETE]
# The editor itself starts here.
# Using the MFC Document/View model, we have an EditorDocument, which is responsible for
# managing the contents of the file, and a view which is responsible for rendering it.
#
# Due to a limitation in the Windows edit controls, we are limited to one view
# per document, although nothing in this code assumes this (I hope!)
isRichText=1 # We are using the Rich Text control. This has not been tested with value "0" for quite some time!
#ParentEditorDocument=docview.Document
from document import EditorDocumentBase
ParentEditorDocument=EditorDocumentBase
class EditorDocument(ParentEditorDocument):
#
# File loading and saving operations
#
def OnOpenDocument(self, filename):
#
# handle Unix and PC text file format.
#
# Get the "long name" of the file name, as it may have been translated
# to short names by the shell.
self.SetPathName(filename) # Must set this early!
# Now do the work!
self.BeginWaitCursor()
win32ui.SetStatusText("Loading file...",1)
try:
f = open(filename,"rb")
except IOError:
win32ui.MessageBox(filename + '\nCan not find this file\nPlease verify that the correct path and file name are given')
self.EndWaitCursor()
return 0
raw=f.read()
f.close()
contents = self.TranslateLoadedData(raw)
rc = 0
if win32ui.IsWin32s() and len(contents)>62000: # give or take a few bytes
win32ui.MessageBox("This file is too big for Python on Windows 3.1\r\nPlease use another editor to view this file.")
else:
try:
self.GetFirstView().SetWindowText(contents)
rc = 1
except TypeError: # Null byte in file.
win32ui.MessageBox("This file contains NULL bytes, and can not be edited")
rc = 0
self.EndWaitCursor()
self.SetModifiedFlag(0) # No longer dirty
self._DocumentStateChanged()
return rc
def TranslateLoadedData(self, data):
"""Given raw data read from a file, massage it suitable for the edit window"""
# if a CR in the first 250 chars, then perform the expensive translate
if string.find(data[:250],'\r')==-1:
win32ui.SetStatusText("Translating from Unix file format - please wait...",1)
return re.sub('\r*\n','\r\n',data)
else:
return data
def SaveFile(self, fileName):
if isRichText:
view = self.GetFirstView()
view.SaveTextFile(fileName)
else: # Old style edit view window.
self.GetFirstView().SaveFile(fileName)
try:
# Make sure line cache has updated info about me!
import linecache
linecache.checkcache()
except:
pass
#
# Color state stuff
#
def SetAllLineColors(self, color = None):
for view in self.GetAllViews():
view.SetAllLineColors(color)
def SetLineColor(self, lineNo, color):
"Color a line of all views"
for view in self.GetAllViews():
view.SetLineColor(lineNo, color)
# def StreamTextOut(self, data): ### This seems unreliable???
# self.saveFileHandle.write(data)
# return 1 # keep em coming!
#ParentEditorView=docview.EditView
ParentEditorView=docview.RichEditView
class EditorView(ParentEditorView):
def __init__(self, doc):
ParentEditorView.__init__(self, doc)
if isRichText:
self.SetWordWrap(win32ui.CRichEditView_WrapNone)
self.addToMRU = 1
self.HookHandlers()
self.bCheckingFile = 0
self.defCharFormat = GetEditorFontOption("Default Font", defaultCharacterFormat)
# Smart tabs override everything else if context can be worked out.
self.bSmartTabs = GetEditorOption("Smart Tabs", 1)
self.tabSize = GetEditorOption("Tab Size", 8)
self.indentSize = GetEditorOption("Indent Size", 8)
# If next indent is at a tab position, and useTabs is set, a tab will be inserted.
self.bUseTabs = GetEditorOption("Use Tabs", 1)
def OnInitialUpdate(self):
rc = self._obj_.OnInitialUpdate()
self.SetDefaultCharFormat(self.defCharFormat)
return rc
def CutCurLine(self):
curLine = self._obj_.LineFromChar()
nextLine = curLine+1
start = self._obj_.LineIndex(curLine)
end = self._obj_.LineIndex(nextLine)
if end==0: # must be last line.
end = start + len(self._obj_.GetLine(curLine)) # length of the last line
self._obj_.SetSel(start,end)
self._obj_.Cut()
def _PrepareUserStateChange(self):
"Return selection, lineindex, etc info, so it can be restored"
self.SetRedraw(0)
return self.GetModify(), self.GetSel(), self.GetFirstVisibleLine()
def _EndUserStateChange(self, info):
scrollOff = info[2] - self.GetFirstVisibleLine()
if scrollOff:
self.LineScroll(scrollOff)
self.SetSel(info[1])
self.SetModify(info[0])
self.SetRedraw(1)
self.InvalidateRect()
self.UpdateWindow()
def _UpdateUIForState(self):
self.SetReadOnly(self.GetDocument()._IsReadOnly())
def SetAllLineColors(self, color = None):
if isRichText:
info = self._PrepareUserStateChange()
try:
if color is None: color = self.defCharFormat[4]
self.SetSel(0,-1)
self.SetSelectionCharFormat((win32con.CFM_COLOR, 0,0,0,color))
finally:
self._EndUserStateChange(info)
def SetLineColor(self, lineNo, color):
"lineNo is the 1 based line number to set. If color is None, default color is used."
if isRichText:
info = self._PrepareUserStateChange()
try:
if color is None: color = self.defCharFormat[4]
lineNo = lineNo-1
startIndex = self.LineIndex(lineNo)
if startIndex!=-1:
self.SetSel(startIndex, self.LineIndex(lineNo+1))
self.SetSelectionCharFormat((win32con.CFM_COLOR, 0,0,0,color))
finally:
self._EndUserStateChange(info)
def Indent(self):
"""Insert an indent to move the cursor to the next tab position.
Honors the tab size and 'use tabs' settings. Assumes the cursor is already at the
position to be indented, and the selection is a single character (ie, not a block)
"""
start, end = self._obj_.GetSel()
startLine = self._obj_.LineFromChar(start)
line = self._obj_.GetLine(startLine)
realCol = start - self._obj_.LineIndex(startLine)
# Calculate the next tab stop.
# Expand existing tabs.
curCol = 0
for ch in line[:realCol]:
if ch=='\t':
curCol = ((curCol / self.tabSize) + 1) * self.tabSize
else:
curCol = curCol + 1
nextColumn = ((curCol / self.indentSize) + 1) * self.indentSize
# print "curCol is", curCol, "nextColumn is", nextColumn
ins = None
if self.bSmartTabs:
# Look for some context.
if realCol==0: # Start of the line - see if the line above can tell us
lookLine = startLine-1
while lookLine >= 0:
check = self._obj_.GetLine(lookLine)[0:1]
if check in ['\t', ' ']:
ins = check
break
lookLine = lookLine - 1
else: # See if the previous char can tell us
check = line[realCol-1]
if check in ['\t', ' ']:
ins = check
# Either smart tabs off, or not smart enough!
# Use the "old style" settings.
if ins is None:
if self.bUseTabs and nextColumn % self.tabSize==0:
ins = '\t'
else:
ins = ' '
if ins == ' ':
# Calc the number of spaces to take us to the next stop
ins = ins * (nextColumn - curCol)
self._obj_.ReplaceSel(ins)
def BlockDent(self, isIndent, startLine, endLine):
" Indent/Undent all lines specified "
if not self.GetDocument().CheckMakeDocumentWritable(): return 0
tabSize=self.tabSize # hard-code for now!
info = self._PrepareUserStateChange()
try:
for lineNo in range(startLine, endLine):
pos=self._obj_.LineIndex(lineNo)
self._obj_.SetSel(pos, pos)
if isIndent:
self.Indent()
else:
line = self._obj_.GetLine(lineNo)
try:
noToDel = 0
if line[0]=='\t':
noToDel = 1
elif line[0]==' ':
for noToDel in range(0,tabSize):
if line[noToDel]!=' ':
break
else:
noToDel=tabSize
if noToDel:
self._obj_.SetSel(pos, pos+noToDel)
self._obj_.Clear()
except IndexError:
pass
finally:
self._EndUserStateChange(info)
self.GetDocument().SetModifiedFlag(1) # Now dirty
self._obj_.SetSel(self.LineIndex(startLine), self.LineIndex(endLine))
def GotoLine(self, lineNo = None):
try:
if lineNo is None:
lineNo = string.atoi(raw_input("Enter Line Number"))
except (ValueError, KeyboardInterrupt):
return 0
self.GetLineCount() # Seems to be needed when file first opened???
charNo = self.LineIndex(lineNo-1)
self.SetSel(charNo)
def HookHandlers(self): # children can override, but should still call me!
# self.HookAllKeyStrokes(self.OnKey)
self.HookMessage(self.OnCheckExternalDocumentUpdated,MSG_CHECK_EXTERNAL_FILE)
self.HookMessage(self.OnRClick,win32con.WM_RBUTTONDOWN)
self.HookMessage(self.OnSetFocus, win32con.WM_SETFOCUS)
self.HookMessage(self.OnKeyDown, win32con.WM_KEYDOWN)
self.HookKeyStroke(self.OnKeyCtrlY, 25) # ^Y
self.HookKeyStroke(self.OnKeyCtrlG, 7) # ^G
self.HookKeyStroke(self.OnKeyTab, 9) # TAB
self.HookKeyStroke(self.OnKeyEnter, 13) # Enter
self.HookCommand(self.OnCmdLocateFile, ID_LOCATE_FILE)
self.HookCommand(self.OnCmdGotoLine, ID_GOTO_LINE)
self.HookCommand(self.OnEditPaste, afxres.ID_EDIT_PASTE)
self.HookCommand(self.OnEditCut, afxres.ID_EDIT_CUT)
# Hook Handlers
def OnSetFocus(self,msg):
# Even though we use file change notifications, we should be very sure about it here.
self.OnCheckExternalDocumentUpdated(msg)
def OnRClick(self,params):
menu = win32ui.CreatePopupMenu()
# look for a module name
line=string.strip(self._obj_.GetLine())
flags=win32con.MF_STRING|win32con.MF_ENABLED
if patImport.match(line)==len(line):
menu.AppendMenu(flags, ID_LOCATE_FILE, "&Locate %s.py"%patImport.group('name'))
menu.AppendMenu(win32con.MF_SEPARATOR);
menu.AppendMenu(flags, win32ui.ID_EDIT_UNDO, '&Undo')
menu.AppendMenu(win32con.MF_SEPARATOR);
menu.AppendMenu(flags, win32ui.ID_EDIT_CUT, 'Cu&t')
menu.AppendMenu(flags, win32ui.ID_EDIT_COPY, '&Copy')
menu.AppendMenu(flags, win32ui.ID_EDIT_PASTE, '&Paste')
menu.AppendMenu(flags, win32con.MF_SEPARATOR);
menu.AppendMenu(flags, win32ui.ID_EDIT_SELECT_ALL, '&Select all')
menu.AppendMenu(flags, win32con.MF_SEPARATOR);
menu.AppendMenu(flags, ID_GOTO_LINE, '&Goto line...')
menu.TrackPopupMenu(params[5])
return 0
def OnCmdGotoLine(self, cmd, code):
self.GotoLine()
return 0
def OnCmdLocateFile(self, cmd, code):
modName = patImport.group('name')
if not modName:
return 0
import pywin.framework.scriptutils
fileName = pywin.framework.scriptutils.LocatePythonFile(modName)
if fileName is None:
win32ui.SetStatusText("Can't locate module %s" % modName)
else:
win32ui.GetApp().OpenDocumentFile(fileName)
return 0
# Key handlers
def OnKeyEnter(self, key):
if not self.GetDocument().CheckMakeDocumentWritable(): return 0
curLine = self._obj_.GetLine()
self._obj_.ReplaceSel('\r\n') # insert the newline
# If the current line indicates the next should be indented,
# then copy the current indentation to this line.
res = patIndent.match(curLine,0)
if res>0 and string.strip(curLine):
curIndent = patIndent.group(1)
self._obj_.ReplaceSel(curIndent)
return 0 # dont pass on
def OnKeyCtrlY(self, key):
if not self.GetDocument().CheckMakeDocumentWritable(): return 0
self.CutCurLine()
return 0 # dont let him have it!
def OnKeyCtrlG(self, key):
self.GotoLine()
return 0 # dont let him have it!
def OnKeyTab(self, key):
if not self.GetDocument().CheckMakeDocumentWritable(): return 0
start, end = self._obj_.GetSel()
if start==end: # normal TAB key
self.Indent()
return 0 # we handled this.
# Otherwise it is a block indent/dedent.
if start>end:
start, end = end, start # swap them.
startLine = self._obj_.LineFromChar(start)
endLine = self._obj_.LineFromChar(end)
self.BlockDent(win32api.GetKeyState(win32con.VK_SHIFT)>=0, startLine, endLine)
return 0
def OnEditPaste(self, id, code):
# Return 1 if we can make the file editable (or it already is!)
return self.GetDocument().CheckMakeDocumentWritable()
def OnEditCut(self, id, code):
# Return 1 if we can make the file editable (or it already is!)
return self.GetDocument().CheckMakeDocumentWritable()
def OnKeyDown(self, msg):
key = msg[2]
if win32api.GetKeyState(win32con.VK_CONTROL) & 0x8000:
modList = MODIFYING_VK_KEYS_CTRL
elif win32api.GetKeyState(win32con.VK_MENU) & 0x8000:
modList = MODIFYING_VK_KEYS_ALT
else:
modList = MODIFYING_VK_KEYS
if key in modList:
# Return 1 if we can make the file editable (or it already is!)
return self.GetDocument().CheckMakeDocumentWritable()
return 1 # Pass it on OK
# def OnKey(self, key):
# return self.GetDocument().CheckMakeDocumentWritable()
def OnCheckExternalDocumentUpdated(self, msg):
if self._obj_ is None or self.bCheckingFile: return
self.bCheckingFile = 1
self.GetDocument().CheckExternalDocumentUpdated()
self.bCheckingFile = 0
from template import EditorTemplateBase
class EditorTemplate(EditorTemplateBase):
def __init__(self, res=win32ui.IDR_TEXTTYPE, makeDoc=None, makeFrame=None, makeView=None):
if makeDoc is None: makeDoc = EditorDocument
if makeView is None: makeView = EditorView
EditorTemplateBase.__init__(self, res, makeDoc, makeFrame, makeView)
def _CreateDocTemplate(self, resourceId):
return win32ui.CreateRichEditDocTemplate(resourceId)
def CreateWin32uiDocument(self):
return self.DoCreateRichEditDoc()
def Create(fileName = None, title=None, template = None):
return editorTemplate.OpenDocumentFile(fileName)
from pywin.framework.editor import GetDefaultEditorModuleName
prefModule = GetDefaultEditorModuleName()
# Initialize only if this is the "default" editor.
if __name__==prefModule:
# For debugging purposes, when this module may be reloaded many times.
try:
win32ui.GetApp().RemoveDocTemplate(editorTemplate)
except (NameError, win32ui.error):
pass
editorTemplate = EditorTemplate()
win32ui.GetApp().AddDocTemplate(editorTemplate)
|
epl-1.0
| 6,208,114,795,459,562,000
| 32.486022
| 133
| 0.695373
| false
| 3.128365
| false
| false
| false
|
ganeti/ganeti
|
lib/config/__init__.py
|
1
|
111167
|
#
#
# Copyright (C) 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013, 2014 Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Configuration management for Ganeti
This module provides the interface to the Ganeti cluster configuration.
The configuration data is stored on every node but is updated on the master
only. After each update, the master distributes the data to the other nodes.
Currently, the data storage format is JSON. YAML was slow and consuming too
much memory.
"""
# TODO: Break up this file into multiple chunks - Wconfd RPC calls, local config
# manipulations, grouped by object they operate on (cluster/instance/disk)
# pylint: disable=C0302
# pylint: disable=R0904
# R0904: Too many public methods
import copy
import os
import random
import logging
import time
import threading
import itertools
from ganeti.config.temporary_reservations import TemporaryReservationManager
from ganeti.config.utils import ConfigSync, ConfigManager
from ganeti.config.verify import (VerifyType, VerifyNic, VerifyIpolicy,
ValidateConfig)
from ganeti import errors
from ganeti import utils
from ganeti import constants
import ganeti.wconfd as wc
from ganeti import objects
from ganeti import serializer
from ganeti import uidpool
from ganeti import netutils
from ganeti import runtime
from ganeti import pathutils
from ganeti import network
def GetWConfdContext(ec_id, livelock):
"""Prepare a context for communication with WConfd.
WConfd needs to know the identity of each caller to properly manage locks and
detect job death. This helper function prepares the identity object given a
job ID (optional) and a livelock file.
@type ec_id: int, or None
@param ec_id: the job ID or None, if the caller isn't a job
@type livelock: L{ganeti.utils.livelock.LiveLock}
@param livelock: a livelock object holding the lockfile needed for WConfd
@return: the WConfd context
"""
if ec_id is None:
return (threading.current_thread().getName(),
livelock.GetPath(), os.getpid())
else:
return (ec_id,
livelock.GetPath(), os.getpid())
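# Illustrative identity triple (made-up values): a non-job caller yields
# something like ("MainThread", "/var/run/ganeti/livelock-x", 4711); WConfd
# uses it to attribute locks and to detect when the owner dies.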
def GetConfig(ec_id, livelock, **kwargs):
"""A utility function for constructing instances of ConfigWriter.
It prepares a WConfd context and uses it to create a ConfigWriter instance.
@type ec_id: int, or None
@param ec_id: the job ID or None, if the caller isn't a job
@type livelock: L{ganeti.utils.livelock.LiveLock}
@param livelock: a livelock object holding the lockfile needed for WConfd
@type kwargs: dict
@param kwargs: Any additional arguments for the ConfigWriter constructor
@rtype: L{ConfigWriter}
@return: the ConfigWriter context
"""
kwargs['wconfdcontext'] = GetWConfdContext(ec_id, livelock)
# if the config is to be opened in the accept_foreign mode, we should
# also tell the RPC client not to check for the master node
accept_foreign = kwargs.get('accept_foreign', False)
kwargs['wconfd'] = wc.Client(allow_non_master=accept_foreign)
return ConfigWriter(**kwargs)
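# Usage sketch (hypothetical caller, not part of this module):
# lock = ganeti.utils.livelock.LiveLock("gnt-example")
# cfg = GetConfig(None, lock) # non-job caller
# cfg = GetConfig(job_id, lock, accept_foreign=True) # from a job, foreign ok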
# job id used for resource management at config upgrade time
_UPGRADE_CONFIG_JID = "jid-cfg-upgrade"
def _MatchNameComponentIgnoreCase(short_name, names):
"""Wrapper around L{utils.text.MatchNameComponent}.
"""
return utils.MatchNameComponent(short_name, names, case_sensitive=False)
def _CheckInstanceDiskIvNames(disks):
"""Checks if instance's disks' C{iv_name} attributes are in order.
@type disks: list of L{objects.Disk}
@param disks: List of disks
@rtype: list of tuples; (int, string, string)
@return: List of wrongly named disks, each tuple contains disk index,
expected and actual name
"""
result = []
for (idx, disk) in enumerate(disks):
exp_iv_name = "disk/%s" % idx
if disk.iv_name != exp_iv_name:
result.append((idx, exp_iv_name, disk.iv_name))
return result
class ConfigWriter(object):
"""The interface to the cluster configuration.
WARNING: The class is no longer thread-safe!
Each thread must construct a separate instance.
@ivar _all_rms: a list of all temporary reservation managers
Currently the class fulfills 3 main functions:
1. lock the configuration for access (monitor)
2. reload and write the config if necessary (bridge)
3. provide convenient access methods to config data (facade)
"""
def __init__(self, cfg_file=None, offline=False, _getents=runtime.GetEnts,
accept_foreign=False, wconfdcontext=None, wconfd=None):
self.write_count = 0
self._config_data = None
self._SetConfigData(None)
self._offline = offline
if cfg_file is None:
self._cfg_file = pathutils.CLUSTER_CONF_FILE
else:
self._cfg_file = cfg_file
self._getents = _getents
self._temporary_ids = TemporaryReservationManager()
self._all_rms = [self._temporary_ids]
# Note: in order to prevent errors when resolving our name later,
# we compute it here once and reuse it; it's
# better to raise an error before starting to modify the config
# file than after it was modified
self._my_hostname = netutils.Hostname.GetSysName()
self._cfg_id = None
self._wconfdcontext = wconfdcontext
self._wconfd = wconfd
self._accept_foreign = accept_foreign
self._lock_count = 0
self._lock_current_shared = None
self._lock_forced = False
def _ConfigData(self):
return self._config_data
def OutDate(self):
self._config_data = None
def _SetConfigData(self, cfg):
self._config_data = cfg
def _GetWConfdContext(self):
return self._wconfdcontext
# this method needs to be static, so that we can call it on the class
@staticmethod
def IsCluster():
"""Check if the cluster is configured.
"""
return os.path.exists(pathutils.CLUSTER_CONF_FILE)
def _UnlockedGetNdParams(self, node):
nodegroup = self._UnlockedGetNodeGroup(node.group)
return self._ConfigData().cluster.FillND(node, nodegroup)
@ConfigSync(shared=1)
def GetNdParams(self, node):
"""Get the node params populated with cluster defaults.
@type node: L{objects.Node}
@param node: The node we want to know the params for
@return: A dict with the filled in node params
"""
return self._UnlockedGetNdParams(node)
@ConfigSync(shared=1)
def GetNdGroupParams(self, nodegroup):
"""Get the node groups params populated with cluster defaults.
@type nodegroup: L{objects.NodeGroup}
@param nodegroup: The node group we want to know the params for
@return: A dict with the filled in node group params
"""
return self._UnlockedGetNdGroupParams(nodegroup)
def _UnlockedGetNdGroupParams(self, group):
"""Get the ndparams of the group.
@type group: L{objects.NodeGroup}
@param group: The group we want to know the params for
@rtype: dict of str to int
@return: A dict with the filled in node group params
"""
return self._ConfigData().cluster.FillNDGroup(group)
@ConfigSync(shared=1)
def GetGroupSshPorts(self):
"""Get a map of group UUIDs to SSH ports.
@rtype: dict of str to int
@return: a dict mapping the UUIDs to the SSH ports
"""
port_map = {}
for uuid, group in self._config_data.nodegroups.items():
ndparams = self._UnlockedGetNdGroupParams(group)
port = ndparams.get(constants.ND_SSH_PORT)
port_map[uuid] = port
return port_map
@ConfigSync(shared=1)
def GetInstanceDiskParams(self, instance):
"""Get the disk params populated with inherit chain.
@type instance: L{objects.Instance}
@param instance: The instance we want to know the params for
@return: A dict with the filled in disk params
"""
node = self._UnlockedGetNodeInfo(instance.primary_node)
nodegroup = self._UnlockedGetNodeGroup(node.group)
return self._UnlockedGetGroupDiskParams(nodegroup)
def _UnlockedGetInstanceDisks(self, inst_uuid):
"""Return the disks' info for the given instance
@type inst_uuid: string
@param inst_uuid: The UUID of the instance we want to know the disks for
@rtype: List of L{objects.Disk}
@return: A list with all the disks' info
"""
instance = self._UnlockedGetInstanceInfo(inst_uuid)
if instance is None:
raise errors.ConfigurationError("Unknown instance '%s'" % inst_uuid)
return [self._UnlockedGetDiskInfo(disk_uuid)
for disk_uuid in instance.disks]
@ConfigSync(shared=1)
def GetInstanceDisks(self, inst_uuid):
"""Return the disks' info for the given instance
This is a simple wrapper over L{_UnlockedGetInstanceDisks}.
"""
return self._UnlockedGetInstanceDisks(inst_uuid)
def AddInstanceDisk(self, inst_uuid, disk, idx=None, replace=False):
"""Add a disk to the config and attach it to instance."""
if not isinstance(disk, objects.Disk):
raise errors.ProgrammerError("Invalid type passed to AddInstanceDisk")
disk.UpgradeConfig()
utils.SimpleRetry(True, self._wconfd.AddInstanceDisk, 0.1, 30,
args=[inst_uuid, disk.ToDict(), idx, replace])
self.OutDate()
def AttachInstanceDisk(self, inst_uuid, disk_uuid, idx=None):
"""Attach an existing disk to an instance."""
utils.SimpleRetry(True, self._wconfd.AttachInstanceDisk, 0.1, 30,
args=[inst_uuid, disk_uuid, idx])
self.OutDate()
def _UnlockedRemoveDisk(self, disk_uuid):
"""Remove the disk from the configuration.
@type disk_uuid: string
@param disk_uuid: The UUID of the disk object
"""
if disk_uuid not in self._ConfigData().disks:
raise errors.ConfigurationError("Disk %s doesn't exist" % disk_uuid)
# Disk must not be attached anywhere
for inst in self._ConfigData().instances.values():
if disk_uuid in inst.disks:
raise errors.ReservationError("Cannot remove disk %s. Disk is"
" attached to instance %s"
% (disk_uuid, inst.name))
# Remove disk from config file
del self._ConfigData().disks[disk_uuid]
self._ConfigData().cluster.serial_no += 1
def RemoveInstanceDisk(self, inst_uuid, disk_uuid):
"""Detach a disk from an instance and remove it from the config."""
utils.SimpleRetry(True, self._wconfd.RemoveInstanceDisk, 0.1, 30,
args=[inst_uuid, disk_uuid])
self.OutDate()
def DetachInstanceDisk(self, inst_uuid, disk_uuid):
"""Detach a disk from an instance."""
utils.SimpleRetry(True, self._wconfd.DetachInstanceDisk, 0.1, 30,
args=[inst_uuid, disk_uuid])
self.OutDate()
def _UnlockedGetDiskInfo(self, disk_uuid):
"""Returns information about a disk.
It takes the information from the configuration file.
@param disk_uuid: UUID of the disk
@rtype: L{objects.Disk}
@return: the disk object
"""
if disk_uuid not in self._ConfigData().disks:
return None
return self._ConfigData().disks[disk_uuid]
@ConfigSync(shared=1)
def GetDiskInfo(self, disk_uuid):
"""Returns information about a disk.
This is a simple wrapper over L{_UnlockedGetDiskInfo}.
"""
return self._UnlockedGetDiskInfo(disk_uuid)
def _UnlockedGetDiskInfoByName(self, disk_name):
"""Return information about a named disk.
Return disk information from the configuration file, searching with the
name of the disk.
@param disk_name: Name of the disk
@rtype: L{objects.Disk}
@return: the disk object
"""
disk = None
count = 0
for d in self._ConfigData().disks.values():
if d.name == disk_name:
count += 1
disk = d
if count > 1:
raise errors.ConfigurationError("There are %s disks with this name: %s"
% (count, disk_name))
return disk
@ConfigSync(shared=1)
def GetDiskInfoByName(self, disk_name):
"""Return information about a named disk.
This is a simple wrapper over L{_UnlockedGetDiskInfoByName}.
"""
return self._UnlockedGetDiskInfoByName(disk_name)
def _UnlockedGetDiskList(self):
"""Get the list of disks.
@return: array of disks, ex. ['disk2-uuid', 'disk1-uuid']
"""
return list(self._ConfigData().disks)
@ConfigSync(shared=1)
def GetAllDisksInfo(self):
"""Get the configuration of all disks.
This is a simple wrapper over L{_UnlockedGetAllDisksInfo}.
"""
return self._UnlockedGetAllDisksInfo()
def _UnlockedGetAllDisksInfo(self):
"""Get the configuration of all disks.
@rtype: dict
@return: dict of (disk, disk_info), where disk_info is what
would GetDiskInfo return for the disk
"""
my_dict = dict([(disk_uuid, self._UnlockedGetDiskInfo(disk_uuid))
for disk_uuid in self._UnlockedGetDiskList()])
return my_dict
def _AllInstanceNodes(self, inst_uuid):
"""Compute the set of all disk-related nodes for an instance.
This abstracts away some work from '_UnlockedGetInstanceNodes'
and '_UnlockedGetInstanceSecondaryNodes'.
@type inst_uuid: string
@param inst_uuid: The UUID of the instance we want to get nodes for
@rtype: tuple of (set of strings, L{objects.Instance})
@return: a set of UUIDs for all the disk-related nodes of the instance,
together with the instance object
"""
instance = self._UnlockedGetInstanceInfo(inst_uuid)
if instance is None:
raise errors.ConfigurationError("Unknown instance '%s'" % inst_uuid)
instance_disks = self._UnlockedGetInstanceDisks(inst_uuid)
all_nodes = []
for disk in instance_disks:
all_nodes.extend(disk.all_nodes)
return (set(all_nodes), instance)
def _UnlockedGetInstanceNodes(self, inst_uuid):
"""Get all disk-related nodes for an instance.
For non-DRBD instances, this will contain only the instance's primary node,
whereas for DRBD instances, it will contain both the primary and the
secondaries.
@type inst_uuid: string
@param inst_uuid: The UUID of the instance we want to get nodes for
@rtype: tuple of strings
@return: A tuple of node UUIDs for all the nodes of the instance, with
the primary node first
"""
(all_nodes, instance) = self._AllInstanceNodes(inst_uuid)
# ensure that primary node is always the first
all_nodes.discard(instance.primary_node)
return (instance.primary_node, ) + tuple(all_nodes)
@ConfigSync(shared=1)
def GetInstanceNodes(self, inst_uuid):
"""Get all disk-related nodes for an instance.
This is just a wrapper over L{_UnlockedGetInstanceNodes}
"""
return self._UnlockedGetInstanceNodes(inst_uuid)
def _UnlockedGetInstanceSecondaryNodes(self, inst_uuid):
"""Get the list of secondary nodes.
@type inst_uuid: string
@param inst_uuid: The UUID of the instance we want to get nodes for
@rtype: tuple of strings
@return: A tuple of node UUIDs for all the secondary nodes of the instance
"""
(all_nodes, instance) = self._AllInstanceNodes(inst_uuid)
all_nodes.discard(instance.primary_node)
return tuple(all_nodes)
@ConfigSync(shared=1)
def GetInstanceSecondaryNodes(self, inst_uuid):
"""Get the list of secondary nodes.
This is a simple wrapper over L{_UnlockedGetInstanceSecondaryNodes}.
"""
return self._UnlockedGetInstanceSecondaryNodes(inst_uuid)
def _UnlockedGetInstanceLVsByNode(self, inst_uuid, lvmap=None):
"""Provide a mapping of node to LVs a given instance owns.
@type inst_uuid: string
@param inst_uuid: The UUID of the instance we want to
compute the LVsByNode for
@type lvmap: dict
@param lvmap: Optional dictionary to receive the
'node' : ['lv', ...] data.
@rtype: dict or None
@return: None if lvmap arg is given, otherwise, a dictionary of
the form { 'node_uuid' : ['volume1', 'volume2', ...], ... };
volumeN is of the form "vg_name/lv_name", compatible with
GetVolumeList()
"""
def _MapLVsByNode(lvmap, devices, node_uuid):
"""Recursive helper function."""
if not node_uuid in lvmap:
lvmap[node_uuid] = []
for dev in devices:
if dev.dev_type == constants.DT_PLAIN:
if not dev.forthcoming:
lvmap[node_uuid].append(dev.logical_id[0] + "/" + dev.logical_id[1])
elif dev.dev_type in constants.DTS_DRBD:
if dev.children:
_MapLVsByNode(lvmap, dev.children, dev.logical_id[0])
_MapLVsByNode(lvmap, dev.children, dev.logical_id[1])
elif dev.children:
_MapLVsByNode(lvmap, dev.children, node_uuid)
instance = self._UnlockedGetInstanceInfo(inst_uuid)
if instance is None:
raise errors.ConfigurationError("Unknown instance '%s'" % inst_uuid)
if lvmap is None:
lvmap = {}
ret = lvmap
else:
ret = None
_MapLVsByNode(lvmap,
self._UnlockedGetInstanceDisks(instance.uuid),
instance.primary_node)
return ret
@ConfigSync(shared=1)
def GetInstanceLVsByNode(self, inst_uuid, lvmap=None):
"""Provide a mapping of node to LVs a given instance owns.
This is a simple wrapper over L{_UnlockedGetInstanceLVsByNode}
"""
return self._UnlockedGetInstanceLVsByNode(inst_uuid, lvmap=lvmap)
@ConfigSync(shared=1)
def GetGroupDiskParams(self, group):
"""Get the disk params populated with inherit chain.
@type group: L{objects.NodeGroup}
@param group: The group we want to know the params for
@return: A dict with the filled in disk params
"""
return self._UnlockedGetGroupDiskParams(group)
def _UnlockedGetGroupDiskParams(self, group):
"""Get the disk params populated with inherit chain down to node-group.
@type group: L{objects.NodeGroup}
@param group: The group we want to know the params for
@return: A dict with the filled in disk params
"""
data = self._ConfigData().cluster.SimpleFillDP(group.diskparams)
assert isinstance(data, dict), "Not a dictionary: " + str(data)
return data
@ConfigSync(shared=1)
def GetPotentialMasterCandidates(self):
"""Gets the list of node names of potential master candidates.
@rtype: list of str
@return: list of node names of potential master candidates
"""
# FIXME: Note that currently potential master candidates are nodes
# but this definition will be extended once RAPI-unmodifiable
# parameters are introduced.
nodes = self._UnlockedGetAllNodesInfo()
return [node_info.name for node_info in nodes.values()]
def GenerateMAC(self, net_uuid, _ec_id):
"""Generate a MAC for an instance.
This should check the current instances for duplicates.
"""
return self._wconfd.GenerateMAC(self._GetWConfdContext(), net_uuid)
def ReserveMAC(self, mac, _ec_id):
"""Reserve a MAC for an instance.
This only checks instances managed by this cluster, it does not
check for potential collisions elsewhere.
"""
self._wconfd.ReserveMAC(self._GetWConfdContext(), mac)
@ConfigSync(shared=1)
def CommitTemporaryIps(self, _ec_id):
"""Tell WConfD to commit all temporary ids"""
self._wconfd.CommitTemporaryIps(self._GetWConfdContext())
def ReleaseIp(self, net_uuid, address, _ec_id):
"""Give a specific IP address back to an IP pool.
The IP address is returned to the IP pool and marked as reserved.
"""
if net_uuid:
if self._offline:
raise errors.ProgrammerError("Can't call ReleaseIp in offline mode")
self._wconfd.ReleaseIp(self._GetWConfdContext(), net_uuid, address)
def GenerateIp(self, net_uuid, _ec_id):
"""Find a free IPv4 address for an instance.
"""
if self._offline:
raise errors.ProgrammerError("Can't call GenerateIp in offline mode")
return self._wconfd.GenerateIp(self._GetWConfdContext(), net_uuid)
def ReserveIp(self, net_uuid, address, _ec_id, check=True):
"""Reserve a given IPv4 address for use by an instance.
"""
if self._offline:
raise errors.ProgrammerError("Can't call ReserveIp in offline mode")
return self._wconfd.ReserveIp(self._GetWConfdContext(), net_uuid, address,
check)
def ReserveLV(self, lv_name, _ec_id):
"""Reserve an VG/LV pair for an instance.
@type lv_name: string
@param lv_name: the logical volume name to reserve
"""
return self._wconfd.ReserveLV(self._GetWConfdContext(), lv_name)
def GenerateDRBDSecret(self, _ec_id):
"""Generate a DRBD secret.
This checks the current disks for duplicates.
"""
return self._wconfd.GenerateDRBDSecret(self._GetWConfdContext())
# FIXME: After _AllIDs is removed, move it to config_mock.py
def _AllLVs(self):
"""Compute the list of all LVs.
"""
lvnames = set()
for instance in self._ConfigData().instances.values():
node_data = self._UnlockedGetInstanceLVsByNode(instance.uuid)
for lv_list in node_data.values():
lvnames.update(lv_list)
return lvnames
def _AllNICs(self):
"""Compute the list of all NICs.
"""
nics = []
for instance in self._ConfigData().instances.values():
nics.extend(instance.nics)
return nics
def _AllIDs(self, include_temporary):
"""Compute the list of all UUIDs and names we have.
@type include_temporary: boolean
@param include_temporary: whether to include the _temporary_ids set
@rtype: set
@return: a set of IDs
"""
existing = set()
if include_temporary:
existing.update(self._temporary_ids.GetReserved())
existing.update(self._AllLVs())
existing.update(self._ConfigData().instances)
existing.update(self._ConfigData().nodes)
existing.update([i.uuid for i in self._AllUUIDObjects() if i.uuid])
return existing
def _GenerateUniqueID(self, ec_id):
"""Generate an unique UUID.
This checks the current node, instances and disk names for
duplicates.
@rtype: string
@return: the unique id
"""
existing = self._AllIDs(include_temporary=False)
return self._temporary_ids.Generate(existing, utils.NewUUID, ec_id)
@ConfigSync(shared=1)
def GenerateUniqueID(self, ec_id):
"""Generate an unique ID.
This is just a wrapper over the unlocked version.
@type ec_id: string
@param ec_id: unique id for the job to reserve the id to
"""
return self._GenerateUniqueID(ec_id)
def _AllMACs(self):
"""Return all MACs present in the config.
@rtype: list
@return: the list of all MACs
"""
result = []
for instance in self._ConfigData().instances.values():
for nic in instance.nics:
result.append(nic.mac)
return result
def _AllDRBDSecrets(self):
"""Return all DRBD secrets present in the config.
@rtype: list
@return: the list of all DRBD secrets
"""
def helper(disk, result):
"""Recursively gather secrets from this disk."""
if disk.dev_type == constants.DT_DRBD8:
result.append(disk.logical_id[5])
if disk.children:
for child in disk.children:
helper(child, result)
result = []
for disk in self._ConfigData().disks.values():
helper(disk, result)
return result
@staticmethod
def _VerifyDisks(data, result):
"""Per-disk verification checks
Extends L{result} with diagnostic information about the disks.
@type data: see L{_ConfigData}
@param data: configuration data
@type result: list of strings
@param result: list containing diagnostic messages
"""
for disk_uuid in data.disks:
disk = data.disks[disk_uuid]
result.extend(["disk %s error: %s" % (disk.uuid, msg)
for msg in disk.Verify()])
if disk.uuid != disk_uuid:
result.append("disk '%s' is indexed by wrong UUID '%s'" %
(disk.name, disk_uuid))
def _UnlockedVerifyConfig(self):
"""Verify function.
@rtype: list
@return: a list of error messages; a non-empty list signifies
configuration errors
"""
# pylint: disable=R0914
result = []
seen_macs = []
ports = {}
data = self._ConfigData()
cluster = data.cluster
# First call WConfd to perform its checks, if we're not offline
if not self._offline:
try:
self._wconfd.VerifyConfig()
except errors.ConfigVerifyError as err:
try:
for msg in err.args[1]:
result.append(msg)
except IndexError:
pass
# check cluster parameters
VerifyType("cluster", "beparams", cluster.SimpleFillBE({}),
constants.BES_PARAMETER_TYPES, result.append)
VerifyType("cluster", "nicparams", cluster.SimpleFillNIC({}),
constants.NICS_PARAMETER_TYPES, result.append)
VerifyNic("cluster", cluster.SimpleFillNIC({}), result.append)
VerifyType("cluster", "ndparams", cluster.SimpleFillND({}),
constants.NDS_PARAMETER_TYPES, result.append)
VerifyIpolicy("cluster", cluster.ipolicy, True, result.append)
for disk_template in cluster.diskparams:
if disk_template not in constants.DTS_HAVE_ACCESS:
continue
access = cluster.diskparams[disk_template].get(constants.LDP_ACCESS,
constants.DISK_KERNELSPACE)
if access not in constants.DISK_VALID_ACCESS_MODES:
result.append(
"Invalid value of '%s:%s': '%s' (expected one of %s)" % (
disk_template, constants.LDP_ACCESS, access,
utils.CommaJoin(constants.DISK_VALID_ACCESS_MODES)
)
)
self._VerifyDisks(data, result)
# per-instance checks
for instance_uuid in data.instances:
instance = data.instances[instance_uuid]
if instance.uuid != instance_uuid:
result.append("instance '%s' is indexed by wrong UUID '%s'" %
(instance.name, instance_uuid))
if instance.primary_node not in data.nodes:
result.append("instance '%s' has invalid primary node '%s'" %
(instance.name, instance.primary_node))
for snode in self._UnlockedGetInstanceSecondaryNodes(instance.uuid):
if snode not in data.nodes:
result.append("instance '%s' has invalid secondary node '%s'" %
(instance.name, snode))
for idx, nic in enumerate(instance.nics):
if nic.mac in seen_macs:
result.append("instance '%s' has NIC %d mac %s duplicate" %
(instance.name, idx, nic.mac))
else:
seen_macs.append(nic.mac)
if nic.nicparams:
filled = cluster.SimpleFillNIC(nic.nicparams)
owner = "instance %s nic %d" % (instance.name, idx)
VerifyType(owner, "nicparams",
filled, constants.NICS_PARAMETER_TYPES, result.append)
VerifyNic(owner, filled, result.append)
# parameter checks
if instance.beparams:
VerifyType("instance %s" % instance.name, "beparams",
cluster.FillBE(instance), constants.BES_PARAMETER_TYPES,
result.append)
# check that disks exists
for disk_uuid in instance.disks:
if disk_uuid not in data.disks:
result.append("Instance '%s' has invalid disk '%s'" %
(instance.name, disk_uuid))
instance_disks = self._UnlockedGetInstanceDisks(instance.uuid)
# gather the drbd ports for duplicate checks
for (idx, dsk) in enumerate(instance_disks):
if dsk.dev_type in constants.DTS_DRBD:
tcp_port = dsk.logical_id[2]
if tcp_port not in ports:
ports[tcp_port] = []
ports[tcp_port].append((instance.name, "drbd disk %s" % idx))
# gather network port reservation
net_port = getattr(instance, "network_port", None)
if net_port is not None:
if net_port not in ports:
ports[net_port] = []
ports[net_port].append((instance.name, "network port"))
wrong_names = _CheckInstanceDiskIvNames(instance_disks)
if wrong_names:
tmp = "; ".join(("name of disk %s should be '%s', but is '%s'" %
(idx, exp_name, actual_name))
for (idx, exp_name, actual_name) in wrong_names)
result.append("Instance '%s' has wrongly named disks: %s" %
(instance.name, tmp))
# cluster-wide pool of free ports
for free_port in cluster.tcpudp_port_pool:
if free_port not in ports:
ports[free_port] = []
ports[free_port].append(("cluster", "port marked as free"))
# compute tcp/udp duplicate ports
keys = list(ports)
keys.sort()
for pnum in keys:
pdata = ports[pnum]
if len(pdata) > 1:
txt = utils.CommaJoin(["%s/%s" % val for val in pdata])
result.append("tcp/udp port %s has duplicates: %s" % (pnum, txt))
# highest used tcp port check
if keys:
if keys[-1] > cluster.highest_used_port:
result.append("Highest used port mismatch, saved %s, computed %s" %
(cluster.highest_used_port, keys[-1]))
if not data.nodes[cluster.master_node].master_candidate:
result.append("Master node is not a master candidate")
# master candidate checks
mc_now, mc_max, _ = self._UnlockedGetMasterCandidateStats()
if mc_now < mc_max:
result.append("Not enough master candidates: actual %d, target %d" %
(mc_now, mc_max))
# node checks
for node_uuid, node in data.nodes.items():
if node.uuid != node_uuid:
result.append("Node '%s' is indexed by wrong UUID '%s'" %
(node.name, node_uuid))
if [node.master_candidate, node.drained, node.offline].count(True) > 1:
result.append("Node %s state is invalid: master_candidate=%s,"
" drain=%s, offline=%s" %
(node.name, node.master_candidate, node.drained,
node.offline))
if node.group not in data.nodegroups:
result.append("Node '%s' has invalid group '%s'" %
(node.name, node.group))
else:
VerifyType("node %s" % node.name, "ndparams",
cluster.FillND(node, data.nodegroups[node.group]),
constants.NDS_PARAMETER_TYPES, result.append)
used_globals = constants.NDC_GLOBALS.intersection(node.ndparams)
if used_globals:
result.append("Node '%s' has some global parameters set: %s" %
(node.name, utils.CommaJoin(used_globals)))
# nodegroups checks
nodegroups_names = set()
for nodegroup_uuid in data.nodegroups:
nodegroup = data.nodegroups[nodegroup_uuid]
if nodegroup.uuid != nodegroup_uuid:
result.append("node group '%s' (uuid: '%s') indexed by wrong uuid '%s'"
% (nodegroup.name, nodegroup.uuid, nodegroup_uuid))
if utils.UUID_RE.match(nodegroup.name.lower()):
result.append("node group '%s' (uuid: '%s') has uuid-like name" %
(nodegroup.name, nodegroup.uuid))
if nodegroup.name in nodegroups_names:
result.append("duplicate node group name '%s'" % nodegroup.name)
else:
nodegroups_names.add(nodegroup.name)
group_name = "group %s" % nodegroup.name
VerifyIpolicy(group_name, cluster.SimpleFillIPolicy(nodegroup.ipolicy),
False, result.append)
if nodegroup.ndparams:
VerifyType(group_name, "ndparams",
cluster.SimpleFillND(nodegroup.ndparams),
constants.NDS_PARAMETER_TYPES, result.append)
# drbd minors check
# FIXME: The check for DRBD map needs to be implemented in WConfd
# IP checks
default_nicparams = cluster.nicparams[constants.PP_DEFAULT]
ips = {}
def _AddIpAddress(ip, name):
ips.setdefault(ip, []).append(name)
_AddIpAddress(cluster.master_ip, "cluster_ip")
for node in data.nodes.values():
_AddIpAddress(node.primary_ip, "node:%s/primary" % node.name)
if node.secondary_ip != node.primary_ip:
_AddIpAddress(node.secondary_ip, "node:%s/secondary" % node.name)
for instance in data.instances.values():
for idx, nic in enumerate(instance.nics):
if nic.ip is None:
continue
nicparams = objects.FillDict(default_nicparams, nic.nicparams)
nic_mode = nicparams[constants.NIC_MODE]
nic_link = nicparams[constants.NIC_LINK]
if nic_mode == constants.NIC_MODE_BRIDGED:
link = "bridge:%s" % nic_link
elif nic_mode == constants.NIC_MODE_ROUTED:
link = "route:%s" % nic_link
elif nic_mode == constants.NIC_MODE_OVS:
link = "ovs:%s" % nic_link
else:
raise errors.ProgrammerError("NIC mode '%s' not handled" % nic_mode)
_AddIpAddress("%s/%s/%s" % (link, nic.ip, nic.network),
"instance:%s/nic:%d" % (instance.name, idx))
for ip, owners in ips.items():
if len(owners) > 1:
result.append("IP address %s is used by multiple owners: %s" %
(ip, utils.CommaJoin(owners)))
return result
@ConfigSync(shared=1)
def VerifyConfigAndLog(self, feedback_fn=None):
"""A simple wrapper around L{_UnlockedVerifyConfigAndLog}"""
return self._UnlockedVerifyConfigAndLog(feedback_fn=feedback_fn)
def _UnlockedVerifyConfigAndLog(self, feedback_fn=None):
"""Verify the configuration and log any errors.
The errors get logged as critical errors and also to the feedback function,
if given.
@param feedback_fn: Callable feedback function
@rtype: list
@return: a list of error messages; a non-empty list signifies
configuration errors
"""
assert feedback_fn is None or callable(feedback_fn)
# Warn on config errors, but don't abort the save - the
# configuration has already been modified, and we can't revert;
# the best we can do is to warn the user and save as is, leaving
# recovery to the user
config_errors = self._UnlockedVerifyConfig()
if config_errors:
errmsg = ("Configuration data is not consistent: %s" %
(utils.CommaJoin(config_errors)))
logging.critical(errmsg)
if feedback_fn:
feedback_fn(errmsg)
return config_errors
@ConfigSync(shared=1)
def VerifyConfig(self):
"""Verify function.
This is just a wrapper over L{_UnlockedVerifyConfig}.
@rtype: list
@return: a list of error messages; a non-empty list signifies
configuration errors
"""
return self._UnlockedVerifyConfig()
def AddTcpUdpPort(self, port):
"""Adds a new port to the available port pool."""
utils.SimpleRetry(True, self._wconfd.AddTcpUdpPort, 0.1, 30, args=[port])
self.OutDate()
@ConfigSync(shared=1)
def GetPortList(self):
"""Returns a copy of the current port list.
"""
return self._ConfigData().cluster.tcpudp_port_pool.copy()
def AllocatePort(self):
"""Allocate a port."""
def WithRetry():
port = self._wconfd.AllocatePort()
self.OutDate()
if port is None:
raise utils.RetryAgain()
else:
return port
return utils.Retry(WithRetry, 0.1, 30)
@ConfigSync(shared=1)
def ComputeDRBDMap(self):
"""Compute the used DRBD minor/nodes.
This is just a wrapper over a call to WConfd.
@return: dictionary of node_uuid: dict of minor: instance_uuid;
the returned dict will have all the nodes in it (even if with
an empty list).
"""
if self._offline:
raise errors.ProgrammerError("Can't call ComputeDRBDMap in offline mode")
else:
return dict((k, dict(v)) for (k, v) in self._wconfd.ComputeDRBDMap())
def AllocateDRBDMinor(self, node_uuids, disk_uuid):
"""Allocate a drbd minor.
This is just a wrapper over a call to WConfd.
The free minor will be automatically computed from the existing
devices. A node cannot be given multiple times.
The result is the list of minors, in the same
order as the passed nodes.
@type node_uuids: list of strings
@param node_uuids: the nodes in which we allocate minors
@type disk_uuid: string
@param disk_uuid: the disk for which we allocate minors
@rtype: list of ints
@return: A list of minors in the same order as the passed nodes
"""
assert isinstance(disk_uuid, str), \
"Invalid argument '%s' passed to AllocateDRBDMinor" % disk_uuid
if self._offline:
raise errors.ProgrammerError("Can't call AllocateDRBDMinor"
" in offline mode")
result = self._wconfd.AllocateDRBDMinor(disk_uuid, node_uuids)
logging.debug("Request to allocate drbd minors, input: %s, returning %s",
node_uuids, result)
return result
def ReleaseDRBDMinors(self, disk_uuid):
"""Release temporary drbd minors allocated for a given disk.
This is just a wrapper over a call to WConfd.
@type disk_uuid: string
@param disk_uuid: the disk for which temporary minors should be released
"""
assert isinstance(disk_uuid, str), \
"Invalid argument passed to ReleaseDRBDMinors"
# in offline mode we allow the calls to release DRBD minors,
# because then nothing can be allocated anyway;
# this is useful for testing
if not self._offline:
self._wconfd.ReleaseDRBDMinors(disk_uuid)
@ConfigSync(shared=1)
def GetInstanceDiskTemplate(self, inst_uuid):
"""Return the disk template of an instance.
This corresponds to the currently attached disks. If no disks are attached,
it is L{constants.DT_DISKLESS}; if all attached disks share one disk type,
that type is returned; otherwise L{constants.DT_MIXED} is returned.
@type inst_uuid: str
@param inst_uuid: The uuid of the instance.
"""
return utils.GetDiskTemplate(self._UnlockedGetInstanceDisks(inst_uuid))
@ConfigSync(shared=1)
def GetConfigVersion(self):
"""Get the configuration version.
@return: Config version
"""
return self._ConfigData().version
@ConfigSync(shared=1)
def GetClusterName(self):
"""Get cluster name.
@return: Cluster name
"""
return self._ConfigData().cluster.cluster_name
@ConfigSync(shared=1)
def GetMasterNode(self):
"""Get the UUID of the master node for this cluster.
@return: Master node UUID
"""
return self._ConfigData().cluster.master_node
@ConfigSync(shared=1)
def GetMasterNodeName(self):
"""Get the hostname of the master node for this cluster.
@return: Master node hostname
"""
return self._UnlockedGetNodeName(self._ConfigData().cluster.master_node)
@ConfigSync(shared=1)
def GetMasterNodeInfo(self):
"""Get the master node information for this cluster.
@rtype: objects.Node
@return: Master node L{objects.Node} object
"""
return self._UnlockedGetNodeInfo(self._ConfigData().cluster.master_node)
@ConfigSync(shared=1)
def GetMasterIP(self):
"""Get the IP of the master node for this cluster.
@return: Master IP
"""
return self._ConfigData().cluster.master_ip
@ConfigSync(shared=1)
def GetMasterNetdev(self):
"""Get the master network device for this cluster.
"""
return self._ConfigData().cluster.master_netdev
@ConfigSync(shared=1)
def GetMasterNetmask(self):
"""Get the netmask of the master node for this cluster.
"""
return self._ConfigData().cluster.master_netmask
@ConfigSync(shared=1)
def GetUseExternalMipScript(self):
"""Get flag representing whether to use the external master IP setup script.
"""
return self._ConfigData().cluster.use_external_mip_script
@ConfigSync(shared=1)
def GetFileStorageDir(self):
"""Get the file storage dir for this cluster.
"""
return self._ConfigData().cluster.file_storage_dir
@ConfigSync(shared=1)
def GetSharedFileStorageDir(self):
"""Get the shared file storage dir for this cluster.
"""
return self._ConfigData().cluster.shared_file_storage_dir
@ConfigSync(shared=1)
def GetGlusterStorageDir(self):
"""Get the Gluster storage dir for this cluster.
"""
return self._ConfigData().cluster.gluster_storage_dir
@ConfigSync(shared=1)
def GetHypervisorType(self):
"""Get the hypervisor type for this cluster.
"""
return self._ConfigData().cluster.enabled_hypervisors[0]
@ConfigSync(shared=1)
def GetRsaHostKey(self):
"""Return the rsa hostkey from the config.
@rtype: string
@return: the rsa hostkey
"""
return self._ConfigData().cluster.rsahostkeypub
@ConfigSync(shared=1)
def GetDsaHostKey(self):
"""Return the dsa hostkey from the config.
@rtype: string
@return: the dsa hostkey
"""
return self._ConfigData().cluster.dsahostkeypub
@ConfigSync(shared=1)
def GetDefaultIAllocator(self):
"""Get the default instance allocator for this cluster.
"""
return self._ConfigData().cluster.default_iallocator
@ConfigSync(shared=1)
def GetDefaultIAllocatorParameters(self):
"""Get the default instance allocator parameters for this cluster.
@rtype: dict
@return: dict of iallocator parameters
"""
return self._ConfigData().cluster.default_iallocator_params
@ConfigSync(shared=1)
def GetPrimaryIPFamily(self):
"""Get cluster primary ip family.
@return: primary ip family
"""
return self._ConfigData().cluster.primary_ip_family
@ConfigSync(shared=1)
def GetMasterNetworkParameters(self):
"""Get network parameters of the master node.
@rtype: L{objects.MasterNetworkParameters}
@return: network parameters of the master node
"""
cluster = self._ConfigData().cluster
result = objects.MasterNetworkParameters(
uuid=cluster.master_node, ip=cluster.master_ip,
netmask=cluster.master_netmask, netdev=cluster.master_netdev,
ip_family=cluster.primary_ip_family)
return result
@ConfigSync(shared=1)
def GetInstallImage(self):
"""Get the install image location
@rtype: string
@return: location of the install image
"""
return self._ConfigData().cluster.install_image
@ConfigSync()
def SetInstallImage(self, install_image):
"""Set the install image location
@type install_image: string
@param install_image: location of the install image
"""
self._ConfigData().cluster.install_image = install_image
@ConfigSync(shared=1)
def GetInstanceCommunicationNetwork(self):
"""Get cluster instance communication network
@rtype: string
@return: instance communication network, which is the name of the
network used for instance communication
"""
return self._ConfigData().cluster.instance_communication_network
@ConfigSync()
def SetInstanceCommunicationNetwork(self, network_name):
"""Set cluster instance communication network
@type network_name: string
@param network_name: instance communication network, which is the name of
the network used for instance communication
"""
self._ConfigData().cluster.instance_communication_network = network_name
@ConfigSync(shared=1)
def GetZeroingImage(self):
"""Get the zeroing image location
@rtype: string
@return: the location of the zeroing image
"""
return self._config_data.cluster.zeroing_image
@ConfigSync(shared=1)
def GetCompressionTools(self):
"""Get cluster compression tools
@rtype: list of string
@return: a list of tools that are cleared for use in this cluster for the
purpose of compressing data
"""
return self._ConfigData().cluster.compression_tools
@ConfigSync()
def SetCompressionTools(self, tools):
"""Set cluster compression tools
@type tools: list of string
@param tools: a list of tools that are cleared for use in this cluster for
the purpose of compressing data
"""
self._ConfigData().cluster.compression_tools = tools
@ConfigSync()
def AddNodeGroup(self, group, ec_id, check_uuid=True):
"""Add a node group to the configuration.
This method calls group.UpgradeConfig() to fill any missing attributes
according to their default values.
@type group: L{objects.NodeGroup}
@param group: the NodeGroup object to add
@type ec_id: string
@param ec_id: unique id for the job to use when creating a missing UUID
@type check_uuid: bool
@param check_uuid: add a UUID to the group if it doesn't have one or, if
it does, ensure that it does not exist in the
configuration already
"""
self._UnlockedAddNodeGroup(group, ec_id, check_uuid)
def _UnlockedAddNodeGroup(self, group, ec_id, check_uuid):
"""Add a node group to the configuration.
"""
logging.info("Adding node group %s to configuration", group.name)
# Some code might need to add a node group with a pre-populated UUID
# generated with ConfigWriter.GenerateUniqueID(). We allow them to bypass
# the "does this UUID" exist already check.
if check_uuid:
self._EnsureUUID(group, ec_id)
try:
existing_uuid = self._UnlockedLookupNodeGroup(group.name)
except errors.OpPrereqError:
pass
else:
raise errors.OpPrereqError("Desired group name '%s' already exists as a"
" node group (UUID: %s)" %
(group.name, existing_uuid),
errors.ECODE_EXISTS)
group.serial_no = 1
group.ctime = group.mtime = time.time()
group.UpgradeConfig()
self._ConfigData().nodegroups[group.uuid] = group
self._ConfigData().cluster.serial_no += 1
@ConfigSync()
def RemoveNodeGroup(self, group_uuid):
"""Remove a node group from the configuration.
@type group_uuid: string
@param group_uuid: the UUID of the node group to remove
"""
logging.info("Removing node group %s from configuration", group_uuid)
if group_uuid not in self._ConfigData().nodegroups:
raise errors.ConfigurationError("Unknown node group '%s'" % group_uuid)
assert len(self._ConfigData().nodegroups) != 1, \
"Group '%s' is the only group, cannot be removed" % group_uuid
del self._ConfigData().nodegroups[group_uuid]
self._ConfigData().cluster.serial_no += 1
def _UnlockedLookupNodeGroup(self, target):
"""Lookup a node group's UUID.
@type target: string or None
@param target: group name or UUID or None to look for the default
@rtype: string
@return: nodegroup UUID
@raises errors.OpPrereqError: when the target group cannot be found
"""
if target is None:
if len(self._ConfigData().nodegroups) != 1:
raise errors.OpPrereqError("More than one node group exists. Target"
" group must be specified explicitly.")
else:
return list(self._ConfigData().nodegroups)[0]
if target in self._ConfigData().nodegroups:
return target
for nodegroup in self._ConfigData().nodegroups.values():
if nodegroup.name == target:
return nodegroup.uuid
raise errors.OpPrereqError("Node group '%s' not found" % target,
errors.ECODE_NOENT)
@ConfigSync(shared=1)
def LookupNodeGroup(self, target):
"""Lookup a node group's UUID.
This function is just a wrapper over L{_UnlockedLookupNodeGroup}.
@type target: string or None
@param target: group name or UUID or None to look for the default
@rtype: string
@return: nodegroup UUID
"""
return self._UnlockedLookupNodeGroup(target)
def _UnlockedGetNodeGroup(self, uuid):
"""Lookup a node group.
@type uuid: string
@param uuid: group UUID
@rtype: L{objects.NodeGroup} or None
@return: nodegroup object, or None if not found
"""
if uuid not in self._ConfigData().nodegroups:
return None
return self._ConfigData().nodegroups[uuid]
@ConfigSync(shared=1)
def GetNodeGroup(self, uuid):
"""Lookup a node group.
@type uuid: string
@param uuid: group UUID
@rtype: L{objects.NodeGroup} or None
@return: nodegroup object, or None if not found
"""
return self._UnlockedGetNodeGroup(uuid)
def _UnlockedGetAllNodeGroupsInfo(self):
"""Get the configuration of all node groups.
"""
return dict(self._ConfigData().nodegroups)
@ConfigSync(shared=1)
def GetAllNodeGroupsInfo(self):
"""Get the configuration of all node groups.
"""
return self._UnlockedGetAllNodeGroupsInfo()
@ConfigSync(shared=1)
def GetAllNodeGroupsInfoDict(self):
"""Get the configuration of all node groups expressed as a dictionary of
dictionaries.
"""
return dict((uuid, ng.ToDict()) for (uuid, ng) in
self._UnlockedGetAllNodeGroupsInfo().items())
@ConfigSync(shared=1)
def GetNodeGroupList(self):
"""Get a list of node groups.
"""
return list(self._ConfigData().nodegroups)
@ConfigSync(shared=1)
def GetNodeGroupMembersByNodes(self, nodes):
"""Get nodes which are member in the same nodegroups as the given nodes.
"""
ngfn = lambda node_uuid: self._UnlockedGetNodeInfo(node_uuid).group
return frozenset(member_uuid
for node_uuid in nodes
for member_uuid in
self._UnlockedGetNodeGroup(ngfn(node_uuid)).members)
@ConfigSync(shared=1)
def GetMultiNodeGroupInfo(self, group_uuids):
"""Get the configuration of multiple node groups.
@param group_uuids: List of node group UUIDs
@rtype: list
@return: List of tuples of (group_uuid, group_info)
"""
return [(uuid, self._UnlockedGetNodeGroup(uuid)) for uuid in group_uuids]
def AddInstance(self, instance, _ec_id, replace=False):
"""Add an instance to the config.
This should be used after creating a new instance.
@type instance: L{objects.Instance}
@param instance: the instance object
@type replace: bool
@param replace: if true, expect the instance to be present and
replace rather than add.
"""
if not isinstance(instance, objects.Instance):
raise errors.ProgrammerError("Invalid type passed to AddInstance")
instance.serial_no = 1
utils.SimpleRetry(True, self._wconfd.AddInstance, 0.1, 30,
args=[instance.ToDict(),
self._GetWConfdContext(),
replace])
self.OutDate()
def _EnsureUUID(self, item, ec_id):
"""Ensures a given object has a valid UUID.
@param item: the instance or node to be checked
@param ec_id: the execution context id for the uuid reservation
"""
if not item.uuid:
item.uuid = self._GenerateUniqueID(ec_id)
else:
self._CheckUniqueUUID(item, include_temporary=True)
def _CheckUniqueUUID(self, item, include_temporary):
"""Checks that the UUID of the given object is unique.
@param item: the instance or node to be checked
@param include_temporary: whether temporarily generated UUID's should be
included in the check. If the UUID of the item to be checked is
a temporarily generated one, this has to be C{False}.
"""
if not item.uuid:
raise errors.ConfigurationError("'%s' must have an UUID" % (item.name,))
if item.uuid in self._AllIDs(include_temporary=include_temporary):
raise errors.ConfigurationError("Cannot add '%s': UUID %s already"
" in use" % (item.name, item.uuid))
def _CheckUUIDpresent(self, item):
"""Checks that an object with the given UUID exists.
@param item: the instance or other UUID possessing object to verify that
its UUID is present
"""
if not item.uuid:
raise errors.ConfigurationError("'%s' must have an UUID" % (item.name,))
if item.uuid not in self._AllIDs(include_temporary=False):
raise errors.ConfigurationError("Cannot replace '%s': UUID %s not present"
% (item.name, item.uuid))
def _SetInstanceStatus(self, inst_uuid, status, disks_active,
admin_state_source):
"""Set the instance's status to a given value.
@rtype: L{objects.Instance}
@return: the updated instance object
"""
def WithRetry():
result = self._wconfd.SetInstanceStatus(inst_uuid, status,
disks_active, admin_state_source)
self.OutDate()
if result is None:
raise utils.RetryAgain()
else:
return result
return objects.Instance.FromDict(utils.Retry(WithRetry, 0.1, 30))
def MarkInstanceUp(self, inst_uuid):
"""Mark the instance status to up in the config.
This also sets the instance disks active flag.
@rtype: L{objects.Instance}
@return: the updated instance object
"""
return self._SetInstanceStatus(inst_uuid, constants.ADMINST_UP, True,
constants.ADMIN_SOURCE)
def MarkInstanceOffline(self, inst_uuid):
"""Mark the instance status to down in the config.
This also clears the instance disks active flag.
@rtype: L{objects.Instance}
@return: the updated instance object
"""
return self._SetInstanceStatus(inst_uuid, constants.ADMINST_OFFLINE, False,
constants.ADMIN_SOURCE)
def RemoveInstance(self, inst_uuid):
"""Remove the instance from the configuration.
"""
utils.SimpleRetry(True, self._wconfd.RemoveInstance, 0.1, 30,
args=[inst_uuid])
self.OutDate()
@ConfigSync()
def RenameInstance(self, inst_uuid, new_name):
"""Rename an instance.
This needs to be done in ConfigWriter and not by RemoveInstance
combined with AddInstance as only we can guarantee an atomic
rename.
"""
if inst_uuid not in self._ConfigData().instances:
raise errors.ConfigurationError("Unknown instance '%s'" % inst_uuid)
inst = self._ConfigData().instances[inst_uuid]
inst.name = new_name
instance_disks = self._UnlockedGetInstanceDisks(inst_uuid)
for (_, disk) in enumerate(instance_disks):
if disk.dev_type in [constants.DT_FILE, constants.DT_SHARED_FILE]:
# rename the file paths in logical and physical id
file_storage_dir = os.path.dirname(os.path.dirname(disk.logical_id[1]))
disk.logical_id = (disk.logical_id[0],
utils.PathJoin(file_storage_dir, inst.name,
os.path.basename(disk.logical_id[1])))
# Force update of ssconf files
self._ConfigData().cluster.serial_no += 1
def MarkInstanceDown(self, inst_uuid):
"""Mark the status of an instance to down in the configuration.
This does not touch the instance disks active flag, as shut down instances
can still have active disks.
@rtype: L{objects.Instance}
@return: the updated instance object
"""
return self._SetInstanceStatus(inst_uuid, constants.ADMINST_DOWN, None,
constants.ADMIN_SOURCE)
def MarkInstanceUserDown(self, inst_uuid):
"""Mark the status of an instance to user down in the configuration.
This does not touch the instance disks active flag, as user shut
down instances can still have active disks.
"""
self._SetInstanceStatus(inst_uuid, constants.ADMINST_DOWN, None,
constants.USER_SOURCE)
def MarkInstanceDisksActive(self, inst_uuid):
"""Mark the status of instance disks active.
@rtype: L{objects.Instance}
@return: the updated instance object
"""
return self._SetInstanceStatus(inst_uuid, None, True, None)
def MarkInstanceDisksInactive(self, inst_uuid):
"""Mark the status of instance disks inactive.
@rtype: L{objects.Instance}
@return: the updated instance object
"""
return self._SetInstanceStatus(inst_uuid, None, False, None)
def _UnlockedGetInstanceList(self):
"""Get the list of instances.
This function is for internal use, when the config lock is already held.
"""
return list(self._ConfigData().instances)
@ConfigSync(shared=1)
def GetInstanceList(self):
"""Get the list of instances.
    @return: array of instance UUIDs, ex. ['instance2-uuid', 'instance1-uuid']
"""
return self._UnlockedGetInstanceList()
def ExpandInstanceName(self, short_name):
"""Attempt to expand an incomplete instance name.
"""
# Locking is done in L{ConfigWriter.GetAllInstancesInfo}
all_insts = self.GetAllInstancesInfo().values()
expanded_name = _MatchNameComponentIgnoreCase(
short_name, [inst.name for inst in all_insts])
if expanded_name is not None:
# there has to be exactly one instance with that name
inst = [n for n in all_insts if n.name == expanded_name][0]
return (inst.uuid, inst.name)
else:
return (None, None)
def _UnlockedGetInstanceInfo(self, inst_uuid):
"""Returns information about an instance.
This function is for internal use, when the config lock is already held.
"""
if inst_uuid not in self._ConfigData().instances:
return None
return self._ConfigData().instances[inst_uuid]
@ConfigSync(shared=1)
def GetInstanceInfo(self, inst_uuid):
"""Returns information about an instance.
It takes the information from the configuration file. Other information of
an instance are taken from the live systems.
@param inst_uuid: UUID of the instance
@rtype: L{objects.Instance}
@return: the instance object
"""
return self._UnlockedGetInstanceInfo(inst_uuid)
@ConfigSync(shared=1)
def GetInstanceNodeGroups(self, inst_uuid, primary_only=False):
"""Returns set of node group UUIDs for instance's nodes.
@rtype: frozenset
"""
instance = self._UnlockedGetInstanceInfo(inst_uuid)
if not instance:
raise errors.ConfigurationError("Unknown instance '%s'" % inst_uuid)
if primary_only:
nodes = [instance.primary_node]
else:
nodes = self._UnlockedGetInstanceNodes(instance.uuid)
return frozenset(self._UnlockedGetNodeInfo(node_uuid).group
for node_uuid in nodes)
@ConfigSync(shared=1)
def GetInstanceNetworks(self, inst_uuid):
"""Returns set of network UUIDs for instance's nics.
@rtype: frozenset
"""
instance = self._UnlockedGetInstanceInfo(inst_uuid)
if not instance:
raise errors.ConfigurationError("Unknown instance '%s'" % inst_uuid)
networks = set()
for nic in instance.nics:
if nic.network:
networks.add(nic.network)
return frozenset(networks)
@ConfigSync(shared=1)
def GetMultiInstanceInfo(self, inst_uuids):
"""Get the configuration of multiple instances.
@param inst_uuids: list of instance UUIDs
@rtype: list
@return: list of tuples (instance UUID, instance_info), where
instance_info is what would GetInstanceInfo return for the
        instance, while keeping the original order
"""
return [(uuid, self._UnlockedGetInstanceInfo(uuid)) for uuid in inst_uuids]
@ConfigSync(shared=1)
def GetMultiInstanceInfoByName(self, inst_names):
"""Get the configuration of multiple instances.
@param inst_names: list of instance names
@rtype: list
@return: list of tuples (instance, instance_info), where
instance_info is what would GetInstanceInfo return for the
        instance, while keeping the original order
"""
result = []
for name in inst_names:
instance = self._UnlockedGetInstanceInfoByName(name)
if instance:
result.append((instance.uuid, instance))
else:
raise errors.ConfigurationError("Instance data of instance '%s'"
" not found." % name)
return result
@ConfigSync(shared=1)
def GetAllInstancesInfo(self):
"""Get the configuration of all instances.
@rtype: dict
    @return: dict of (instance UUID, instance_info), where instance_info is
        what would GetInstanceInfo return for the instance
"""
return self._UnlockedGetAllInstancesInfo()
def _UnlockedGetAllInstancesInfo(self):
my_dict = dict([(inst_uuid, self._UnlockedGetInstanceInfo(inst_uuid))
for inst_uuid in self._UnlockedGetInstanceList()])
return my_dict
@ConfigSync(shared=1)
def GetInstancesInfoByFilter(self, filter_fn):
"""Get instance configuration with a filter.
@type filter_fn: callable
@param filter_fn: Filter function receiving instance object as parameter,
returning boolean. Important: this function is called while the
      configuration lock is held. It must not do any complex work or call
functions potentially leading to a deadlock. Ideally it doesn't call any
other functions and just compares instance attributes.
"""
return dict((uuid, inst)
for (uuid, inst) in self._ConfigData().instances.items()
if filter_fn(inst))
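  # Illustrative sketch (not part of the original module): per the warning
  # above, filter_fn should only compare instance attributes. Assuming `cfg`
  # is a ConfigWriter instance:
  #
  #   running = cfg.GetInstancesInfoByFilter(
  #       lambda inst: inst.admin_state == constants.ADMINST_UP)
  #
  # Heavier predicates risk deadlocks, as the configuration lock is held
  # while they run.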
@ConfigSync(shared=1)
def GetInstanceInfoByName(self, inst_name):
"""Get the L{objects.Instance} object for a named instance.
@param inst_name: name of the instance to get information for
@type inst_name: string
@return: the corresponding L{objects.Instance} instance or None if no
information is available
"""
return self._UnlockedGetInstanceInfoByName(inst_name)
def _UnlockedGetInstanceInfoByName(self, inst_name):
for inst in self._UnlockedGetAllInstancesInfo().values():
if inst.name == inst_name:
return inst
return None
def _UnlockedGetInstanceName(self, inst_uuid):
inst_info = self._UnlockedGetInstanceInfo(inst_uuid)
if inst_info is None:
raise errors.OpExecError("Unknown instance: %s" % inst_uuid)
return inst_info.name
@ConfigSync(shared=1)
def GetInstanceName(self, inst_uuid):
"""Gets the instance name for the passed instance.
@param inst_uuid: instance UUID to get name for
@type inst_uuid: string
@rtype: string
@return: instance name
"""
return self._UnlockedGetInstanceName(inst_uuid)
@ConfigSync(shared=1)
def GetInstanceNames(self, inst_uuids):
"""Gets the instance names for the passed list of nodes.
@param inst_uuids: list of instance UUIDs to get names for
@type inst_uuids: list of strings
@rtype: list of strings
@return: list of instance names
"""
return self._UnlockedGetInstanceNames(inst_uuids)
def SetInstancePrimaryNode(self, inst_uuid, target_node_uuid):
"""Sets the primary node of an existing instance
@param inst_uuid: instance UUID
@type inst_uuid: string
@param target_node_uuid: the new primary node UUID
@type target_node_uuid: string
"""
utils.SimpleRetry(True, self._wconfd.SetInstancePrimaryNode, 0.1, 30,
args=[inst_uuid, target_node_uuid])
self.OutDate()
@ConfigSync()
def SetDiskNodes(self, disk_uuid, nodes):
"""Sets the nodes of an existing disk
@param disk_uuid: disk UUID
@type disk_uuid: string
@param nodes: the new nodes for the disk
@type nodes: list of node uuids
"""
self._UnlockedGetDiskInfo(disk_uuid).nodes = nodes
@ConfigSync()
def SetDiskLogicalID(self, disk_uuid, logical_id):
"""Sets the logical_id of an existing disk
@param disk_uuid: disk UUID
@type disk_uuid: string
@param logical_id: the new logical_id for the disk
@type logical_id: tuple
"""
disk = self._UnlockedGetDiskInfo(disk_uuid)
if disk is None:
raise errors.ConfigurationError("Unknown disk UUID '%s'" % disk_uuid)
if len(disk.logical_id) != len(logical_id):
raise errors.ProgrammerError("Logical ID format mismatch\n"
"Existing logical ID: %s\n"
"New logical ID: %s", disk.logical_id,
logical_id)
disk.logical_id = logical_id
def _UnlockedGetInstanceNames(self, inst_uuids):
return [self._UnlockedGetInstanceName(uuid) for uuid in inst_uuids]
def _UnlockedAddNode(self, node, ec_id):
"""Add a node to the configuration.
@type node: L{objects.Node}
@param node: a Node instance
"""
logging.info("Adding node %s to configuration", node.name)
self._EnsureUUID(node, ec_id)
node.serial_no = 1
node.ctime = node.mtime = time.time()
self._UnlockedAddNodeToGroup(node.uuid, node.group)
assert node.uuid in self._ConfigData().nodegroups[node.group].members
self._ConfigData().nodes[node.uuid] = node
self._ConfigData().cluster.serial_no += 1
@ConfigSync()
def AddNode(self, node, ec_id):
"""Add a node to the configuration.
@type node: L{objects.Node}
@param node: a Node instance
"""
self._UnlockedAddNode(node, ec_id)
@ConfigSync()
def RemoveNode(self, node_uuid):
"""Remove a node from the configuration.
"""
logging.info("Removing node %s from configuration", node_uuid)
if node_uuid not in self._ConfigData().nodes:
raise errors.ConfigurationError("Unknown node '%s'" % node_uuid)
self._UnlockedRemoveNodeFromGroup(self._ConfigData().nodes[node_uuid])
del self._ConfigData().nodes[node_uuid]
self._ConfigData().cluster.serial_no += 1
def ExpandNodeName(self, short_name):
"""Attempt to expand an incomplete node name into a node UUID.
"""
# Locking is done in L{ConfigWriter.GetAllNodesInfo}
all_nodes = self.GetAllNodesInfo().values()
expanded_name = _MatchNameComponentIgnoreCase(
short_name, [node.name for node in all_nodes])
if expanded_name is not None:
# there has to be exactly one node with that name
node = [n for n in all_nodes if n.name == expanded_name][0]
return (node.uuid, node.name)
else:
return (None, None)
def _UnlockedGetNodeInfo(self, node_uuid):
"""Get the configuration of a node, as stored in the config.
This function is for internal use, when the config lock is already
held.
@param node_uuid: the node UUID
@rtype: L{objects.Node}
@return: the node object
"""
if node_uuid not in self._ConfigData().nodes:
return None
return self._ConfigData().nodes[node_uuid]
@ConfigSync(shared=1)
def GetNodeInfo(self, node_uuid):
"""Get the configuration of a node, as stored in the config.
This is just a locked wrapper over L{_UnlockedGetNodeInfo}.
@param node_uuid: the node UUID
@rtype: L{objects.Node}
@return: the node object
"""
return self._UnlockedGetNodeInfo(node_uuid)
@ConfigSync(shared=1)
def GetNodeInstances(self, node_uuid):
"""Get the instances of a node, as stored in the config.
@param node_uuid: the node UUID
@rtype: (list, list)
@return: a tuple with two lists: the primary and the secondary instances
"""
pri = []
sec = []
for inst in self._ConfigData().instances.values():
if inst.primary_node == node_uuid:
pri.append(inst.uuid)
if node_uuid in self._UnlockedGetInstanceSecondaryNodes(inst.uuid):
sec.append(inst.uuid)
return (pri, sec)
@ConfigSync(shared=1)
def GetNodeGroupInstances(self, uuid, primary_only=False):
"""Get the instances of a node group.
@param uuid: Node group UUID
@param primary_only: Whether to only consider primary nodes
@rtype: frozenset
@return: List of instance UUIDs in node group
"""
if primary_only:
nodes_fn = lambda inst: [inst.primary_node]
else:
nodes_fn = lambda inst: self._UnlockedGetInstanceNodes(inst.uuid)
return frozenset(inst.uuid
for inst in self._ConfigData().instances.values()
for node_uuid in nodes_fn(inst)
if self._UnlockedGetNodeInfo(node_uuid).group == uuid)
def _UnlockedGetHvparamsString(self, hvname):
"""Return the string representation of the list of hyervisor parameters of
the given hypervisor.
@see: C{GetHvparams}
"""
result = ""
hvparams = self._ConfigData().cluster.hvparams[hvname]
for key in hvparams:
result += "%s=%s\n" % (key, hvparams[key])
return result
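  # Illustrative example (not part of the original module): for hvparams
  # such as {"kernel_path": "/boot/vmlinuz", "root_path": "/dev/vda1"},
  # the method above returns "kernel_path=/boot/vmlinuz\nroot_path=/dev/vda1\n"
  # (one KEY=VALUE pair per line, in dictionary iteration order).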
@ConfigSync(shared=1)
def GetHvparamsString(self, hvname):
"""Return the hypervisor parameters of the given hypervisor.
@type hvname: string
@param hvname: name of a hypervisor
@rtype: string
@return: string containing key-value-pairs, one pair on each line;
format: KEY=VALUE
"""
return self._UnlockedGetHvparamsString(hvname)
def _UnlockedGetNodeList(self):
"""Return the list of nodes which are in the configuration.
This function is for internal use, when the config lock is already
held.
@rtype: list
"""
return list(self._ConfigData().nodes)
@ConfigSync(shared=1)
def GetNodeList(self):
"""Return the list of nodes which are in the configuration.
"""
return self._UnlockedGetNodeList()
def _UnlockedGetOnlineNodeList(self):
"""Return the list of nodes which are online.
"""
all_nodes = [self._UnlockedGetNodeInfo(node)
for node in self._UnlockedGetNodeList()]
return [node.uuid for node in all_nodes if not node.offline]
@ConfigSync(shared=1)
def GetOnlineNodeList(self):
"""Return the list of nodes which are online.
"""
return self._UnlockedGetOnlineNodeList()
@ConfigSync(shared=1)
def GetVmCapableNodeList(self):
"""Return the list of nodes which are not vm capable.
"""
all_nodes = [self._UnlockedGetNodeInfo(node)
for node in self._UnlockedGetNodeList()]
return [node.uuid for node in all_nodes if node.vm_capable]
@ConfigSync(shared=1)
def GetNonVmCapableNodeList(self):
"""Return the list of nodes' uuids which are not vm capable.
"""
all_nodes = [self._UnlockedGetNodeInfo(node)
for node in self._UnlockedGetNodeList()]
return [node.uuid for node in all_nodes if not node.vm_capable]
@ConfigSync(shared=1)
def GetNonVmCapableNodeNameList(self):
"""Return the list of nodes' names which are not vm capable.
"""
all_nodes = [self._UnlockedGetNodeInfo(node)
for node in self._UnlockedGetNodeList()]
return [node.name for node in all_nodes if not node.vm_capable]
@ConfigSync(shared=1)
def GetMultiNodeInfo(self, node_uuids):
"""Get the configuration of multiple nodes.
@param node_uuids: list of node UUIDs
@rtype: list
@return: list of tuples of (node, node_info), where node_info is
what would GetNodeInfo return for the node, in the original
order
"""
return [(uuid, self._UnlockedGetNodeInfo(uuid)) for uuid in node_uuids]
def _UnlockedGetAllNodesInfo(self):
"""Gets configuration of all nodes.
@note: See L{GetAllNodesInfo}
"""
return dict([(node_uuid, self._UnlockedGetNodeInfo(node_uuid))
for node_uuid in self._UnlockedGetNodeList()])
@ConfigSync(shared=1)
def GetAllNodesInfo(self):
"""Get the configuration of all nodes.
@rtype: dict
@return: dict of (node, node_info), where node_info is what
would GetNodeInfo return for the node
"""
return self._UnlockedGetAllNodesInfo()
def _UnlockedGetNodeInfoByName(self, node_name):
for node in self._UnlockedGetAllNodesInfo().values():
if node.name == node_name:
return node
return None
@ConfigSync(shared=1)
def GetNodeInfoByName(self, node_name):
"""Get the L{objects.Node} object for a named node.
@param node_name: name of the node to get information for
@type node_name: string
@return: the corresponding L{objects.Node} instance or None if no
information is available
"""
return self._UnlockedGetNodeInfoByName(node_name)
@ConfigSync(shared=1)
def GetNodeGroupInfoByName(self, nodegroup_name):
"""Get the L{objects.NodeGroup} object for a named node group.
@param nodegroup_name: name of the node group to get information for
@type nodegroup_name: string
@return: the corresponding L{objects.NodeGroup} instance or None if no
information is available
"""
for nodegroup in self._UnlockedGetAllNodeGroupsInfo().values():
if nodegroup.name == nodegroup_name:
return nodegroup
return None
def _UnlockedGetNodeName(self, node_spec):
if isinstance(node_spec, objects.Node):
return node_spec.name
elif isinstance(node_spec, str):
node_info = self._UnlockedGetNodeInfo(node_spec)
if node_info is None:
raise errors.OpExecError("Unknown node: %s" % node_spec)
return node_info.name
else:
raise errors.ProgrammerError("Can't handle node spec '%s'" % node_spec)
@ConfigSync(shared=1)
def GetNodeName(self, node_spec):
"""Gets the node name for the passed node.
@param node_spec: node to get names for
@type node_spec: either node UUID or a L{objects.Node} object
@rtype: string
@return: node name
"""
return self._UnlockedGetNodeName(node_spec)
def _UnlockedGetNodeNames(self, node_specs):
return [self._UnlockedGetNodeName(node_spec) for node_spec in node_specs]
@ConfigSync(shared=1)
def GetNodeNames(self, node_specs):
"""Gets the node names for the passed list of nodes.
@param node_specs: list of nodes to get names for
@type node_specs: list of either node UUIDs or L{objects.Node} objects
@rtype: list of strings
@return: list of node names
"""
return self._UnlockedGetNodeNames(node_specs)
@ConfigSync(shared=1)
def GetNodeGroupsFromNodes(self, node_uuids):
"""Returns groups for a list of nodes.
@type node_uuids: list of string
@param node_uuids: List of node UUIDs
@rtype: frozenset
"""
return frozenset(self._UnlockedGetNodeInfo(uuid).group
for uuid in node_uuids)
def _UnlockedGetMasterCandidateUuids(self):
"""Get the list of UUIDs of master candidates.
@rtype: list of strings
@return: list of UUIDs of all master candidates.
"""
return [node.uuid for node in self._ConfigData().nodes.values()
if node.master_candidate]
@ConfigSync(shared=1)
def GetMasterCandidateUuids(self):
"""Get the list of UUIDs of master candidates.
@rtype: list of strings
@return: list of UUIDs of all master candidates.
"""
return self._UnlockedGetMasterCandidateUuids()
def _UnlockedGetMasterCandidateStats(self, exceptions=None):
"""Get the number of current and maximum desired and possible candidates.
@type exceptions: list
@param exceptions: if passed, list of nodes that should be ignored
@rtype: tuple
    @return: tuple of (current, desired, possible), where "desired" is the
      configured pool size capped at the possible maximum
"""
mc_now = mc_should = mc_max = 0
for node in self._ConfigData().nodes.values():
if exceptions and node.uuid in exceptions:
continue
if not (node.offline or node.drained) and node.master_capable:
mc_max += 1
if node.master_candidate:
mc_now += 1
pool_size = self._ConfigData().cluster.candidate_pool_size
mc_should = mc_max if pool_size is None else min(mc_max, pool_size)
return (mc_now, mc_should, mc_max)
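  # Illustrative example (not part of the original module): with five
  # online, undrained, master-capable nodes (mc_max = 5), three of which
  # are already candidates (mc_now = 3), and candidate_pool_size = 10,
  # the method above returns (3, 5, 5): the desired size is capped at
  # what is possible.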
@ConfigSync(shared=1)
def GetMasterCandidateStats(self, exceptions=None):
"""Get the number of current and maximum possible candidates.
This is just a wrapper over L{_UnlockedGetMasterCandidateStats}.
@type exceptions: list
@param exceptions: if passed, list of nodes that should be ignored
@rtype: tuple
    @return: tuple of (current, desired, possible)
"""
return self._UnlockedGetMasterCandidateStats(exceptions)
@ConfigSync()
def MaintainCandidatePool(self, exception_node_uuids):
"""Try to grow the candidate pool to the desired size.
@type exception_node_uuids: list
@param exception_node_uuids: if passed, list of nodes that should be ignored
@rtype: list
@return: list with the adjusted nodes (L{objects.Node} instances)
"""
    # The second element of the stats tuple is the desired pool size
    # (capped at what is possible); that is the growth target here.
    (mc_now, mc_should, _) = self._UnlockedGetMasterCandidateStats(
      exception_node_uuids)
    mod_list = []
    if mc_now < mc_should:
      node_list = list(self._ConfigData().nodes)
      random.shuffle(node_list)
      for uuid in node_list:
        if mc_now >= mc_should:
          break
        node = self._ConfigData().nodes[uuid]
        if (node.master_candidate or node.offline or node.drained or
            node.uuid in exception_node_uuids or not node.master_capable):
          continue
        mod_list.append(node)
        node.master_candidate = True
        node.serial_no += 1
        mc_now += 1
      if mc_now != mc_should:
        # this should not happen
        logging.warning("Warning: MaintainCandidatePool didn't manage to"
                        " fill the candidate pool (%d/%d)", mc_now, mc_should)
    if mod_list:
      self._ConfigData().cluster.serial_no += 1
    return mod_list
def _UnlockedAddNodeToGroup(self, node_uuid, nodegroup_uuid):
"""Add a given node to the specified group.
"""
if nodegroup_uuid not in self._ConfigData().nodegroups:
# This can happen if a node group gets deleted between its lookup and
# when we're adding the first node to it, since we don't keep a lock in
# the meantime. It's ok though, as we'll fail cleanly if the node group
# is not found anymore.
raise errors.OpExecError("Unknown node group: %s" % nodegroup_uuid)
if node_uuid not in self._ConfigData().nodegroups[nodegroup_uuid].members:
self._ConfigData().nodegroups[nodegroup_uuid].members.append(node_uuid)
def _UnlockedRemoveNodeFromGroup(self, node):
"""Remove a given node from its group.
"""
nodegroup = node.group
if nodegroup not in self._ConfigData().nodegroups:
logging.warning("Warning: node '%s' has unknown node group '%s'"
" (while being removed from it)", node.uuid, nodegroup)
nodegroup_obj = self._ConfigData().nodegroups[nodegroup]
if node.uuid not in nodegroup_obj.members:
logging.warning("Warning: node '%s' not a member of its node group '%s'"
" (while being removed from it)", node.uuid, nodegroup)
else:
nodegroup_obj.members.remove(node.uuid)
@ConfigSync()
def AssignGroupNodes(self, mods):
"""Changes the group of a number of nodes.
    @type mods: list of tuples; (node UUID, new group UUID)
@param mods: Node membership modifications
"""
groups = self._ConfigData().nodegroups
nodes = self._ConfigData().nodes
resmod = []
# Try to resolve UUIDs first
for (node_uuid, new_group_uuid) in mods:
try:
node = nodes[node_uuid]
except KeyError:
raise errors.ConfigurationError("Unable to find node '%s'" % node_uuid)
if node.group == new_group_uuid:
# Node is being assigned to its current group
logging.debug("Node '%s' was assigned to its current group (%s)",
node_uuid, node.group)
continue
# Try to find current group of node
try:
old_group = groups[node.group]
except KeyError:
raise errors.ConfigurationError("Unable to find old group '%s'" %
node.group)
# Try to find new group for node
try:
new_group = groups[new_group_uuid]
except KeyError:
raise errors.ConfigurationError("Unable to find new group '%s'" %
new_group_uuid)
assert node.uuid in old_group.members, \
("Inconsistent configuration: node '%s' not listed in members for its"
" old group '%s'" % (node.uuid, old_group.uuid))
assert node.uuid not in new_group.members, \
("Inconsistent configuration: node '%s' already listed in members for"
" its new group '%s'" % (node.uuid, new_group.uuid))
resmod.append((node, old_group, new_group))
# Apply changes
for (node, old_group, new_group) in resmod:
assert node.uuid != new_group.uuid and old_group.uuid != new_group.uuid, \
"Assigning to current group is not possible"
node.group = new_group.uuid
# Update members of involved groups
if node.uuid in old_group.members:
old_group.members.remove(node.uuid)
if node.uuid not in new_group.members:
new_group.members.append(node.uuid)
# Update timestamps and serials (only once per node/group object)
now = time.time()
for obj in frozenset(itertools.chain(*resmod)):
obj.serial_no += 1
obj.mtime = now
# Force ssconf update
self._ConfigData().cluster.serial_no += 1
def _BumpSerialNo(self):
"""Bump up the serial number of the config.
"""
self._ConfigData().serial_no += 1
self._ConfigData().mtime = time.time()
def _AllUUIDObjects(self):
"""Returns all objects with uuid attributes.
"""
return (list(self._ConfigData().instances.values()) +
list(self._ConfigData().nodes.values()) +
list(self._ConfigData().nodegroups.values()) +
list(self._ConfigData().networks.values()) +
list(self._ConfigData().disks.values()) +
self._AllNICs() +
[self._ConfigData().cluster])
def GetConfigManager(self, shared=False, forcelock=False):
"""Returns a ConfigManager, which is suitable to perform a synchronized
block of configuration operations.
WARNING: This blocks all other configuration operations, so anything that
runs inside the block should be very fast, preferably not using any IO.
"""
return ConfigManager(self, shared=shared, forcelock=forcelock)
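  # Illustrative sketch (not part of the original module): assuming
  # ConfigManager implements the context-manager protocol, a synchronized
  # read could look like
  #
  #   with cfg.GetConfigManager(shared=True):
  #     node_uuids = cfg.GetNodeList()
  #
  # keeping the body short and IO-free, as the warning above demands.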
def _AddLockCount(self, count):
self._lock_count += count
return self._lock_count
def _LockCount(self):
return self._lock_count
def _OpenConfig(self, shared, force=False):
"""Read the config data from WConfd or disk.
"""
if self._AddLockCount(1) > 1:
if self._lock_current_shared and not shared:
self._AddLockCount(-1)
raise errors.ConfigurationError("Can't request an exclusive"
" configuration lock while holding"
" shared")
elif not force or self._lock_forced or not shared or self._offline:
return # we already have the lock, do nothing
else:
self._lock_current_shared = shared
if force:
self._lock_forced = True
# Read the configuration data. If offline, read the file directly.
# If online, call WConfd.
if self._offline:
try:
raw_data = utils.ReadFile(self._cfg_file)
data_dict = serializer.Load(raw_data)
# Make sure the configuration has the right version
ValidateConfig(data_dict)
data = objects.ConfigData.FromDict(data_dict)
except errors.ConfigVersionMismatch:
raise
except Exception as err:
raise errors.ConfigurationError(err)
self._cfg_id = utils.GetFileID(path=self._cfg_file)
if (not hasattr(data, "cluster") or
not hasattr(data.cluster, "rsahostkeypub")):
raise errors.ConfigurationError("Incomplete configuration"
" (missing cluster.rsahostkeypub)")
      if data.cluster.master_node not in data.nodes:
msg = ("The configuration denotes node %s as master, but does not"
" contain information about this node" %
data.cluster.master_node)
raise errors.ConfigurationError(msg)
master_info = data.nodes[data.cluster.master_node]
if master_info.name != self._my_hostname and not self._accept_foreign:
msg = ("The configuration denotes node %s as master, while my"
" hostname is %s; opening a foreign configuration is only"
" possible in accept_foreign mode" %
(master_info.name, self._my_hostname))
raise errors.ConfigurationError(msg)
self._SetConfigData(data)
# Upgrade configuration if needed
self._UpgradeConfig(saveafter=True)
else:
if shared and not force:
if self._config_data is None:
logging.debug("Requesting config, as I have no up-to-date copy")
dict_data = self._wconfd.ReadConfig()
logging.debug("Configuration received")
else:
dict_data = None
else:
# poll until we acquire the lock
while True:
logging.debug("Receiving config from WConfd.LockConfig [shared=%s]",
bool(shared))
dict_data = \
self._wconfd.LockConfig(self._GetWConfdContext(), bool(shared))
if dict_data is not None:
logging.debug("Received config from WConfd.LockConfig")
break
time.sleep(random.random())
try:
if dict_data is not None:
self._SetConfigData(objects.ConfigData.FromDict(dict_data))
self._UpgradeConfig()
except Exception as err:
raise errors.ConfigurationError(err)
def _CloseConfig(self, save):
"""Release resources relating the config data.
"""
if self._AddLockCount(-1) > 0:
return # we still have the lock, do nothing
if save:
try:
logging.debug("Writing configuration and unlocking it")
self._WriteConfig(releaselock=True)
logging.debug("Configuration write, unlock finished")
except Exception as err:
logging.critical("Can't write the configuration: %s", str(err))
raise
elif not self._offline and \
not (self._lock_current_shared and not self._lock_forced):
logging.debug("Unlocking configuration without writing")
self._wconfd.UnlockConfig(self._GetWConfdContext())
self._lock_forced = False
# TODO: To WConfd
def _UpgradeConfig(self, saveafter=False):
"""Run any upgrade steps.
This method performs both in-object upgrades and also update some data
elements that need uniqueness across the whole configuration or interact
with other objects.
@warning: if 'saveafter' is 'True', this function will call
L{_WriteConfig()} so it needs to be called only from a
"safe" place.
"""
# Keep a copy of the persistent part of _config_data to check for changes
# Serialization doesn't guarantee order in dictionaries
if saveafter:
oldconf = copy.deepcopy(self._ConfigData().ToDict())
else:
oldconf = None
# In-object upgrades
self._ConfigData().UpgradeConfig()
for item in self._AllUUIDObjects():
if item.uuid is None:
item.uuid = self._GenerateUniqueID(_UPGRADE_CONFIG_JID)
if not self._ConfigData().nodegroups:
default_nodegroup_name = constants.INITIAL_NODE_GROUP_NAME
default_nodegroup = objects.NodeGroup(name=default_nodegroup_name,
members=[])
self._UnlockedAddNodeGroup(default_nodegroup, _UPGRADE_CONFIG_JID, True)
for node in self._ConfigData().nodes.values():
if not node.group:
node.group = self._UnlockedLookupNodeGroup(None)
# This is technically *not* an upgrade, but needs to be done both when
# nodegroups are being added, and upon normally loading the config,
# because the members list of a node group is discarded upon
# serializing/deserializing the object.
self._UnlockedAddNodeToGroup(node.uuid, node.group)
if saveafter:
modified = (oldconf != self._ConfigData().ToDict())
else:
modified = True # can't prove it didn't change, but doesn't matter
if modified and saveafter:
self._WriteConfig()
self._UnlockedDropECReservations(_UPGRADE_CONFIG_JID)
else:
if self._offline:
self._UnlockedVerifyConfigAndLog()
def _WriteConfig(self, destination=None, releaselock=False):
"""Write the configuration data to persistent storage.
"""
if destination is None:
destination = self._cfg_file
# Save the configuration data. If offline, write the file directly.
# If online, call WConfd.
if self._offline:
self._BumpSerialNo()
txt = serializer.DumpJson(
self._ConfigData().ToDict(_with_private=True),
private_encoder=serializer.EncodeWithPrivateFields
)
getents = self._getents()
try:
fd = utils.SafeWriteFile(destination, self._cfg_id, data=txt,
close=False, gid=getents.confd_gid, mode=0o640)
except errors.LockError:
raise errors.ConfigurationError("The configuration file has been"
" modified since the last write, cannot"
" update")
try:
self._cfg_id = utils.GetFileID(fd=fd)
finally:
os.close(fd)
else:
try:
if releaselock:
res = self._wconfd.WriteConfigAndUnlock(self._GetWConfdContext(),
self._ConfigData().ToDict())
if not res:
logging.warning("WriteConfigAndUnlock indicates we already have"
" released the lock; assuming this was just a retry"
" and the initial call succeeded")
else:
self._wconfd.WriteConfig(self._GetWConfdContext(),
self._ConfigData().ToDict())
except errors.LockError:
raise errors.ConfigurationError("The configuration file has been"
" modified since the last write, cannot"
" update")
self.write_count += 1
def _GetAllHvparamsStrings(self, hypervisors):
"""Get the hvparams of all given hypervisors from the config.
@type hypervisors: list of string
@param hypervisors: list of hypervisor names
@rtype: dict of strings
@returns: dictionary mapping the hypervisor name to a string representation
of the hypervisor's hvparams
"""
hvparams = {}
for hv in hypervisors:
hvparams[hv] = self._UnlockedGetHvparamsString(hv)
return hvparams
@staticmethod
def _ExtendByAllHvparamsStrings(ssconf_values, all_hvparams):
"""Extends the ssconf_values dictionary by hvparams.
@type ssconf_values: dict of strings
@param ssconf_values: dictionary mapping ssconf_keys to strings
representing the content of ssconf files
@type all_hvparams: dict of strings
@param all_hvparams: dictionary mapping hypervisor names to a string
representation of their hvparams
@rtype: same as ssconf_values
@returns: the ssconf_values dictionary extended by hvparams
"""
for hv in all_hvparams:
ssconf_key = constants.SS_HVPARAMS_PREF + hv
ssconf_values[ssconf_key] = all_hvparams[hv]
return ssconf_values
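  # Illustrative example (not part of the original module): for the "kvm"
  # hypervisor the ssconf key is constants.SS_HVPARAMS_PREF + "kvm"
  # (e.g. "hvparams_kvm", depending on the constant's actual value), and
  # its value is the KEY=VALUE text built by _UnlockedGetHvparamsString.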
def _UnlockedGetSshPortMap(self, node_infos):
node_ports = dict([(node.name,
self._UnlockedGetNdParams(node).get(
constants.ND_SSH_PORT))
for node in node_infos])
return node_ports
def _UnlockedGetSsconfValues(self):
"""Return the values needed by ssconf.
@rtype: dict
    @return: a dictionary mapping each ssconf name to its associated value
"""
fn = "\n".join
instance_names = utils.NiceSort(
[inst.name for inst in
self._UnlockedGetAllInstancesInfo().values()])
node_infos = list(self._UnlockedGetAllNodesInfo().values())
node_names = [node.name for node in node_infos]
node_pri_ips = ["%s %s" % (ninfo.name, ninfo.primary_ip)
for ninfo in node_infos]
node_snd_ips = ["%s %s" % (ninfo.name, ninfo.secondary_ip)
for ninfo in node_infos]
node_vm_capable = ["%s=%s" % (ninfo.name, str(ninfo.vm_capable))
for ninfo in node_infos]
instance_data = fn(instance_names)
off_data = fn(node.name for node in node_infos if node.offline)
on_data = fn(node.name for node in node_infos if not node.offline)
mc_data = fn(node.name for node in node_infos if node.master_candidate)
mc_ips_data = fn(node.primary_ip for node in node_infos
if node.master_candidate)
node_data = fn(node_names)
node_pri_ips_data = fn(node_pri_ips)
node_snd_ips_data = fn(node_snd_ips)
node_vm_capable_data = fn(node_vm_capable)
cluster = self._ConfigData().cluster
cluster_tags = fn(cluster.GetTags())
master_candidates_certs = fn("%s=%s" % (mc_uuid, mc_cert)
for mc_uuid, mc_cert
in cluster.candidate_certs.items())
hypervisor_list = fn(cluster.enabled_hypervisors)
all_hvparams = self._GetAllHvparamsStrings(constants.HYPER_TYPES)
uid_pool = uidpool.FormatUidPool(cluster.uid_pool, separator="\n")
nodegroups = ["%s %s" % (nodegroup.uuid, nodegroup.name) for nodegroup in
self._ConfigData().nodegroups.values()]
nodegroups_data = fn(utils.NiceSort(nodegroups))
networks = ["%s %s" % (net.uuid, net.name) for net in
self._ConfigData().networks.values()]
networks_data = fn(utils.NiceSort(networks))
ssh_ports = fn("%s=%s" % (node_name, port)
for node_name, port
in self._UnlockedGetSshPortMap(node_infos).items())
ssconf_values = {
constants.SS_CLUSTER_NAME: cluster.cluster_name,
constants.SS_CLUSTER_TAGS: cluster_tags,
constants.SS_FILE_STORAGE_DIR: cluster.file_storage_dir,
constants.SS_SHARED_FILE_STORAGE_DIR: cluster.shared_file_storage_dir,
constants.SS_GLUSTER_STORAGE_DIR: cluster.gluster_storage_dir,
constants.SS_MASTER_CANDIDATES: mc_data,
constants.SS_MASTER_CANDIDATES_IPS: mc_ips_data,
constants.SS_MASTER_CANDIDATES_CERTS: master_candidates_certs,
constants.SS_MASTER_IP: cluster.master_ip,
constants.SS_MASTER_NETDEV: cluster.master_netdev,
constants.SS_MASTER_NETMASK: str(cluster.master_netmask),
constants.SS_MASTER_NODE: self._UnlockedGetNodeName(cluster.master_node),
constants.SS_NODE_LIST: node_data,
constants.SS_NODE_PRIMARY_IPS: node_pri_ips_data,
constants.SS_NODE_SECONDARY_IPS: node_snd_ips_data,
constants.SS_NODE_VM_CAPABLE: node_vm_capable_data,
constants.SS_OFFLINE_NODES: off_data,
constants.SS_ONLINE_NODES: on_data,
constants.SS_PRIMARY_IP_FAMILY: str(cluster.primary_ip_family),
constants.SS_INSTANCE_LIST: instance_data,
constants.SS_RELEASE_VERSION: constants.RELEASE_VERSION,
constants.SS_HYPERVISOR_LIST: hypervisor_list,
constants.SS_MAINTAIN_NODE_HEALTH: str(cluster.maintain_node_health),
constants.SS_UID_POOL: uid_pool,
constants.SS_NODEGROUPS: nodegroups_data,
constants.SS_NETWORKS: networks_data,
constants.SS_ENABLED_USER_SHUTDOWN: str(cluster.enabled_user_shutdown),
constants.SS_SSH_PORTS: ssh_ports,
}
ssconf_values = self._ExtendByAllHvparamsStrings(ssconf_values,
all_hvparams)
bad_values = [(k, v) for k, v in ssconf_values.items()
if not isinstance(v, str)]
if bad_values:
err = utils.CommaJoin("%s=%s" % (k, v) for k, v in bad_values)
raise errors.ConfigurationError("Some ssconf key(s) have non-string"
" values: %s" % err)
return ssconf_values
@ConfigSync(shared=1)
def GetSsconfValues(self):
"""Wrapper using lock around _UnlockedGetSsconf().
"""
return self._UnlockedGetSsconfValues()
@ConfigSync(shared=1)
def GetVGName(self):
"""Return the volume group name.
"""
return self._ConfigData().cluster.volume_group_name
@ConfigSync()
def SetVGName(self, vg_name):
"""Set the volume group name.
"""
self._ConfigData().cluster.volume_group_name = vg_name
self._ConfigData().cluster.serial_no += 1
@ConfigSync(shared=1)
def GetDRBDHelper(self):
"""Return DRBD usermode helper.
"""
return self._ConfigData().cluster.drbd_usermode_helper
@ConfigSync()
def SetDRBDHelper(self, drbd_helper):
"""Set DRBD usermode helper.
"""
self._ConfigData().cluster.drbd_usermode_helper = drbd_helper
self._ConfigData().cluster.serial_no += 1
@ConfigSync(shared=1)
def GetMACPrefix(self):
"""Return the mac prefix.
"""
return self._ConfigData().cluster.mac_prefix
@ConfigSync(shared=1)
def GetClusterInfo(self):
"""Returns information about the cluster
@rtype: L{objects.Cluster}
@return: the cluster object
"""
return self._ConfigData().cluster
@ConfigSync(shared=1)
def DisksOfType(self, dev_type):
"""Check if in there is at disk of the given type in the configuration.
"""
return self._ConfigData().DisksOfType(dev_type)
@ConfigSync(shared=1)
def GetDetachedConfig(self):
"""Returns a detached version of a ConfigManager, which represents
a read-only snapshot of the configuration at this particular time.
"""
return DetachedConfig(self._ConfigData())
def Update(self, target, feedback_fn, ec_id=None):
"""Notify function to be called after updates.
This function must be called when an object (as returned by
GetInstanceInfo, GetNodeInfo, GetCluster) has been updated and the
caller wants the modifications saved to the backing store. Note
that all modified objects will be saved, but the target argument
    is the one the caller wants to ensure is saved.
@param target: an instance of either L{objects.Cluster},
L{objects.Node} or L{objects.Instance} which is existing in
the cluster
@param feedback_fn: Callable feedback function
"""
update_function = None
if isinstance(target, objects.Cluster):
if self._offline:
self.UpdateOfflineCluster(target, feedback_fn)
return
else:
update_function = self._wconfd.UpdateCluster
elif isinstance(target, objects.Node):
update_function = self._wconfd.UpdateNode
elif isinstance(target, objects.Instance):
update_function = self._wconfd.UpdateInstance
elif isinstance(target, objects.NodeGroup):
update_function = self._wconfd.UpdateNodeGroup
elif isinstance(target, objects.Network):
update_function = self._wconfd.UpdateNetwork
elif isinstance(target, objects.Disk):
update_function = self._wconfd.UpdateDisk
else:
raise errors.ProgrammerError("Invalid object type (%s) passed to"
" ConfigWriter.Update" % type(target))
def WithRetry():
result = update_function(target.ToDict())
self.OutDate()
if result is None:
raise utils.RetryAgain()
else:
return result
vals = utils.Retry(WithRetry, 0.1, 30)
self.OutDate()
target.serial_no = vals[0]
target.mtime = float(vals[1])
if ec_id is not None:
# Commit all ips reserved by OpInstanceSetParams and OpGroupSetParams
# FIXME: After RemoveInstance is moved to WConfd, use its internal
# functions from TempRes module.
self.CommitTemporaryIps(ec_id)
# Just verify the configuration with our feedback function.
# It will get written automatically by the decorator.
self.VerifyConfigAndLog(feedback_fn=feedback_fn)
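  # Illustrative sketch (not part of the original module): the usual
  # read-modify-write cycle through Update() looks roughly like
  #
  #   cluster = cfg.GetClusterInfo()
  #   cluster.candidate_pool_size = 10
  #   cfg.Update(cluster, feedback_fn)
  #
  # with `cfg` an assumed ConfigWriter instance and feedback_fn any
  # callable accepting a message string.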
@ConfigSync()
def UpdateOfflineCluster(self, target, feedback_fn):
self._ConfigData().cluster = target
target.serial_no += 1
target.mtime = time.time()
self.VerifyConfigAndLog(feedback_fn=feedback_fn)
def _UnlockedDropECReservations(self, _ec_id):
"""Drop per-execution-context reservations
"""
# FIXME: Remove the following two lines after all reservations are moved to
# wconfd.
for rm in self._all_rms:
rm.DropECReservations(_ec_id)
if not self._offline:
self._wconfd.DropAllReservations(self._GetWConfdContext())
def DropECReservations(self, ec_id):
self._UnlockedDropECReservations(ec_id)
@ConfigSync(shared=1)
def GetAllNetworksInfo(self):
"""Get configuration info of all the networks.
"""
return dict(self._ConfigData().networks)
def _UnlockedGetNetworkList(self):
"""Get the list of networks.
This function is for internal use, when the config lock is already held.
"""
return list(self._ConfigData().networks)
@ConfigSync(shared=1)
def GetNetworkList(self):
"""Get the list of networks.
    @return: array of networks, ex. ["main", "vlan100", "200"]
"""
return self._UnlockedGetNetworkList()
@ConfigSync(shared=1)
def GetNetworkNames(self):
"""Get a list of network names
"""
names = [net.name
for net in self._ConfigData().networks.values()]
return names
def _UnlockedGetNetwork(self, uuid):
"""Returns information about a network.
This function is for internal use, when the config lock is already held.
"""
if uuid not in self._ConfigData().networks:
return None
return self._ConfigData().networks[uuid]
@ConfigSync(shared=1)
def GetNetwork(self, uuid):
"""Returns information about a network.
It takes the information from the configuration file.
@param uuid: UUID of the network
@rtype: L{objects.Network}
@return: the network object
"""
return self._UnlockedGetNetwork(uuid)
@ConfigSync()
def AddNetwork(self, net, ec_id, check_uuid=True):
"""Add a network to the configuration.
@type net: L{objects.Network}
@param net: the Network object to add
@type ec_id: string
@param ec_id: unique id for the job to use when creating a missing UUID
"""
self._UnlockedAddNetwork(net, ec_id, check_uuid)
def _UnlockedAddNetwork(self, net, ec_id, check_uuid):
"""Add a network to the configuration.
"""
logging.info("Adding network %s to configuration", net.name)
if check_uuid:
self._EnsureUUID(net, ec_id)
net.serial_no = 1
net.ctime = net.mtime = time.time()
self._ConfigData().networks[net.uuid] = net
self._ConfigData().cluster.serial_no += 1
def _UnlockedLookupNetwork(self, target):
"""Lookup a network's UUID.
@type target: string
@param target: network name or UUID
@rtype: string
@return: network UUID
@raises errors.OpPrereqError: when the target network cannot be found
"""
if target is None:
return None
if target in self._ConfigData().networks:
return target
for net in self._ConfigData().networks.values():
if net.name == target:
return net.uuid
raise errors.OpPrereqError("Network '%s' not found" % target,
errors.ECODE_NOENT)
@ConfigSync(shared=1)
def LookupNetwork(self, target):
"""Lookup a network's UUID.
This function is just a wrapper over L{_UnlockedLookupNetwork}.
@type target: string
@param target: network name or UUID
@rtype: string
@return: network UUID
"""
return self._UnlockedLookupNetwork(target)
@ConfigSync()
def RemoveNetwork(self, network_uuid):
"""Remove a network from the configuration.
@type network_uuid: string
@param network_uuid: the UUID of the network to remove
"""
logging.info("Removing network %s from configuration", network_uuid)
if network_uuid not in self._ConfigData().networks:
raise errors.ConfigurationError("Unknown network '%s'" % network_uuid)
del self._ConfigData().networks[network_uuid]
self._ConfigData().cluster.serial_no += 1
def _UnlockedGetGroupNetParams(self, net_uuid, node_uuid):
"""Get the netparams (mode, link) of a network.
Get a network's netparams for a given node.
@type net_uuid: string
@param net_uuid: network uuid
@type node_uuid: string
@param node_uuid: node UUID
@rtype: dict or None
@return: netparams
"""
node_info = self._UnlockedGetNodeInfo(node_uuid)
nodegroup_info = self._UnlockedGetNodeGroup(node_info.group)
netparams = nodegroup_info.networks.get(net_uuid, None)
return netparams
@ConfigSync(shared=1)
def GetGroupNetParams(self, net_uuid, node_uuid):
"""Locking wrapper of _UnlockedGetGroupNetParams()
"""
return self._UnlockedGetGroupNetParams(net_uuid, node_uuid)
@ConfigSync(shared=1)
def CheckIPInNodeGroup(self, ip, node_uuid):
"""Check IP uniqueness in nodegroup.
    Check the networks connected to the node's node group and whether the
    given IP address is contained in any of them. Used when creating/adding
    a NIC, to ensure uniqueness among nodegroups.
@type ip: string
@param ip: ip address
@type node_uuid: string
@param node_uuid: node UUID
@rtype: (string, dict) or (None, None)
@return: (network name, netparams)
"""
if ip is None:
return (None, None)
node_info = self._UnlockedGetNodeInfo(node_uuid)
nodegroup_info = self._UnlockedGetNodeGroup(node_info.group)
for net_uuid in nodegroup_info.networks:
net_info = self._UnlockedGetNetwork(net_uuid)
pool = network.AddressPool(net_info)
if pool.Contains(ip):
return (net_info.name, nodegroup_info.networks[net_uuid])
return (None, None)
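  # Illustrative example (not part of the original module): if the node's
  # group is connected to a network "net1" covering 192.0.2.0/24, then
  # CheckIPInNodeGroup("192.0.2.10", node_uuid) returns ("net1", netparams),
  # while an address outside every connected network yields (None, None).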
@ConfigSync(shared=1)
def GetCandidateCerts(self):
"""Returns the candidate certificate map.
"""
return self._ConfigData().cluster.candidate_certs
@ConfigSync()
def SetCandidateCerts(self, certs):
"""Replaces the master candidate cert list with the new values.
@type certs: dict of string to string
@param certs: map of node UUIDs to SSL client certificate digests.
"""
self._ConfigData().cluster.candidate_certs = certs
@ConfigSync()
def AddNodeToCandidateCerts(self, node_uuid, cert_digest,
                              info_fn=logging.info, warn_fn=logging.warning):
"""Adds an entry to the candidate certificate map.
@type node_uuid: string
@param node_uuid: the node's UUID
@type cert_digest: string
@param cert_digest: the digest of the node's client SSL certificate
@type info_fn: function
@param info_fn: logging function for information messages
@type warn_fn: function
@param warn_fn: logging function for warning messages
"""
cluster = self._ConfigData().cluster
if node_uuid in cluster.candidate_certs:
old_cert_digest = cluster.candidate_certs[node_uuid]
if old_cert_digest == cert_digest:
if info_fn is not None:
info_fn("Certificate digest for node %s already in config."
"Not doing anything." % node_uuid)
return
else:
if warn_fn is not None:
warn_fn("Overriding differing certificate digest for node %s"
% node_uuid)
cluster.candidate_certs[node_uuid] = cert_digest
@ConfigSync()
def RemoveNodeFromCandidateCerts(self, node_uuid,
                                   warn_fn=logging.warning):
"""Removes the entry of the given node in the certificate map.
@type node_uuid: string
@param node_uuid: the node's UUID
@type warn_fn: function
@param warn_fn: logging function for warning messages
"""
cluster = self._ConfigData().cluster
if node_uuid not in cluster.candidate_certs:
if warn_fn is not None:
warn_fn("Cannot remove certifcate for node %s, because it's not"
" in the candidate map." % node_uuid)
return
del cluster.candidate_certs[node_uuid]
def FlushConfig(self):
"""Force the distribution of configuration to master candidates.
    It is not necessary to hold a lock for this operation; it is handled
internally by WConfd.
"""
if not self._offline:
self._wconfd.FlushConfig()
def FlushConfigGroup(self, uuid):
"""Force the distribution of configuration to master candidates of a group.
    It is not necessary to hold a lock for this operation; it is handled
internally by WConfd.
"""
if not self._offline:
self._wconfd.FlushConfigGroup(uuid)
@ConfigSync(shared=1)
def GetAllDiskInfo(self):
"""Get the configuration of all disks.
@rtype: dict
@return: dict of (disk, disk_info), where disk_info is what
would GetDiskInfo return for disk
"""
return self._UnlockedGetAllDiskInfo()
def _UnlockedGetAllDiskInfo(self):
return dict((disk_uuid, self._UnlockedGetDiskInfo(disk_uuid))
for disk_uuid in self._UnlockedGetDiskList())
@ConfigSync(shared=1)
def GetInstanceForDisk(self, disk_uuid):
"""Returns the instance the disk is currently attached to.
@type disk_uuid: string
@param disk_uuid: the identifier of the disk in question.
    @rtype: string or None
    @return: uuid of the instance the disk is attached to, or None if the
        disk is not attached to any instance
"""
for inst_uuid, inst_info in self._UnlockedGetAllInstancesInfo().items():
if disk_uuid in inst_info.disks:
return inst_uuid
class DetachedConfig(ConfigWriter):
"""Read-only snapshot of the config."""
def __init__(self, config_data):
    super(DetachedConfig, self).__init__(offline=True)
self._SetConfigData(config_data)
@staticmethod
def _WriteCallError():
raise errors.ProgrammerError("DetachedConfig supports only read-only"
" operations")
def _OpenConfig(self, shared, force=None):
if not shared:
DetachedConfig._WriteCallError()
def _CloseConfig(self, save):
if save:
DetachedConfig._WriteCallError()
|
bsd-2-clause
| -9,024,758,844,000,849,000
| 31.696176
| 80
| 0.659278
| false
| 3.931358
| true
| false
| false
|
javiercantero/streamlink
|
src/streamlink/plugins/viasat.py
|
1
|
4387
|
import re
from streamlink import NoStreamsError
from streamlink.exceptions import PluginError
from streamlink.plugin import Plugin
from streamlink.plugin.api import StreamMapper, http, validate
from streamlink.stream import HDSStream, HLSStream, RTMPStream
from streamlink.utils import rtmpparse
STREAM_API_URL = "https://playapi.mtgx.tv/v3/videos/stream/{0}"
_swf_url_re = re.compile(r"data-flashplayer-url=\"([^\"]+)\"")
_player_data_re = re.compile(r"window.fluxData\s*=\s*JSON.parse\(\"(.+)\"\);")
_stream_schema = validate.Schema(
validate.any(
None,
validate.all({"msg": validate.text}),
validate.all({
"streams": validate.all(
{validate.text: validate.any(validate.text, int, None)},
validate.filter(lambda k, v: isinstance(v, validate.text))
)
}, validate.get("streams"))
)
)
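# Illustrative examples (not part of the original plugin) of payloads the
# schema above accepts; the URLs and names are made up:
#
#   {"msg": "Geo-blocked"}                           -> {"msg": "Geo-blocked"}
#   {"streams": {"hls": "http://x/a.m3u8", "id": 1}} -> {"hls": "http://x/a.m3u8"}
#
# Non-string stream values (such as the integer "id") are filtered out.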
class Viasat(Plugin):
"""Streamlink Plugin for Viasat"""
_iframe_re = re.compile(r"""<iframe.+src=["'](?P<url>[^"']+)["'].+allowfullscreen""")
_image_re = re.compile(r"""<meta\sproperty=["']og:image["']\scontent=".+/(?P<stream_id>\d+)/[^/]+\.jpg""")
_url_re = re.compile(r"""https?://(?:www\.)?
(?:
juicyplay\.dk
|
play\.nova\.bg
|
(?:tvplay\.)?
skaties\.lv
|
(?:(?:tv3)?play\.)?
tv3\.(?:dk|ee|lt)
|
tv6play\.no
|
viafree\.(?:dk|no|se)
)
/(?:
(?:
.+/
|
embed\?id=
)
(?P<stream_id>\d+)
)?
""", re.VERBOSE)
@classmethod
def can_handle_url(cls, url):
return cls._url_re.match(url)
def _get_swf_url(self):
res = http.get(self.url)
match = _swf_url_re.search(res.text)
if not match:
raise PluginError("Unable to find SWF URL in the HTML")
return match.group(1)
def _create_dynamic_streams(self, stream_type, parser, video):
try:
streams = parser(self.session, video[1])
return streams.items()
except IOError as err:
self.logger.error("Failed to extract {0} streams: {1}", stream_type, err)
def _create_rtmp_stream(self, video):
name, stream_url = video
params = {
"rtmp": stream_url,
"pageUrl": self.url,
"swfVfy": self._get_swf_url(),
}
if stream_url.endswith(".mp4"):
tcurl, playpath = rtmpparse(stream_url)
params["rtmp"] = tcurl
params["playpath"] = playpath
else:
params["live"] = True
return name, RTMPStream(self.session, params)
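    # Illustrative note (not part of the original plugin): for an mp4 URL
    # such as "rtmp://host/app/mp4:video.mp4", rtmpparse() is assumed to
    # split it into a tcUrl ("rtmp://host/app") and a playpath
    # ("mp4:video.mp4"); other rtmp URLs are treated as live streams.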
def _extract_streams(self, stream_id):
res = http.get(STREAM_API_URL.format(stream_id), raise_for_status=False)
stream_info = http.json(res, schema=_stream_schema)
if stream_info.get("msg"):
# error message
self.logger.error(stream_info.get("msg"))
raise NoStreamsError(self.url)
mapper = StreamMapper(lambda pattern, video: re.search(pattern, video[1]))
mapper.map(
r"/\w+\.m3u8",
self._create_dynamic_streams,
"HLS", HLSStream.parse_variant_playlist
)
mapper.map(
r"/\w+\.f4m",
self._create_dynamic_streams,
"HDS", HDSStream.parse_manifest
)
mapper.map(r"^rtmp://", self._create_rtmp_stream)
return mapper(stream_info.items())
def _get_stream_id(self, text):
"""Try to find a stream_id"""
m = self._image_re.search(text)
if m:
return m.group("stream_id")
def _get_iframe(self, text):
"""Fallback if no stream_id was found before"""
m = self._iframe_re.search(text)
if m:
return self.session.streams(m.group("url"))
def _get_streams(self):
match = self._url_re.match(self.url)
stream_id = match.group("stream_id")
if not stream_id:
text = http.get(self.url).text
stream_id = self._get_stream_id(text)
if not stream_id:
return self._get_iframe(text)
if stream_id:
return self._extract_streams(stream_id)
__plugin__ = Viasat
|
bsd-2-clause
| 6,951,403,913,173,260,000
| 28.641892
| 110
| 0.531115
| false
| 3.634631
| false
| false
| false
|
AnthonyDiGirolamo/heliopause
|
sector.py
|
1
|
9915
|
import libtcodpy as libtcod
import math
from random import randrange
import time
import pprint
pp = pprint.PrettyPrinter(indent=4, width=200).pprint
from planet import Planet
from asteroid import Asteroid
class Sector:
def __init__(self, screen_width, screen_height, buffer, background=libtcod.Color(0,0,0)):
self.twopi = 2 * math.pi
self.background = background
# self.background = libtcod.Color(32,32,64)
self.buffer = buffer
self.screen_width = screen_width
self.screen_height = screen_height
self.visible_space_left = 0
self.visible_space_top = 0
self.visible_space_right = 0
self.visible_space_bottom = 0
self.planets = []
self.asteroids = []
self.particles = []
self.selected_planet = None
self.selected_asteroid = None
self.selected_blink = 0
def mirror_y_coordinate(self, y):
        return (self.screen_height - 1 - y)
def add_planet(self, **keyword_args):
self.planets.append(Planet(sector=self, **keyword_args))
self.planet_distances = [None for p in self.planets]
return [self.planets[-1].icon, self.planets[-1].icon_color, len(self.planets)]
def add_asteroid(self, **keyword_args):
self.asteroids.append(Asteroid(sector=self, **keyword_args))
self.asteroid_distances = [None for p in self.asteroids]
return [self.asteroids[-1].icon, self.asteroids[-1].icon_color, len(self.asteroids)]
def update_visibility(self, player_sector_position_x, player_sector_position_y):
self.visible_space_left = player_sector_position_x - self.screen_width/2
self.visible_space_top = player_sector_position_y + self.screen_height/2
self.visible_space_right = self.visible_space_left + self.screen_width
self.visible_space_bottom = self.visible_space_top - self.screen_height
def clear_selected_planet(self):
self.selected_planet = None
def distance_from_center(self, ship):
return math.sqrt(ship.sector_position_x**2 + ship.sector_position_y**2)
def update_selected_planet_distance(self, ship):
planet = self.get_selected_planet()
if self.selected_planet is not None:
self.planet_distances[self.selected_planet] = math.sqrt((ship.sector_position_x - planet.sector_position_x)**2.0 + (ship.sector_position_y - planet.sector_position_y)**2.0)
elif self.selected_asteroid is not None:
self.asteroid_distances[self.selected_asteroid] = math.sqrt((ship.sector_position_x - planet.sector_position_x)**2.0 + (ship.sector_position_y - planet.sector_position_y)**2.0)
newx = planet.sector_position_x - ship.sector_position_x
newy = planet.sector_position_y - ship.sector_position_y
        try:
            self.selected_planet_angle = math.atan(newy / newx)
        except ZeroDivisionError:
            # newx is zero when the target sits directly above or below
            self.selected_planet_angle = 0.0
if newx > 0.0 and newy < 0.0:
self.selected_planet_angle += self.twopi
elif newx < 0.0:
self.selected_planet_angle += math.pi
def get_selected_planet(self):
if self.selected_planet is not None:
return self.planets[self.selected_planet]
elif self.selected_asteroid is not None:
return self.asteroids[self.selected_asteroid]
def selected_planet_distance(self):
if self.selected_planet is not None:
return self.planet_distances[self.selected_planet]
elif self.selected_asteroid is not None:
return self.asteroid_distances[self.selected_asteroid]
def update_all_planet_distances(self, ship):
self.planet_distances = [ math.sqrt((ship.sector_position_x - planet.sector_position_x)**2.0 + (ship.sector_position_y - planet.sector_position_y)**2.0) for planet in self.planets]
self.asteroid_distances = [ math.sqrt((ship.sector_position_x - asteroid.sector_position_x)**2.0 + (ship.sector_position_y - asteroid.sector_position_y)**2.0) for asteroid in self.asteroids]
def closest_planet(self, ship):
self.update_all_planet_distances(ship)
nearest_planet_index = 0
smallest_distance = None
for index, distance in enumerate(self.planet_distances):
if smallest_distance is None or distance < smallest_distance:
nearest_planet_index = index
smallest_distance = distance
return [nearest_planet_index, smallest_distance]
def closest_asteroid(self, ship):
self.update_all_planet_distances(ship)
nearest_asteroid_index = 0
smallest_distance = None
for index, distance in enumerate(self.asteroid_distances):
if smallest_distance is None or distance < smallest_distance:
nearest_asteroid_index = index
smallest_distance = distance
return [nearest_asteroid_index, smallest_distance]
def land_at_closest_planet(self, ship):
landed = False
message = None
index, distance = self.closest_planet(ship)
planet = self.planets[index]
if distance < 1.25*(planet.width/2.0):
for p in self.planets:
p.selected = False
planet.selected = True
if ship.velocity > 0.20:
message = "You are moving to fast to land.".format(distance)
else:
landed = True
planet.render_detail()
else:
message = "There isn't a planet in landing range."
if landed:
ship.velocity = 0.0
return [landed, message, index]
def add_particle(self, particle):
self.particles.append( particle )
def update_particle_positions(self):
for p in self.particles:
p.update_position()
def scroll_particles(self, heading=0.0, velocity=0.0):
deltax = math.cos(heading) * velocity * -1
deltay = math.sin(heading) * velocity * -1
# remove particles which have faded
self.particles = [p for p in self.particles if p.on_screen]
for particle in self.particles:
if particle.on_screen:
particle.x += deltax * 1.0
particle.y += deltay * 1.0
particle.index -= 1
if particle.index < 0:
particle.index = 0
particle.on_screen = False
def draw_minimap(self, buffer, width, height, ship):
zoom = 1.0
distance = 1000.0
zoom = float(int(distance + max([ abs((ship.sector_position_x)), abs(ship.sector_position_y) ])) / int(distance))
buffer.clear(self.background[0], self.background[1], self.background[2])
size = int((width-3) / 2.0)
size_reduction = (zoom*distance)/size
for index, p in enumerate(self.asteroids + self.planets):
x = size + 1 + int(p.sector_position_x / (size_reduction))
y = size + 1 - int(p.sector_position_y / (size_reduction))
if 0 < x < width-1 and 0 < y < height-1:
buffer.set(x, y, 0, 0, 0, p.icon_color[0], p.icon_color[1], p.icon_color[2], p.icon)
if self.selected_planet is not None:
x = size + 1 + int(self.planets[self.selected_planet].sector_position_x / (size_reduction))
y = size + 1 - int(self.planets[self.selected_planet].sector_position_y / (size_reduction))
t = time.clock()
if t > self.selected_blink + 0.5:
if t > self.selected_blink + 1.0:
self.selected_blink = t
buffer.set(x+1, y, 0, 0, 0, 0, 255, 0, 175)
buffer.set(x-1, y, 0, 0, 0, 0, 255, 0, 174)
x = size + 1 + int(ship.sector_position_x / (size_reduction))
y = size + 1 - int(ship.sector_position_y / (size_reduction))
if 0 < x < width-1 and 0 < y < height-1:
buffer.set_fore(x, y, 255, 255, 255, ship.icon())
def cycle_planet_target(self, ship):
self.deselect_asteroid()
        if self.selected_planet is None:
self.selected_planet = 0
else:
self.selected_planet += 1
if self.selected_planet == len(self.planets):
self.selected_planet = None
if self.selected_planet is not None:
for p in self.planets:
p.selected = False
self.planets[self.selected_planet].selected = True
self.update_selected_planet_distance(ship)
def deselect_planet(self):
if self.selected_planet is not None:
self.selected_planet = None
for p in self.planets:
p.selected = False
def deselect_asteroid(self):
if self.selected_asteroid is not None:
self.selected_asteroid = None
for p in self.asteroids:
p.selected = False
def cycle_target(self, ship):
self.deselect_planet()
        if self.selected_asteroid is None:
self.selected_asteroid = 0
else:
self.selected_asteroid += 1
if self.selected_asteroid == len(self.asteroids):
self.selected_asteroid = None
if self.selected_asteroid is not None:
for p in self.asteroids:
p.selected = False
self.asteroids[self.selected_asteroid].selected = True
self.update_selected_planet_distance(ship)
# self.update_selected_asteroid_distance(ship)
def target_nearest_planet(self, ship):
self.deselect_asteroid()
self.selected_planet, distance = self.closest_planet(ship)
self.planets[self.selected_planet].selected = True
def target_nearest_asteroid(self, ship):
self.deselect_planet()
self.selected_asteroid, distance = self.closest_asteroid(ship)
self.asteroids[self.selected_asteroid].selected = True
|
mit
| -1,482,294,771,838,708,700
| 39.635246
| 198
| 0.611195
| false
| 3.698247
| false
| false
| false
|
openqt/algorithms
|
leetcode/python/lc080-remove-duplicates-from-sorted-array-ii.py
|
1
|
2142
|
# coding=utf-8
import unittest
"""80. Remove Duplicates from Sorted Array II
https://leetcode.com/problems/remove-duplicates-from-sorted-array-ii/description/
Given a sorted array _nums_ , remove the duplicates [**in-
place**](https://en.wikipedia.org/wiki/In-place_algorithm) such that
duplicates appeared at most _twice_ and return the new length.
Do not allocate extra space for another array, you must do this by **modifying
the input array[in-place](https://en.wikipedia.org/wiki/In-place_algorithm)**
with O(1) extra memory.
**Example 1:**
Given _nums_ = **[1,1,1,2,2,3]** ,
Your function should return length = **5** , with the first five elements of _nums_ being **1, 1, 2, 2** and **3** respectively.
It doesn 't matter what you leave beyond the returned length.
**Example 2:**
Given _nums_ = **[0,0,1,1,1,1,2,3,3]** ,
Your function should return length = **7** , with the first seven elements of _nums_ being modified to **0** , **0** , **1** , **1** , **2** , **3** and **3** respectively.
It doesn 't matter what values are set beyond the returned length.
**Clarification:**
Confused why the returned value is an integer but your answer is an array?
Note that the input array is passed in by **reference** , which means
modification to the input array will be known to the caller as well.
Internally you can think of this:
// **nums** is passed in by reference. (i.e., without making a copy)
int len = removeDuplicates(nums);
// any modification to **nums** in your function would be known by the caller.
// using the length returned by your function, it prints the first **len** elements.
for (int i = 0; i < len; i++) {
print(nums[i]);
}
Similar Questions:
Remove Duplicates from Sorted Array (remove-duplicates-from-sorted-array)
"""
class Solution(object):
    def removeDuplicates(self, nums):
        """
        :type nums: List[int]
        :rtype: int
        """
        # two pointers: keep a value unless two copies of it already
        # sit in the kept prefix nums[:write]
        write = 0
        for num in nums:
            if write < 2 or num != nums[write - 2]:
                nums[write] = num
                write += 1
        return write
    def test(self):
        assert self.removeDuplicates([1, 1, 1, 2, 2, 3]) == 5
        assert self.removeDuplicates([0, 0, 1, 1, 1, 1, 2, 3, 3]) == 7
if __name__ == "__main__":
unittest.main()
|
gpl-3.0
| 1,719,257,980,137,989,000
| 27.226667
| 180
| 0.630719
| false
| 3.624365
| false
| false
| false
|
diogocs1/simuladormips
|
lib/simulador.py
|
1
|
3312
|
# -*- encoding: UTF-8 -*-
from controle import UC
from lib.memoria import Mem_instrucoes, Mem_dados
from lib.registradores import Banco
from lib.operacoes import ULA
from lib.instrucoes import Instrucao_R_I
class Sistema (object):
def __init__(self):
self.__PC = 0
self.__UC = UC()
self.__ULA = ULA()
self.__memoriaInstrucao = Mem_instrucoes()
self.__memoriaDados = Mem_dados()
        self.__bancoDeRegistradores = Banco()
        # special-purpose registers exposed by the getters below; without
        # these defaults getIR/getMDR/getA/getB would raise AttributeError
        self.__IR = None
        self.__MDR = None
        self.__A = None
        self.__B = None
def executaInstrucao (self):
instrucao = self.__memoriaInstrucao.getInstrucao(self.__PC)
valores = self.decodifica(instrucao)
if valores:
self.__ULA.opera(self.__UC, instrucao, valores)
self.incrementaPC()
def decodifica (self, instrucao):
'''
        Function: decodifica(instrucao)
        Description: Locates and returns the values of registers and variables
'''
        # Check whether the PC points at a Label
if type(instrucao) is str:
return None
self.__UC.decodifica(instrucao)
if type(instrucao) is Instrucao_R_I:
resultado = instrucao.getResultado()
valor2 = None
            # fetch the first register
resultado = self.__bancoDeRegistradores.getRegistrador(nome=resultado)
            # fetch operand 1
valor1 = instrucao.getValor1()
print valor1
if self.__bancoDeRegistradores.getRegistrador(nome=valor1):
valor1 = self.__bancoDeRegistradores.getRegistrador(nome=valor1).getValor()
elif self.__memoriaDados.getDado(nome=valor1):
valor1 = self.__memoriaDados.getDado(valor1).getValor()
            # fetch operand 2
if instrucao.getValor2():
valor2 = instrucao.getValor2()
if self.__bancoDeRegistradores.getRegistrador(nome=valor2):
valor2 = self.__bancoDeRegistradores.getRegistrador(nome=valor2).getValor()
elif self.__memoriaDados.getDado(nome=valor2):
valor2 = self.__memoriaDados.getDado(valor2).getValor()
return [resultado, valor1, valor2]
else:
endereco = instrucao.getEndereco()
fila_de_inst = self.__memoriaInstrucao.getDados()
for inst in fila_de_inst:
if inst == endereco:
self.__PC = fila_de_inst.index(inst)
return None
return None
def getPC(self):
return self.__PC
def getProximaInstrucao(self):
try:
return self.__memoriaInstrucao.getInstrucao(self.__PC)
except:
return "Fim do programa!"
def setPC (self, indice):
self.__PC = indice
def incrementaPC(self):
self.__PC += 1
def getIR (self):
return self.__IR
def getMDR (self):
return self.__MDR
def getA (self):
return self.__A
def getB (self):
return self.__B
def getULA (self):
return self.__ULA
def getUC(self):
return self.__UC
def getMemoriaInstrucao(self):
return self.__memoriaInstrucao
def getMemoriaDados(self):
return self.__memoriaDados
def getBanco (self):
return self.__bancoDeRegistradores
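# Example usage (a sketch; assumes Mem_instrucoes was populated with parsed
# instructions before stepping the system):
#
#   sistema = Sistema()
#   sistema.executaInstrucao()   # fetch, decode, execute, then increment PC
#   print sistema.getPC()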
|
gpl-2.0
| 6,597,753,176,257,532,000
| 35.351648
| 95
| 0.588449
| false
| 3.238981
| false
| false
| false
|
dmkelly/Django-Location-Form-Field
|
fields.py
|
1
|
4316
|
from django import forms
class LocationWidget(forms.widgets.Widget):
"""Forms widget to represent a location.
Uses Google Maps API to represent a location on a map with a marker.
"""
def __init__(self, *args, **kwargs):
super(LocationWidget, self).__init__(*args, **kwargs)
def render(self, name, value, attrs):
if not value:
lat, lon = (0,0,)
else:
lat, lon = value.split(',')
html = []
if attrs.get('help_text') is not None:
html.append('<p>' + attrs['help_text'] + '</p>')
html.append("""<div id="map" style="height:%(height)s;width:%(width)s;">
<noscript>This page requires JavaScript.</noscript>
</div>
<input id="gmap_loc_%(name)s" type="hidden" name="%(name)s" value="%(value)s" />
<script type="text/javascript">
function initialize_map() {
if(typeof(google) == 'undefined') {
document.getElementById('map').innerHTML = 'Google API not found';
return;
}
var options = {
center: new google.maps.LatLng(%(lat)s, %(lon)s),
zoom: 13,
mapTypeId: google.maps.MapTypeId.ROADMAP
};
%(name)s_map = new google.maps.Map(document.getElementById('map'),
options);
var marker = new google.maps.Marker({
position: %(name)s_map.getCenter(),
draggable: true,
animation: google.maps.Animation.DROP,
map: %(name)s_map,
title: '%(marker_text)s'
});
google.maps.event.addListener(marker, 'position_changed', function() {
var valInput=document.getElementById('gmap_loc_%(name)s');
valInput.value = marker.getPosition().lat()+','+marker.getPosition().lng();
});
google.maps.event.addListener(%(name)s_map, 'resize', function() {
%(name)s_map.setCenter(%(name)s_map.getCenter());
});
}
initialize_map();
</script>
""" % {'name': name, 'value':value,
'height':self.attrs.get('height', '400px'),
'width':self.attrs.get('width', '400px'),
'lat': lat, 'lon': lon,
'marker_text':self.attrs.get('marker_text', 'Drag the marker to the desired location')})
return ''.join(html)
class LocationField(forms.Field):
"""This form field is used to obtain a latitude and longitude coordinate
from a Google Map.
"""
widget = LocationWidget
def __init__(self, *args, **kwargs):
super(LocationField, self).__init__(*args, **kwargs)
def to_python(self, value):
if not value:
return None
else:
return {'latitude': self.__parse_latitude(value),
'longitude': self.__parse_longitude(value)}
def __to_micro_coordinate(self, coord):
"""Only works on cleaned data."""
if not coord:
return None
return int(float(coord) * 1000000)
def validate(self, value):
super(LocationField, self).validate(value)
if type(value) is dict:
self.__validate_as_dict(value)
else:
self.__validate_as_dict({'latitude':self.__parse_latitude(value),
'longitude':self.__parse_longitude(value)})
def __validate_as_dict(self, value):
if not (value['latitude'] and value['longitude']):
raise forms.ValidationError('Missing at least one coordinate')
if value['latitude'] > 90.000000 or value['latitude'] < -90.000000:
raise forms.ValidationError('Latitude out of range')
if value['longitude'] > 180.000000 or value['longitude'] < -180.000000:
raise forms.ValidationError('Longitude out of range')
def __parse_latitude(self, value):
return float(value.split(',')[0])
def __parse_longitude(self, value):
try:
return float(value.split(',')[1])
except IndexError:
return None
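# Example usage (a sketch; ``PlaceForm`` is a hypothetical form -- the field
# cleans a "lat,lon" string into a dict of floats):
#
#   class PlaceForm(forms.Form):
#       location = LocationField(
#           widget=LocationWidget(attrs={'height': '300px', 'width': '500px'}))
#
#   form = PlaceForm({'location': '40.712800,-74.006000'})
#   form.is_valid()                # True
#   form.cleaned_data['location']  # {'latitude': 40.7128, 'longitude': -74.006}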
|
epl-1.0
| 6,844,412,214,279,928,000
| 39.716981
| 103
| 0.522938
| false
| 4.342052
| false
| false
| false
|
ahlusar1989/flowzillow
|
flowzillow/client.py
|
1
|
9263
|
from urlparse import urljoin
import requests
from flowzillow import constants
from flowzillow.exceptions import ZillowError
def _trim_none_values(dict_):
new_dict = dict(dict_)
del_keys = []
for k, v in new_dict.iteritems():
if not v:
del_keys.append(k)
for key in del_keys:
del new_dict[key]
return new_dict
def _validate_response(response):
if response.status_code != constants.SUCCESS_CODE:
raise ZillowError(response)
class SearchClient(object):
def search(self, latlong1, latlong2, **kwargs):
"""
        Search for all home listings within a set of rectangular geocoordinates. Returns
        a block of JSON with all the search results. Since this is an undocumented API,
        the purpose of some of its parameters is unknown.
:param tuple latlong1: Geocoords of the upper left point of the rectangular search box
:param tuple latlong2: Geocoords of the lower right point of the rectangular search box
:param **kwargs:
:param spt: Seems to be "homes" by default
:param rid: Region ID. A region unique number
:param days: Number of days on market. Select "any" for any number of days
:param att: Custom keyword search.
:param sort: Sort by choice of (days/featured/priced/pricea/lot/built/size/baths/beds/zest/zesta)
:param zoom: The zoom of the map.
:param pf: Search for properties in pre-foreclosure (0/1)
:param pmf: Search for foreclosed properties (0/1)
:param laundry: In unit laundry (rentals only) (0/1)
:param parking: On site-parking (rentals only) (0/1)
:param pets: Accepts pets (rentals only) (0/1)
:param bd: Bedrooms (number plus) eg input of "1," means 1 bedroom and up
:param pr: Price (number plus) eg input of 50000 means 50000 and up
:param ba: Bathrooms (number plus)
:param sf: Square feet "<min>,<max>". If either min or max not set just leave blank but keep comma
:param lot: Lot size "<min>,<max>"
:param yr: Year build "<min>,<max>"
:param lt: List Type. A 6 digit binary number for filtering by for sale 111111 would mean search for
all for sale "By Agent", "By Owner", "Foreclosures", "New Homes", "Open Houses Only", "Coming Soon."
:param status: Status of home. A 6 digit binary number. input of 111011 means search for all houses
(Set to 1 to search "For Sale"), "Make me move", "Recently Sold", (Next bit seems unused),
"For Rent", (Set to 1 if you want to search for foreclosure properties)
:param ht: Home Type. A 6 digit binary number. 111111 means search for "Houses", "Condos",
"Apartments", "Manufactured", "Lots/Land", "Townhomes"
:param rt: ?? 6 seems to be default
:param red: ?? 0 seems to be default
:param pho: ?? 0 seems to be default
:param pnd: ?? 0 seems to be default
:param zso: ?? 0 seems to be default
:param ds: ?? "all" seems to be default
:param p: ?? 1 seems to be default
"""
params = self._make_search_params(latlong1, latlong2, **kwargs)
response = requests.get(
urljoin(constants.BASE_URL, "search/GetResults.htm"), params=params
)
_validate_response(response)
return response.json()
def _make_rect_param(self, latlong1, latlong2):
geo1 = map(lambda coord: str(coord).replace(".", ""), reversed(list(latlong1)))
geo2 = map(lambda coord: str(coord).replace(".", ""), reversed(list(latlong2)))
return ",".join(geo1 + geo2)
def _make_search_params(self, latlong1, latlong2, **kwargs):
rect = self._make_rect_param(latlong1, latlong2)
param_dict = {
"ht": constants.HOME_TYPE,
"isMapSearch": False,
"lt": constants.LISTING_TYPE,
"rect": rect,
"red": constants.RED,
"rt": constants.RT,
"search": constants.SEARCH,
"spt": constants.SPT,
"status": constants.STATUS,
"zoom": constants.ZOOM_LEVEL,
"pr": ",",
"mp": ",",
"bd": "0,",
"ba": "0,",
"sf": "0,",
"lot": "0,",
"yr": "0,",
"pho": "0,",
"pets": 0,
"parking": 0,
"laundry": 0,
"pnd": 0,
"zso": 0,
"days": constants.DAYS,
"ds": constants.DS,
"pf": constants.PF,
"pmf": constants.PMF,
"p": constants.P,
"sort": constants.SORT,
}
param_dict.update(kwargs)
return param_dict.items()
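# Example usage (a sketch; the geocoordinates are made-up values and network
# access to the unofficial endpoint is assumed):
#
#   client = SearchClient()
#   listings = client.search((47.6305, -122.3493), (47.5949, -122.2999),
#                            bd="2,", sort="priced")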
class ZillowClient(object):
def __init__(self, zws_id):
self.zws_id = zws_id
def _perform_get_request(self, path, params):
response = requests.get(urljoin(constants.ZILLOW_WEBSERVICE, path),
params=_trim_none_values(params).items())
_validate_response(response)
return response.content
def get_z_estimate(self, zpid, rent_z_estimate=None):
return self._perform_get_request(
"GetZestimate.htm",
{"zws-id": self.zws_id, "zpid": zpid, "rentzestimate": rent_z_estimate},
)
def get_search_results(self, address, city_state_zip, rent_z_estimate=None):
return self._perform_get_request(
"GetSearchResults.htm",
{"zws-id": self.zws_id,
"address": address,
"citystatezip": city_state_zip,
"rent_z_estimate": rent_z_estimate},
)
def get_chart(self, zpid, unit_type, width, height, chart_duration):
return self._perform_get_request(
"GetChart.htm",
{"zws-id": self.zws_id,
"zpid": zpid,
"unit-type": unit_type,
"width": "width",
"height": height,
"chartDuration": chart_duration}
)
def get_comps(self, zpid, count, rent_z_estimate=None):
return self._perform_get_request(
"GetComps.htm",
{"zws-id": self.zws_id,
"zpid": zpid,
"count": count,
"rentzestimate": rent_z_estimate}
)
def get_deep_comps(self, zpid, count, rent_z_estimate=None):
return self._perform_get_request(
"GetDeepComps.htm",
{"zws-id": self.zws_id,
"zpid": zpid,
"count": count,
"rentzestimate": rent_z_estimate}
)
def get_deep_search_results(self, address, city_state_zip, rent_z_estimate=None):
return self._perform_get_request(
"GetDeepSearchResults.htm",
{"zws-id": self.zws_id,
"address": address,
"citystatezip": city_state_zip,
"rent_z_estimate": rent_z_estimate}
)
def get_updated_property_details(self, zpid):
return self._perform_get_request(
"GetUpdatedPropertyDetails.htm",
{"zws-id": self.zws_id, "zpid": zpid}
)
def get_demographics(self, region_id=None, state=None, city=None, neighborhood=None, zipcode=None):
"""
Get the demographics of a specific city.
At least rid, state/city, city/neighborhood, or zipcode is required
"""
if not region_id and not (state and city) and not (city and neighborhood) and not zipcode:
raise ValueError("At least rid, state/city, city/neighborhood, or zipcode is required")
return self._perform_get_request(
"GetDemographics.htm",
{"zws-id": self.zws_id,
"regionId": region_id,
"state": state,
"city": city,
"neighborhood": neighborhood,
"zip": zipcode}
)
def get_region_children(self, region_id=None, state=None, county=None, city=None, child_type=None):
"""
Get a list of sub-regions with their relevant information
At least region_id or state is required
"""
if not region_id and not state:
raise ValueError("At least region_id or state is required")
return self._perform_get_request(
"GetRegionChildren.htm",
{"zws-id": self.zws_id,
"regionId": region_id,
"state": state,
"county": county,
"city": city,
"childtype": child_type}
)
def get_region_chart(self,
unit_type,
city=None,
state=None,
neighborhood=None,
zipcode=None,
width=None,
height=None,
chart_duration=None):
return self._perform_get_request(
"GetRegionChart.htm",
{"zws-id": self.zws_id,
"city": city,
"state": state,
"neighborhood": neighborhood,
"zip": zipcode,
"unit-type": unit_type,
"width": width,
"height": height,
"chartDuration": chart_duration}
)
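# Example usage (a sketch; "YOUR-ZWS-ID" stands in for a real Zillow Web
# Services ID and the zpid for a real property id):
#
#   client = ZillowClient("YOUR-ZWS-ID")
#   xml_payload = client.get_z_estimate("48749425")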
|
gpl-2.0
| -5,074,563,894,233,085,000
| 37.435685
| 108
| 0.555975
| false
| 3.753241
| false
| false
| false
|
Alexander-Minyushkin/aistreamer
|
worker/app.py
|
1
|
1520
|
import os
from flask import Flask, render_template, request
import luigi
from luigi.contrib.gcs import GCSTarget, GCSClient
import subprocess
from merge_video import MergeVideoAndAudio
app = Flask(__name__)
@app.route('/')
def hello_world():
target = os.environ.get('TARGET', 'World')
return 'Hello {}!\n'.format(target)
# http://localhost:8080/merge_video?youtube_id=asdf&text_id=pg_12
@app.route('/merge_video', methods=['GET'])
def merge_video():
youtube_id = request.args.get('youtube_id')
youtube_link = f'https://www.youtube.com/watch?v={youtube_id}'
text_id = request.args.get('text_id')
# --scheduler-url
# https://luigi.readthedocs.io/en/latest/central_scheduler.html
# $luigid --background --pidfile <PATH_TO_PIDFILE> --logdir <PATH_TO_LOGDIR> --state-path <PATH_TO_STATEFILE>
scheduler_url = os.environ.get('SCHEDULER', 'http://127.0.0.1:8082')
#if not num:
luigi.run(['detect.MergeVideoAndAudio',
'--gs-path-video', youtube_link, #'gs://amvideotest/Welcome_to_Adam_Does_Movies.mp4', # 'gs://amvideotest/battlefield1.mp4', #
'--text-generator','markov',
'--text-generator-source', 'gs://amvideotest/source/pg/pg345.txt', #'gs://amvideotest/source/pg/pg345.txt',
'--workers', '1',
'--scheduler-url', scheduler_url])
return f'Completed youtube_link: {youtube_link}\ntext_id: {text_id}'
if __name__ == "__main__":
app.run(debug=True,host='0.0.0.0',port=int(os.environ.get('PORT', 8080)))
|
apache-2.0
| -997,589,223,079,689,300
| 35.214286
| 139
| 0.653289
| false
| 3.12115
| false
| false
| false
|
capitalone/cloud-custodian
|
tests/test_webhook.py
|
1
|
9095
|
# Copyright 2019 Microsoft Corporation
# Copyright The Cloud Custodian Authors.
# SPDX-License-Identifier: Apache-2.0
import datetime
import json
import mock
from c7n.actions.webhook import Webhook
from c7n.exceptions import PolicyValidationError
from .common import BaseTest
import os
class WebhookTest(BaseTest):
def test_valid_policy(self):
policy = {
"name": "webhook-batch",
"resource": "ec2",
"actions": [
{
"type": "webhook",
"url": "http://foo.com",
}
],
}
self.assertTrue(self.load_policy(data=policy, validate=True))
policy = {
"name": "webhook-batch",
"resource": "ec2",
"actions": [
{
"type": "webhook",
"url": "http://foo.com",
"batch": True,
"query-params": {
"foo": "bar"
}
}
],
}
self.assertTrue(self.load_policy(data=policy, validate=True))
def test_invalid_policy(self):
# Missing URL parameter
policy = {
"name": "webhook-batch",
"resource": "ec2",
"actions": [
{
"type": "webhook"
}
],
}
with self.assertRaises(PolicyValidationError):
self.load_policy(data=policy, validate=True)
# Bad method
policy = {
"name": "webhook-batch",
"resource": "ec2",
"actions": [
{
"type": "webhook",
"url": "http://foo.com",
"method": "CREATE"
}
],
}
with self.assertRaises(PolicyValidationError):
self.load_policy(data=policy, validate=True)
@mock.patch('c7n.actions.webhook.urllib3.PoolManager.request')
def test_process_batch(self, request_mock):
resources = [
{
"name": "test_name",
"value": "test_value"
},
{
"name": "test_name",
"value": "test_value"
},
{
"name": "test_name",
"value": "test_value"
},
{
"name": "test_name",
"value": "test_value"
},
{
"name": "test_name",
"value": "test_value"
}
]
data = {
"url": "http://foo.com",
"batch": True,
"batch-size": 2,
"query-params": {
"foo": "resources[0].name"
}
}
wh = Webhook(data=data, manager=self._get_manager())
wh.process(resources)
req = request_mock.call_args[1]
# 5 resources with max batch size 2 == 3 calls
self.assertEqual(3, len(request_mock.call_args_list))
# Check out one of the calls in detail
self.assertEqual("http://foo.com?foo=test_name", req['url'])
self.assertEqual("POST", req['method'])
self.assertEqual({}, req['headers'])
@mock.patch('c7n.actions.webhook.urllib3.PoolManager.request')
def test_process_batch_body(self, request_mock):
resources = [
{
"name": "test_name",
"value": "test_value"
}
]
data = {
"url": "http://foo.com",
"batch": True,
"body": "resources[].name",
"body-size": 10,
"headers": {
"test": "'header'"
},
"query-params": {
"foo": "resources[0].name"
}
}
wh = Webhook(data=data, manager=self._get_manager())
wh.process(resources)
req = request_mock.call_args[1]
self.assertEqual("http://foo.com?foo=test_name", req['url'])
self.assertEqual("POST", req['method'])
self.assertEqual(b'[\n"test_name"\n]', req['body'])
self.assertEqual(
{"test": "header", "Content-Type": "application/json"},
req['headers'])
@mock.patch('c7n.actions.webhook.urllib3.PoolManager.request')
def test_process_date_serializer(self, request_mock):
current = datetime.datetime.utcnow()
resources = [
{
"name": "test1",
"value": current
},
]
data = {
"url": "http://foo.com",
"body": "resources[]",
'batch': True,
}
wh = Webhook(data=data, manager=self._get_manager())
wh.process(resources)
req1 = request_mock.call_args_list[0][1]
self.assertEqual(
json.loads(req1['body'])[0]['value'],
current.isoformat())
@mock.patch('c7n.actions.webhook.urllib3.PoolManager.request')
def test_process_no_batch(self, request_mock):
resources = [
{
"name": "test1",
"value": "test_value"
},
{
"name": "test2",
"value": "test_value"
}
]
data = {
"url": "http://foo.com",
"query-params": {
"foo": "resource.name"
}
}
wh = Webhook(data=data, manager=self._get_manager())
wh.process(resources)
req1 = request_mock.call_args_list[0][1]
req2 = request_mock.call_args_list[1][1]
self.assertEqual("http://foo.com?foo=test1", req1['url'])
self.assertEqual("http://foo.com?foo=test2", req2['url'])
@mock.patch('c7n.actions.webhook.urllib3.PoolManager.request')
def test_process_existing_query_string(self, request_mock):
resources = [
{
"name": "test1",
"value": "test_value"
},
{
"name": "test2",
"value": "test_value"
}
]
data = {
"url": "http://foo.com?existing=test",
"query-params": {
"foo": "resource.name"
}
}
wh = Webhook(data=data, manager=self._get_manager())
wh.process(resources)
req1 = request_mock.call_args_list[0][1]
req2 = request_mock.call_args_list[1][1]
self.assertIn("existing=test", req1['url'])
self.assertIn("foo=test1", req1['url'])
self.assertIn("existing=test", req2['url'])
self.assertIn("foo=test2", req2['url'])
@mock.patch('c7n.actions.webhook.urllib3.PoolManager.request')
def test_process_policy_metadata(self, request_mock):
resources = [
{
"name": "test1",
"value": "test_value"
},
{
"name": "test2",
"value": "test_value"
}
]
data = {
"url": "http://foo.com",
"query-params": {
"policy": "policy.name"
}
}
wh = Webhook(data=data, manager=self._get_manager())
wh.process(resources)
req1 = request_mock.call_args_list[0][1]
req2 = request_mock.call_args_list[1][1]
self.assertEqual("http://foo.com?policy=webhook_policy", req1['url'])
self.assertEqual("http://foo.com?policy=webhook_policy", req2['url'])
@mock.patch('c7n.actions.webhook.urllib3.ProxyManager.request')
@mock.patch('c7n.actions.webhook.urllib3.PoolManager.request')
def test_process_with_http_proxy(self, pool_request_mock, proxy_request_mock):
with mock.patch.dict(os.environ,
{'HTTP_PROXY': 'http://mock.http.proxy.server:8000'},
clear=True):
resources = [
{
"name": "test_name",
"value": "test_value"
}
]
data = {
"url": "http://foo.com"
}
wh = Webhook(data=data, manager=self._get_manager())
wh.process(resources)
proxy_req = proxy_request_mock.call_args[1]
self.assertEqual("http://foo.com", proxy_req['url'])
self.assertEqual("POST", proxy_req['method'])
self.assertEqual(1, proxy_request_mock.call_count)
self.assertEqual(0, pool_request_mock.call_count)
def _get_manager(self):
"""The tests don't require real resource data
or recordings, but they do need a valid manager with
policy metadata so we just make one here to use"""
policy = self.load_policy({
"name": "webhook_policy",
"resource": "ec2",
"actions": [
{
"type": "webhook",
"url": "http://foo.com"}
]})
return policy.resource_manager
|
apache-2.0
| 4,588,318,786,688,363,500
| 28.433657
| 82
| 0.464651
| false
| 4.080305
| true
| false
| false
|
dude56987/YoutubeTV
|
resources/lib/tables.py
|
1
|
8449
|
#########################################################################
# Generic database libary using pickle to store values in files.
# Copyright (C) 2016 Carl J Smith
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
########################################################################
import masterdebug
debug=masterdebug.init(False)
from files import loadFile
from files import writeFile
################################################################################
# import all the things
from pickle import loads as unpickle
from pickle import dumps as pickle
from os.path import join as pathJoin
from os.path import exists as pathExists
from os import listdir
from os import makedirs
from os import remove as removeFile
from random import choice
################################################################################
class table():
def __init__(self,path):
'''
DB table to store things as files and directories. This is
designed to reduce ram usage when reading things from large
databases. Specifically this is designed for caches.
# variables #
.path
The path on the filesystem where the table is stored.
.names
Gives you a list containing the names of all stored
values as strings.
.namePaths
Gives you a dict where the keys are the names and
the value is the path of that value database file
.length
The length of names stored in this table
'''
# path of the root of the cache, this is where files
# will be stored on the system
self.path=path
# create the paths if they do not exist
if not pathExists(self.path):
makedirs(self.path)
debug.add('table path',self.path)
# the path prefix is for tables stored in tables
self.pathPrefix=''
# tables are stored as files
tempTable=[]
# array of all the value names stored on table
namesPath=pathJoin(self.path,'names.table')
# if no namepaths exist create them
if not pathExists(pathJoin(namesPath)):
# write the new value to the system
writeFile(namesPath,pickle(dict()))
# load the name paths
self.namePaths=unpickle(loadFile(namesPath))
debug.add('self.namePaths',self.namePaths)
# create a array of all the names of values stored
self.names=self.namePaths.keys()
debug.add('self.names',self.names)
# length of all the values stored on the table
self.length=len(self.names)
debug.add('self.length',self.length)
# the protected list is a array of names that are
# protected from limit cleaning
protectedPath=pathJoin(self.path,'protected.table')
if pathExists(pathJoin(protectedPath)):
# load the list
self.protectedList=unpickle(loadFile(protectedPath))
else:
# create a blank list
self.protectedList=[]
# limit value to limit the number of values
# load the limit value from file if it exists
limitPath=pathJoin(self.path,'limit.table')
if pathExists(pathJoin(limitPath)):
self.limit=unpickle(loadFile(limitPath))
else:
self.limit=None
################################################################################
def reset(self):
'''
Delete all stored values stored in the table.
'''
for value in self.names:
self.deleteValue(value)
################################################################################
def setProtected(self,name):
'''
Set a name in the table to be protected from removal
because of limits.
'''
# generate the filepath to the protected values
# list
filePath=pathJoin(self.path,'protected.table')
# check if the path exists
if pathExists(filePath):
# read the protected list from the file
protectedList=unpickle(loadFile(filePath))
else:
# create the list and append the name
protectedList=[]
		# append the new value to the list
		protectedList.append(name)
		# keep the in-memory copy in sync with what is written to disk
		self.protectedList=protectedList
# pickle the protected list for storage
protectedList=pickle(protectedList)
# write the changes back to the protected list
writeFile(filePath,protectedList)
################################################################################
def setLimit(self,limit):
'''
Set the limit of values that are stored in this table.
This ignores protected values.
'''
# write the limit value to the limit file in the table
filePath=pathJoin(self.path,'limit.table')
# set the limit in this instance
self.limit=limit
		# write the new limit back to the storage, pickled so that
		# __init__ can unpickle it when the table is reopened
		success=writeFile(filePath,pickle(limit))
return success
################################################################################
def checkLimits(self):
if self.limit is not None and\
			self.length - len(self.protectedList) > self.limit:
deathList=[]
for name in self.names:
if name not in self.protectedList:
deathList.append(name)
# randomly pick a value to delete
# TODO: create table metadata to dertermine the
# time that values were added to the table
# and remove the oldest value when limits
# have been exceeded
deathMark=choice(deathList)
# delete the value
if self.deleteValue(deathMark) is False:
return False
# successfully removed item or no items needed
# to be removed
return True
################################################################################
def loadValue(self,name):
'''
Loads a saved value and returns it.
'''
# find the file path in the names array
if name in self.names:
filePath=self.namePaths[name]
else:
return False
# check if the path exists
if pathExists(filePath):
# load the data
fileData=loadFile(filePath)
else:
# return false if the value does not exist
return False
# unpickle the filedata
fileData = unpickle(fileData)
debug.add('loading value '+str(name),fileData)
# returns the value of a table stored on disk
return fileData
################################################################################
def saveValue(self,name,value):
'''
Save a value with the name name and the value value.
'''
debug.add('saving value '+str(name),value)
# create a file assocation for the name to store the value
if name not in self.names:
debug.add('name not in self.names')
# create a counter for the generated filename
counter=0
# seed value for while loop
newName = (str(counter)+'.value')
# find a filename that does not already exist in
# the database directory
while newName in listdir(self.path):
# increment the counter
counter+=1
# iterate the value
newName=(str(counter)+'.value')
debug.add('newname',newName)
# set the metadata value for the filepaths in this table instance
self.namePaths[name]=pathJoin(self.path,newName)
# write the newly created name assocation to table metadata on disk
writeFile(pathJoin(self.path,'names.table'),pickle(self.namePaths))
debug.add('namePaths',self.namePaths)
# update the length and names attributes
self.names=self.namePaths.keys()
self.length=len(self.names)
# saves a table changes back onto the disk
fileData=writeFile(self.namePaths[name],pickle(value))
return fileData
################################################################################
def deleteValue(self,name):
'''
Delete a value with name name.
'''
# clean up names to avoid stupid
debug.add('deleting value ',name)
# figure out the path to the named value file
if name in self.names:
filePath=self.namePaths[name]
# remove the metadata entry
del self.namePaths[name]
# write changes to database metadata file
writeFile(pathJoin(self.path,'names.table'),pickle(self.namePaths))
# update the length and names attributes
self.names=self.namePaths.keys()
self.length=len(self.names)
else:
return False
if pathExists(filePath):
# remove the file accocated with the value
removeFile(filePath)
return True
else:
return False
################################################################################
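# Example usage (a sketch; the cache directory below is hypothetical):
#
#	cache=table('/tmp/exampleCache')
#	cache.saveValue('greeting',{'text': 'hello'})
#	cache.loadValue('greeting')	# -> {'text': 'hello'}
#	cache.setLimit(100)
#	cache.deleteValue('greeting')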
|
gpl-3.0
| -4,693,631,649,817,199,000
| 34.953191
| 81
| 0.644928
| false
| 3.961088
| false
| false
| false
|
CubicERP/odoo
|
addons/sale/sale.py
|
1
|
70307
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from datetime import datetime, timedelta
import time
from openerp.osv import fields, osv
from openerp.tools.translate import _
from openerp.tools import DEFAULT_SERVER_DATE_FORMAT, DEFAULT_SERVER_DATETIME_FORMAT
import openerp.addons.decimal_precision as dp
from openerp import workflow
class res_company(osv.Model):
_inherit = "res.company"
_columns = {
'sale_note': fields.text('Default Terms and Conditions', translate=True, help="Default terms and conditions for quotations."),
}
class sale_order(osv.osv):
_name = "sale.order"
_inherit = ['mail.thread', 'ir.needaction_mixin']
_description = "Sales Order"
_track = {
'state': {
'sale.mt_order_confirmed': lambda self, cr, uid, obj, ctx=None: obj.state in ['manual'],
'sale.mt_order_sent': lambda self, cr, uid, obj, ctx=None: obj.state in ['sent']
},
}
def _amount_line_tax(self, cr, uid, line, context=None):
val = 0.0
for c in self.pool.get('account.tax').compute_all(cr, uid, line.tax_id, line.price_unit * (1-(line.discount or 0.0)/100.0), line.product_uom_qty, line.product_id, line.order_id.partner_id)['taxes']:
val += c.get('amount', 0.0)
return val
def _amount_all_wrapper(self, cr, uid, ids, field_name, arg, context=None):
""" Wrapper because of direct method passing as parameter for function fields """
return self._amount_all(cr, uid, ids, field_name, arg, context=context)
def _amount_all(self, cr, uid, ids, field_name, arg, context=None):
cur_obj = self.pool.get('res.currency')
res = {}
for order in self.browse(cr, uid, ids, context=context):
res[order.id] = {
'amount_untaxed': 0.0,
'amount_tax': 0.0,
'amount_total': 0.0,
}
val = val1 = 0.0
cur = order.pricelist_id.currency_id
for line in order.order_line:
val1 += line.price_subtotal
val += self._amount_line_tax(cr, uid, line, context=context)
res[order.id]['amount_tax'] = cur_obj.round(cr, uid, cur, val)
res[order.id]['amount_untaxed'] = cur_obj.round(cr, uid, cur, val1)
res[order.id]['amount_total'] = res[order.id]['amount_untaxed'] + res[order.id]['amount_tax']
return res
def _invoiced_rate(self, cursor, user, ids, name, arg, context=None):
res = {}
for sale in self.browse(cursor, user, ids, context=context):
if sale.invoiced:
res[sale.id] = 100.0
continue
tot = 0.0
for invoice in sale.invoice_ids:
if invoice.state not in ('draft', 'cancel'):
tot += invoice.amount_untaxed
if tot:
res[sale.id] = min(100.0, tot * 100.0 / (sale.amount_untaxed or 1.00))
else:
res[sale.id] = 0.0
return res
def _invoice_exists(self, cursor, user, ids, name, arg, context=None):
res = {}
for sale in self.browse(cursor, user, ids, context=context):
res[sale.id] = False
if sale.invoice_ids:
res[sale.id] = True
return res
def _invoiced(self, cursor, user, ids, name, arg, context=None):
res = {}
for sale in self.browse(cursor, user, ids, context=context):
res[sale.id] = True
invoice_existence = False
for invoice in sale.invoice_ids:
if invoice.state!='cancel':
invoice_existence = True
if invoice.state != 'paid':
res[sale.id] = False
break
if not invoice_existence or sale.state == 'manual':
res[sale.id] = False
return res
def _invoiced_search(self, cursor, user, obj, name, args, context=None):
if not len(args):
return []
clause = ''
sale_clause = ''
no_invoiced = False
for arg in args:
if (arg[1] == '=' and arg[2]) or (arg[1] == '!=' and not arg[2]):
clause += 'AND inv.state = \'paid\''
else:
clause += 'AND inv.state != \'cancel\' AND sale.state != \'cancel\' AND inv.state <> \'paid\' AND rel.order_id = sale.id '
sale_clause = ', sale_order AS sale '
no_invoiced = True
cursor.execute('SELECT rel.order_id ' \
'FROM sale_order_invoice_rel AS rel, account_invoice AS inv '+ sale_clause + \
'WHERE rel.invoice_id = inv.id ' + clause)
res = cursor.fetchall()
if no_invoiced:
cursor.execute('SELECT sale.id ' \
'FROM sale_order AS sale ' \
'WHERE sale.id NOT IN ' \
'(SELECT rel.order_id ' \
'FROM sale_order_invoice_rel AS rel) and sale.state != \'cancel\'')
res.extend(cursor.fetchall())
if not res:
return [('id', '=', 0)]
return [('id', 'in', [x[0] for x in res])]
def _get_order(self, cr, uid, ids, context=None):
result = {}
for line in self.pool.get('sale.order.line').browse(cr, uid, ids, context=context):
result[line.order_id.id] = True
return result.keys()
def _get_default_company(self, cr, uid, context=None):
company_id = self.pool.get('res.users')._get_company(cr, uid, context=context)
if not company_id:
raise osv.except_osv(_('Error!'), _('There is no default company for the current user!'))
return company_id
def _get_default_section_id(self, cr, uid, context=None):
""" Gives default section by checking if present in the context """
section_id = self._resolve_section_id_from_context(cr, uid, context=context) or False
if not section_id:
section_id = self.pool.get('res.users').browse(cr, uid, uid, context).default_section_id.id or False
return section_id
def _resolve_section_id_from_context(self, cr, uid, context=None):
""" Returns ID of section based on the value of 'section_id'
context key, or None if it cannot be resolved to a single
Sales Team.
"""
if context is None:
context = {}
if type(context.get('default_section_id')) in (int, long):
return context.get('default_section_id')
if isinstance(context.get('default_section_id'), basestring):
section_ids = self.pool.get('crm.case.section').name_search(cr, uid, name=context['default_section_id'], context=context)
if len(section_ids) == 1:
return int(section_ids[0][0])
return None
_columns = {
'name': fields.char('Order Reference', required=True, copy=False,
readonly=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}, select=True),
'origin': fields.char('Source Document', help="Reference of the document that generated this sales order request."),
'client_order_ref': fields.char('Reference/Description', copy=False),
'state': fields.selection([
('draft', 'Draft Quotation'),
('confirm', 'Confirmed'),
('sent', 'Quotation Sent'),
('cancel', 'Cancelled'),
('waiting_date', 'Waiting Schedule'),
('progress', 'Sales Order'),
('manual', 'Sale to Invoice'),
('shipping_except', 'Shipping Exception'),
('invoice_except', 'Invoice Exception'),
('done', 'Done'),
], 'Status', readonly=True, copy=False, help="Gives the status of the quotation or sales order.\
\nThe exception status is automatically set when a cancel operation occurs \
in the invoice validation (Invoice Exception) or in the picking list process (Shipping Exception).\nThe 'Waiting Schedule' status is set when the invoice is confirmed\
but waiting for the scheduler to run on the order date.", select=True),
'date_order': fields.datetime('Date', required=True, readonly=True, select=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}, copy=False),
'create_date': fields.datetime('Creation Date', readonly=True, select=True, help="Date on which sales order is created."),
'date_confirm': fields.date('Confirmation Date', readonly=True, select=True, help="Date on which sales order is confirmed.", copy=False),
'user_id': fields.many2one('res.users', 'Salesperson', states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}, select=True, track_visibility='onchange'),
'partner_id': fields.many2one('res.partner', 'Customer', readonly=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}, required=True, change_default=True, select=True, track_visibility='always'),
'partner_invoice_id': fields.many2one('res.partner', 'Invoice Address', readonly=True, required=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}, help="Invoice address for current sales order."),
'partner_shipping_id': fields.many2one('res.partner', 'Delivery Address', readonly=True, required=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}, help="Delivery address for current sales order."),
'order_policy': fields.selection([
('manual', 'On Demand'),
], 'Create Invoice', required=True, readonly=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]},
help="""This field controls how invoice and delivery operations are synchronized."""),
'pricelist_id': fields.many2one('product.pricelist', 'Pricelist', required=True, readonly=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}, help="Pricelist for current sales order."),
'currency_id': fields.related('pricelist_id', 'currency_id', type="many2one", relation="res.currency", string="Currency", readonly=True, required=True),
'project_id': fields.many2one('account.analytic.account', 'Contract / Analytic', readonly=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}, help="The analytic account related to a sales order."),
'order_line': fields.one2many('sale.order.line', 'order_id', 'Order Lines', readonly=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}, copy=True),
'invoice_ids': fields.many2many('account.invoice', 'sale_order_invoice_rel', 'order_id', 'invoice_id', 'Invoices', readonly=True, copy=False, help="This is the list of invoices that have been generated for this sales order. The same sales order may have been invoiced in several times (by line for example)."),
'invoiced_rate': fields.function(_invoiced_rate, string='Invoiced Ratio', type='float'),
'invoiced': fields.function(_invoiced, string='Paid',
fnct_search=_invoiced_search, type='boolean', help="It indicates that an invoice has been paid."),
'invoice_exists': fields.function(_invoice_exists, string='Invoiced',
fnct_search=_invoiced_search, type='boolean', help="It indicates that sales order has at least one invoice."),
'note': fields.text('Terms and conditions'),
'amount_untaxed': fields.function(_amount_all_wrapper, digits_compute=dp.get_precision('Account'), string='Untaxed Amount',
store={
'sale.order': (lambda self, cr, uid, ids, c={}: ids, ['order_line'], 10),
'sale.order.line': (_get_order, ['price_unit', 'tax_id', 'discount', 'product_uom_qty'], 10),
},
multi='sums', help="The amount without tax.", track_visibility='always'),
'amount_tax': fields.function(_amount_all_wrapper, digits_compute=dp.get_precision('Account'), string='Taxes',
store={
'sale.order': (lambda self, cr, uid, ids, c={}: ids, ['order_line'], 10),
'sale.order.line': (_get_order, ['price_unit', 'tax_id', 'discount', 'product_uom_qty'], 10),
},
multi='sums', help="The tax amount."),
'amount_total': fields.function(_amount_all_wrapper, digits_compute=dp.get_precision('Account'), string='Total',
store={
'sale.order': (lambda self, cr, uid, ids, c={}: ids, ['order_line'], 10),
'sale.order.line': (_get_order, ['price_unit', 'tax_id', 'discount', 'product_uom_qty'], 10),
},
multi='sums', help="The total amount."),
'payment_term': fields.many2one('account.payment.term', 'Payment Term'),
'fiscal_position': fields.many2one('account.fiscal.position', 'Fiscal Position'),
'company_id': fields.many2one('res.company', 'Company'),
'section_id': fields.many2one('crm.case.section', 'Sales Team'),
'procurement_group_id': fields.many2one('procurement.group', 'Procurement group', copy=False),
'product_id': fields.related('order_line', 'product_id', type='many2one', relation='product.product', string='Product'),
}
_defaults = {
'date_order': fields.datetime.now,
'order_policy': 'manual',
'company_id': _get_default_company,
'state': 'draft',
'user_id': lambda obj, cr, uid, context: uid,
'name': lambda obj, cr, uid, context: '/',
'partner_invoice_id': lambda self, cr, uid, context: context.get('partner_id', False) and self.pool.get('res.partner').address_get(cr, uid, [context['partner_id']], ['invoice'])['invoice'],
'partner_shipping_id': lambda self, cr, uid, context: context.get('partner_id', False) and self.pool.get('res.partner').address_get(cr, uid, [context['partner_id']], ['delivery'])['delivery'],
'note': lambda self, cr, uid, context: self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id.sale_note,
'section_id': lambda s, cr, uid, c: s._get_default_section_id(cr, uid, c),
}
_sql_constraints = [
('name_uniq', 'unique(name, company_id)', 'Order Reference must be unique per Company!'),
]
_order = 'date_order desc, id desc'
# Form filling
def unlink(self, cr, uid, ids, context=None):
sale_orders = self.read(cr, uid, ids, ['state'], context=context)
unlink_ids = []
for s in sale_orders:
if s['state'] in ['draft', 'cancel']:
unlink_ids.append(s['id'])
else:
raise osv.except_osv(_('Invalid Action!'), _('In order to delete a confirmed sales order, you must cancel it before!'))
return osv.osv.unlink(self, cr, uid, unlink_ids, context=context)
def copy_quotation(self, cr, uid, ids, context=None):
id = self.copy(cr, uid, ids[0], context=context)
view_ref = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'sale', 'view_order_form')
        view_id = view_ref and view_ref[1] or False
return {
'type': 'ir.actions.act_window',
'name': _('Sales Order'),
'res_model': 'sale.order',
'res_id': id,
'view_type': 'form',
'view_mode': 'form',
'view_id': view_id,
'target': 'current',
'nodestroy': True,
}
def onchange_pricelist_id(self, cr, uid, ids, pricelist_id, order_lines, context=None):
context = context or {}
if not pricelist_id:
return {}
value = {
'currency_id': self.pool.get('product.pricelist').browse(cr, uid, pricelist_id, context=context).currency_id.id
}
if not order_lines or order_lines == [(6, 0, [])]:
return {'value': value}
warning = {
'title': _('Pricelist Warning!'),
'message' : _('If you change the pricelist of this order (and eventually the currency), prices of existing order lines will not be updated.')
}
return {'warning': warning, 'value': value}
def get_salenote(self, cr, uid, ids, partner_id, context=None):
context_lang = context.copy()
if partner_id:
partner_lang = self.pool.get('res.partner').browse(cr, uid, partner_id, context=context).lang
context_lang.update({'lang': partner_lang})
return self.pool.get('res.users').browse(cr, uid, uid, context=context_lang).company_id.sale_note
def onchange_delivery_id(self, cr, uid, ids, company_id, partner_id, delivery_id, fiscal_position, context=None):
r = {'value': {}}
if not fiscal_position:
if not company_id:
company_id = self._get_default_company(cr, uid, context=context)
fiscal_position = self.pool['account.fiscal.position'].get_fiscal_position(cr, uid, company_id, partner_id, delivery_id, context=context)
if fiscal_position:
r['value']['fiscal_position'] = fiscal_position
return r
def onchange_partner_id(self, cr, uid, ids, part, context=None):
if not part:
return {'value': {'partner_invoice_id': False, 'partner_shipping_id': False, 'payment_term': False, 'fiscal_position': False}}
part = self.pool.get('res.partner').browse(cr, uid, part, context=context)
addr = self.pool.get('res.partner').address_get(cr, uid, [part.id], ['delivery', 'invoice', 'contact'])
pricelist = part.property_product_pricelist and part.property_product_pricelist.id or False
payment_term = part.property_payment_term and part.property_payment_term.id or False
dedicated_salesman = part.user_id and part.user_id.id or uid
val = {
'partner_invoice_id': addr['invoice'],
'partner_shipping_id': addr['delivery'],
'payment_term': payment_term,
'user_id': dedicated_salesman,
}
delivery_onchange = self.onchange_delivery_id(cr, uid, ids, False, part.id, addr['delivery'], False, context=context)
val.update(delivery_onchange['value'])
if pricelist:
val['pricelist_id'] = pricelist
sale_note = self.get_salenote(cr, uid, ids, part.id, context=context)
if sale_note: val.update({'note': sale_note})
return {'value': val}
def create(self, cr, uid, vals, context=None):
if context is None:
context = {}
if vals.get('name', '/') == '/':
vals['name'] = self.pool.get('ir.sequence').get(cr, uid, 'sale.order') or '/'
if vals.get('partner_id') and any(f not in vals for f in ['partner_invoice_id', 'partner_shipping_id', 'pricelist_id', 'fiscal_position']):
defaults = self.onchange_partner_id(cr, uid, [], vals['partner_id'], context=context)['value']
if not vals.get('fiscal_position') and vals.get('partner_shipping_id'):
delivery_onchange = self.onchange_delivery_id(cr, uid, [], vals.get('company_id'), None, vals['partner_id'], vals.get('partner_shipping_id'), context=context)
defaults.update(delivery_onchange['value'])
vals = dict(defaults, **vals)
ctx = dict(context or {}, mail_create_nolog=True)
new_id = super(sale_order, self).create(cr, uid, vals, context=ctx)
self.message_post(cr, uid, [new_id], body=_("Quotation created"), context=ctx)
return new_id
def button_dummy(self, cr, uid, ids, context=None):
return True
# FIXME: deprecated method, overriders should be using _prepare_invoice() instead.
# can be removed after 6.1.
def _inv_get(self, cr, uid, order, context=None):
return {}
def _prepare_invoice(self, cr, uid, order, lines, context=None):
"""Prepare the dict of values to create the new invoice for a
sales order. This method may be overridden to implement custom
invoice generation (making sure to call super() to establish
a clean extension chain).
:param browse_record order: sale.order record to invoice
:param list(int) line: list of invoice line IDs that must be
attached to the invoice
:return: dict of value to create() the invoice
"""
if context is None:
context = {}
journal_ids = self.pool.get('account.journal').search(cr, uid,
[('type', '=', 'sale'), ('company_id', '=', order.company_id.id)],
limit=1)
if not journal_ids:
raise osv.except_osv(_('Error!'),
_('Please define sales journal for this company: "%s" (id:%d).') % (order.company_id.name, order.company_id.id))
invoice_vals = {
'name': order.client_order_ref or '',
'origin': order.name,
'type': 'out_invoice',
'reference': order.client_order_ref or order.name,
'account_id': order.partner_id.property_account_receivable.id,
'partner_id': order.partner_invoice_id.id,
'journal_id': journal_ids[0],
'invoice_line': [(6, 0, lines)],
'currency_id': order.pricelist_id.currency_id.id,
'comment': order.note,
'payment_term': order.payment_term and order.payment_term.id or False,
'fiscal_position': order.fiscal_position.id or order.partner_id.property_account_position.id,
'date_invoice': context.get('date_invoice', False),
'company_id': order.company_id.id,
'user_id': order.user_id and order.user_id.id or False,
'section_id' : order.section_id.id
}
# Care for deprecated _inv_get() hook - FIXME: to be removed after 6.1
invoice_vals.update(self._inv_get(cr, uid, order, context=context))
return invoice_vals
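    # A minimal override sketch (hypothetical custom module) of the extension
    # pattern the docstring above describes -- extend the dict returned by
    # super() rather than rebuilding it:
    #   class my_sale_order(osv.osv):
    #       _inherit = 'sale.order'
    #       def _prepare_invoice(self, cr, uid, order, lines, context=None):
    #           vals = super(my_sale_order, self)._prepare_invoice(
    #               cr, uid, order, lines, context=context)
    #           vals['comment'] = (vals.get('comment') or '') + '\n(custom note)'
    #           return vals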
def _make_invoice(self, cr, uid, order, lines, context=None):
inv_obj = self.pool.get('account.invoice')
obj_invoice_line = self.pool.get('account.invoice.line')
if context is None:
context = {}
invoiced_sale_line_ids = self.pool.get('sale.order.line').search(cr, uid, [('order_id', '=', order.id), ('invoiced', '=', True)], context=context)
from_line_invoice_ids = []
for invoiced_sale_line_id in self.pool.get('sale.order.line').browse(cr, uid, invoiced_sale_line_ids, context=context):
for invoice_line_id in invoiced_sale_line_id.invoice_lines:
if invoice_line_id.invoice_id.id not in from_line_invoice_ids:
from_line_invoice_ids.append(invoice_line_id.invoice_id.id)
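        # Deduct invoices already raised against this order outside its lines
        # (e.g. an advance/deposit invoice): copy their lines onto the new
        # invoice with the unit price negated so the totals balance out.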
for preinv in order.invoice_ids:
if preinv.state not in ('cancel',) and preinv.id not in from_line_invoice_ids:
for preline in preinv.invoice_line:
inv_line_id = obj_invoice_line.copy(cr, uid, preline.id, {'invoice_id': False, 'price_unit': -preline.price_unit})
lines.append(inv_line_id)
inv = self._prepare_invoice(cr, uid, order, lines, context=context)
inv_id = inv_obj.create(cr, uid, inv, context=context)
data = inv_obj.onchange_payment_term_date_invoice(cr, uid, [inv_id], inv['payment_term'], time.strftime(DEFAULT_SERVER_DATE_FORMAT))
if data.get('value', False):
inv_obj.write(cr, uid, [inv_id], data['value'], context=context)
inv_obj.button_compute(cr, uid, [inv_id])
return inv_id
def print_quotation(self, cr, uid, ids, context=None):
'''
This function prints the sales order and mark it as sent, so that we can see more easily the next step of the workflow
'''
assert len(ids) == 1, 'This option should only be used for a single id at a time'
self.signal_workflow(cr, uid, ids, 'quotation_sent')
return self.pool['report'].get_action(cr, uid, ids, 'sale.report_saleorder', context=context)
def manual_invoice(self, cr, uid, ids, context=None):
""" create invoices for the given sales orders (ids), and open the form
view of one of the newly created invoices
"""
mod_obj = self.pool.get('ir.model.data')
# create invoices through the sales orders' workflow
inv_ids0 = set(inv.id for sale in self.browse(cr, uid, ids, context) for inv in sale.invoice_ids)
self.signal_workflow(cr, uid, ids, 'manual_invoice')
inv_ids1 = set(inv.id for sale in self.browse(cr, uid, ids, context) for inv in sale.invoice_ids)
# determine newly created invoices
new_inv_ids = list(inv_ids1 - inv_ids0)
res = mod_obj.get_object_reference(cr, uid, 'account', 'invoice_form')
        res_id = res and res[1] or False
return {
'name': _('Customer Invoices'),
'view_type': 'form',
'view_mode': 'form',
'view_id': [res_id],
'res_model': 'account.invoice',
'context': "{'type':'out_invoice'}",
'type': 'ir.actions.act_window',
'nodestroy': True,
'target': 'current',
'res_id': new_inv_ids and new_inv_ids[0] or False,
}
def action_view_invoice(self, cr, uid, ids, context=None):
'''
This function returns an action that display existing invoices of given sales order ids. It can either be a in a list or in a form view, if there is only one invoice to show.
'''
mod_obj = self.pool.get('ir.model.data')
act_obj = self.pool.get('ir.actions.act_window')
result = mod_obj.get_object_reference(cr, uid, 'account', 'action_invoice_tree1')
id = result and result[1] or False
result = act_obj.read(cr, uid, [id], context=context)[0]
#compute the number of invoices to display
inv_ids = []
for so in self.browse(cr, uid, ids, context=context):
inv_ids += [invoice.id for invoice in so.invoice_ids]
#choose the view_mode accordingly
if len(inv_ids)>1:
result['domain'] = "[('id','in',["+','.join(map(str, inv_ids))+"])]"
else:
res = mod_obj.get_object_reference(cr, uid, 'account', 'invoice_form')
result['views'] = [(res and res[1] or False, 'form')]
result['res_id'] = inv_ids and inv_ids[0] or False
return result
def test_no_product(self, cr, uid, order, context):
for line in order.order_line:
            if line.product_id and (line.product_id.type != 'service'):
return False
return True
def action_invoice_create(self, cr, uid, ids, grouped=False, states=None, date_invoice = False, context=None):
if states is None:
states = ['confirmed', 'done', 'exception']
res = False
invoices = {}
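        # `invoices` maps an invoicing partner id to the list of
        # (order, created_invoice_line_ids) pairs billed together when
        # `grouped` is set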
invoice_ids = []
invoice = self.pool.get('account.invoice')
obj_sale_order_line = self.pool.get('sale.order.line')
partner_currency = {}
        # If a date was specified, use it as the invoice date; useful when invoices are generated
        # this month but should be dated the last day of the previous month
if date_invoice:
context = dict(context or {}, date_invoice=date_invoice)
for o in self.browse(cr, uid, ids, context=context):
currency_id = o.pricelist_id.currency_id.id
            if (o.partner_id.id in partner_currency) and (partner_currency[o.partner_id.id] != currency_id):
raise osv.except_osv(
_('Error!'),
_('You cannot group sales having different currencies for the same partner.'))
partner_currency[o.partner_id.id] = currency_id
lines = []
for line in o.order_line:
if line.invoiced:
continue
elif (line.state in states):
lines.append(line.id)
created_lines = obj_sale_order_line.invoice_line_create(cr, uid, lines)
if created_lines:
invoices.setdefault(o.partner_invoice_id.id or o.partner_id.id, []).append((o, created_lines))
if not invoices:
for o in self.browse(cr, uid, ids, context=context):
for i in o.invoice_ids:
if i.state == 'draft':
return i.id
for val in invoices.values():
if grouped:
res = self._make_invoice(cr, uid, val[0][0], reduce(lambda x, y: x + y, [l for o, l in val], []), context=context)
invoice_ref = ''
origin_ref = ''
for o, l in val:
invoice_ref += (o.client_order_ref or o.name) + '|'
origin_ref += (o.origin or o.name) + '|'
self.write(cr, uid, [o.id], {'state': 'progress'})
cr.execute('insert into sale_order_invoice_rel (order_id,invoice_id) values (%s,%s)', (o.id, res))
self.invalidate_cache(cr, uid, ['invoice_ids'], [o.id], context=context)
                #remove the trailing '|' from invoice_ref and origin_ref
if len(invoice_ref) >= 1:
invoice_ref = invoice_ref[:-1]
if len(origin_ref) >= 1:
origin_ref = origin_ref[:-1]
invoice.write(cr, uid, [res], {'origin': origin_ref, 'name': invoice_ref})
else:
for order, il in val:
res = self._make_invoice(cr, uid, order, il, context=context)
invoice_ids.append(res)
self.write(cr, uid, [order.id], {'state': 'progress'})
cr.execute('insert into sale_order_invoice_rel (order_id,invoice_id) values (%s,%s)', (order.id, res))
self.invalidate_cache(cr, uid, ['invoice_ids'], [order.id], context=context)
return res
def action_invoice_cancel(self, cr, uid, ids, context=None):
self.write(cr, uid, ids, {'state': 'invoice_except'}, context=context)
return True
def action_invoice_end(self, cr, uid, ids, context=None):
for this in self.browse(cr, uid, ids, context=context):
for line in this.order_line:
if line.state == 'exception':
line.write({'state': 'confirmed'})
if this.state == 'invoice_except':
this.write({'state': 'progress'})
return True
def action_cancel(self, cr, uid, ids, context=None):
if context is None:
context = {}
sale_order_line_obj = self.pool.get('sale.order.line')
account_invoice_obj = self.pool.get('account.invoice')
procurement_obj = self.pool.get('procurement.order')
for sale in self.browse(cr, uid, ids, context=context):
for inv in sale.invoice_ids:
if inv.state not in ('draft', 'cancel'):
raise osv.except_osv(
_('Cannot cancel this sales order!'),
_('First cancel all invoices attached to this sales order.'))
inv.signal_workflow('invoice_cancel')
procurement_obj.cancel(cr, uid, sum([l.procurement_ids.ids for l in sale.order_line],[]))
sale_order_line_obj.write(cr, uid, [l.id for l in sale.order_line],
{'state': 'cancel'})
self.write(cr, uid, ids, {'state': 'cancel'})
return True
def action_button_confirm(self, cr, uid, ids, context=None):
assert len(ids) == 1, 'This option should only be used for a single id at a time.'
self.signal_workflow(cr, uid, ids, 'order_confirm')
return True
def action_wait(self, cr, uid, ids, context=None):
context = context or {}
for o in self.browse(cr, uid, ids):
if not o.order_line:
raise osv.except_osv(_('Error!'),_('You cannot confirm a sales order which has no line.'))
noprod = self.test_no_product(cr, uid, o, context)
if (o.order_policy == 'manual') or noprod:
self.write(cr, uid, [o.id], {'state': 'manual', 'date_confirm': fields.date.context_today(self, cr, uid, context=context)})
else:
self.write(cr, uid, [o.id], {'state': 'progress', 'date_confirm': fields.date.context_today(self, cr, uid, context=context)})
self.pool.get('sale.order.line').button_confirm(cr, uid, [x.id for x in o.order_line])
return True
def action_quotation_send(self, cr, uid, ids, context=None):
'''
This function opens a window to compose an email, with the edi sale template message loaded by default
'''
assert len(ids) == 1, 'This option should only be used for a single id at a time.'
ir_model_data = self.pool.get('ir.model.data')
try:
template_id = ir_model_data.get_object_reference(cr, uid, 'sale', 'email_template_edi_sale')[1]
except ValueError:
template_id = False
try:
compose_form_id = ir_model_data.get_object_reference(cr, uid, 'mail', 'email_compose_message_wizard_form')[1]
except ValueError:
compose_form_id = False
ctx = dict()
ctx.update({
'default_model': 'sale.order',
'default_res_id': ids[0],
'default_use_template': bool(template_id),
'default_template_id': template_id,
'default_composition_mode': 'comment',
'mark_so_as_sent': True
})
return {
'type': 'ir.actions.act_window',
'view_type': 'form',
'view_mode': 'form',
'res_model': 'mail.compose.message',
'views': [(compose_form_id, 'form')],
'view_id': compose_form_id,
'target': 'new',
'context': ctx,
}
def action_done(self, cr, uid, ids, context=None):
for order in self.browse(cr, uid, ids, context=context):
self.pool.get('sale.order.line').write(cr, uid, [line.id for line in order.order_line], {'state': 'done'}, context=context)
return self.write(cr, uid, ids, {'state': 'done'}, context=context)
def _prepare_order_line_procurement(self, cr, uid, order, line, group_id=False, context=None):
date_planned = self._get_date_planned(cr, uid, order, line, order.date_order, context=context)
return {
'name': line.name,
'origin': order.name,
'date_planned': date_planned,
'product_id': line.product_id.id,
'product_qty': line.product_uom_qty,
'product_uom': line.product_uom.id,
'product_uos_qty': (line.product_uos and line.product_uos_qty) or line.product_uom_qty,
'product_uos': (line.product_uos and line.product_uos.id) or line.product_uom.id,
'company_id': order.company_id.id,
'group_id': group_id,
'invoice_state': (order.order_policy == 'picking') and '2binvoiced' or 'none',
'sale_line_id': line.id
}
def _get_date_planned(self, cr, uid, order, line, start_date, context=None):
date_planned = datetime.strptime(start_date, DEFAULT_SERVER_DATETIME_FORMAT) + timedelta(days=line.delay or 0.0)
return date_planned
def _prepare_procurement_group(self, cr, uid, order, context=None):
return {'name': order.name, 'partner_id': order.partner_shipping_id.id}
def procurement_needed(self, cr, uid, ids, context=None):
        # With only the `sale` module installed there is no need to create procurements;
        # modules installed on top of it (sale_service, sale_stock) change this behaviour.
sale_line_obj = self.pool.get('sale.order.line')
res = []
for order in self.browse(cr, uid, ids, context=context):
res.append(sale_line_obj.need_procurement(cr, uid, [line.id for line in order.order_line], context=context))
return any(res)
def action_ignore_delivery_exception(self, cr, uid, ids, context=None):
        for sale_order in self.browse(cr, uid, ids, context=context):
            self.write(cr, uid, [sale_order.id], {'state': 'progress' if sale_order.invoice_exists else 'manual'}, context=context)
return True
def action_ship_create(self, cr, uid, ids, context=None):
"""Create the required procurements to supply sales order lines, also connecting
the procurements to appropriate stock moves in order to bring the goods to the
sales order's requested location.
:return: True
"""
context = context or {}
context['lang'] = self.pool['res.users'].browse(cr, uid, uid).lang
procurement_obj = self.pool.get('procurement.order')
sale_line_obj = self.pool.get('sale.order.line')
for order in self.browse(cr, uid, ids, context=context):
proc_ids = []
vals = self._prepare_procurement_group(cr, uid, order, context=context)
if not order.procurement_group_id:
group_id = self.pool.get("procurement.group").create(cr, uid, vals, context=context)
order.write({'procurement_group_id': group_id})
for line in order.order_line:
                #Try to fix exception procurements (possible when, after a shipping exception, the user chooses to recreate)
if line.procurement_ids:
#first check them to see if they are in exception or not (one of the related moves is cancelled)
procurement_obj.check(cr, uid, [x.id for x in line.procurement_ids if x.state not in ['cancel', 'done']])
line.refresh()
#run again procurement that are in exception in order to trigger another move
except_proc_ids = [x.id for x in line.procurement_ids if x.state in ('exception', 'cancel')]
procurement_obj.reset_to_confirmed(cr, uid, except_proc_ids, context=context)
proc_ids += except_proc_ids
elif sale_line_obj.need_procurement(cr, uid, [line.id], context=context):
if (line.state == 'done') or not line.product_id:
continue
vals = self._prepare_order_line_procurement(cr, uid, order, line, group_id=order.procurement_group_id.id, context=context)
ctx = context.copy()
ctx['procurement_autorun_defer'] = True
proc_id = procurement_obj.create(cr, uid, vals, context=ctx)
proc_ids.append(proc_id)
#Confirm procurement order such that rules will be applied on it
#note that the workflow normally ensure proc_ids isn't an empty list
procurement_obj.run(cr, uid, proc_ids, context=context)
            #if shipping was in exception and the user chose to recreate the delivery order, write the new status of the SO
if order.state == 'shipping_except':
val = {'state': 'progress', 'shipped': False}
if (order.order_policy == 'manual'):
for line in order.order_line:
if (not line.invoiced) and (line.state not in ('cancel', 'draft')):
val['state'] = 'manual'
break
order.write(val)
return True
def onchange_fiscal_position(self, cr, uid, ids, fiscal_position, order_lines, context=None):
'''Update taxes of order lines for each line where a product is defined
:param list ids: not used
:param int fiscal_position: sale order fiscal position
:param list order_lines: command list for one2many write method
'''
order_line = []
fiscal_obj = self.pool.get('account.fiscal.position')
product_obj = self.pool.get('product.product')
line_obj = self.pool.get('sale.order.line')
fpos = False
if fiscal_position:
fpos = fiscal_obj.browse(cr, uid, fiscal_position, context=context)
for line in order_lines:
# create (0, 0, { fields })
# update (1, ID, { fields })
if line[0] in [0, 1]:
prod = None
if line[2].get('product_id'):
prod = product_obj.browse(cr, uid, line[2]['product_id'], context=context)
elif line[1]:
prod = line_obj.browse(cr, uid, line[1], context=context).product_id
if prod and prod.taxes_id:
line[2]['tax_id'] = [[6, 0, fiscal_obj.map_tax(cr, uid, fpos, prod.taxes_id)]]
order_line.append(line)
# link (4, ID)
# link all (6, 0, IDS)
elif line[0] in [4, 6]:
line_ids = line[0] == 4 and [line[1]] or line[2]
for line_id in line_ids:
prod = line_obj.browse(cr, uid, line_id, context=context).product_id
if prod and prod.taxes_id:
order_line.append([1, line_id, {'tax_id': [[6, 0, fiscal_obj.map_tax(cr, uid, fpos, prod.taxes_id)]]}])
else:
order_line.append([4, line_id])
else:
order_line.append(line)
return {'value': {'order_line': order_line}}
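    # For reference, a hypothetical `order_lines` command list as a client
    # could send it (the one2many write protocol handled above):
    #   [(0, 0, {'product_id': 7, 'product_uom_qty': 2.0}),  # create
    #    (1, 42, {'price_unit': 10.0}),                      # update line 42
    #    (4, 43),                                            # link line 43
    #    (6, 0, [44, 45])]                                   # replace with ids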
def test_procurements_done(self, cr, uid, ids, context=None):
for sale in self.browse(cr, uid, ids, context=context):
for line in sale.order_line:
if not all([x.state == 'done' for x in line.procurement_ids]):
return False
return True
def test_procurements_except(self, cr, uid, ids, context=None):
for sale in self.browse(cr, uid, ids, context=context):
for line in sale.order_line:
if any([x.state == 'cancel' for x in line.procurement_ids]):
return True
return False
# TODO add a field price_unit_uos
# - update it on change product and unit price
# - use it in report if there is a uos
class sale_order_line(osv.osv):
def need_procurement(self, cr, uid, ids, context=None):
        # With only the `sale` module installed there is no need to create procurements;
        # modules installed on top of it (sale_service, sale_stock) change this behaviour.
prod_obj = self.pool.get('product.product')
for line in self.browse(cr, uid, ids, context=context):
if prod_obj.need_procurement(cr, uid, [line.product_id.id], context=context):
return True
return False
def _amount_line(self, cr, uid, ids, field_name, arg, context=None):
tax_obj = self.pool.get('account.tax')
cur_obj = self.pool.get('res.currency')
res = {}
if context is None:
context = {}
for line in self.browse(cr, uid, ids, context=context):
price = line.price_unit * (1 - (line.discount or 0.0) / 100.0)
taxes = tax_obj.compute_all(cr, uid, line.tax_id, price, line.product_uom_qty, line.product_id, line.order_id.partner_id)
cur = line.order_id.pricelist_id.currency_id
res[line.id] = cur_obj.round(cr, uid, cur, taxes['total'])
return res
def _get_uom_id(self, cr, uid, *args):
try:
proxy = self.pool.get('ir.model.data')
result = proxy.get_object_reference(cr, uid, 'product', 'product_uom_unit')
return result[1]
except Exception, ex:
return False
def _fnct_line_invoiced(self, cr, uid, ids, field_name, args, context=None):
res = dict.fromkeys(ids, False)
for this in self.browse(cr, uid, ids, context=context):
res[this.id] = this.invoice_lines and \
all(iline.invoice_id.state != 'cancel' for iline in this.invoice_lines)
return res
def _order_lines_from_invoice(self, cr, uid, ids, context=None):
        # direct access to the m2m table is the least convoluted way to achieve this (and is ok ACL-wise)
cr.execute("""SELECT DISTINCT sol.id FROM sale_order_invoice_rel rel JOIN
sale_order_line sol ON (sol.order_id = rel.order_id)
WHERE rel.invoice_id = ANY(%s)""", (list(ids),))
return [i[0] for i in cr.fetchall()]
def _get_price_reduce(self, cr, uid, ids, field_name, arg, context=None):
res = dict.fromkeys(ids, 0.0)
        for line in self.browse(cr, uid, ids, context=context):
            if line.product_uom_qty:
                res[line.id] = line.price_subtotal / line.product_uom_qty
return res
_name = 'sale.order.line'
_description = 'Sales Order Line'
_columns = {
'order_id': fields.many2one('sale.order', 'Order Reference', required=True, ondelete='cascade', select=True, readonly=True, states={'draft':[('readonly',False)]}),
'name': fields.text('Description', required=True, readonly=True, states={'draft': [('readonly', False)]}),
'sequence': fields.integer('Sequence', help="Gives the sequence order when displaying a list of sales order lines."),
'product_id': fields.many2one('product.product', 'Product', domain=[('sale_ok', '=', True)], change_default=True, readonly=True, states={'draft': [('readonly', False)]}, ondelete='restrict'),
'invoice_lines': fields.many2many('account.invoice.line', 'sale_order_line_invoice_rel', 'order_line_id', 'invoice_id', 'Invoice Lines', readonly=True, copy=False),
'invoiced': fields.function(_fnct_line_invoiced, string='Invoiced', type='boolean',
store={
'account.invoice': (_order_lines_from_invoice, ['state'], 10),
'sale.order.line': (lambda self,cr,uid,ids,ctx=None: ids, ['invoice_lines'], 10)
}),
'price_unit': fields.float('Unit Price', required=True, digits_compute= dp.get_precision('Product Price'), readonly=True, states={'draft': [('readonly', False)]}),
'price_subtotal': fields.function(_amount_line, string='Subtotal', digits_compute= dp.get_precision('Account')),
'price_reduce': fields.function(_get_price_reduce, type='float', string='Price Reduce', digits_compute=dp.get_precision('Product Price')),
'tax_id': fields.many2many('account.tax', 'sale_order_tax', 'order_line_id', 'tax_id', 'Taxes', readonly=True, states={'draft': [('readonly', False)]}),
'address_allotment_id': fields.many2one('res.partner', 'Allotment Partner',help="A partner to whom the particular product needs to be allotted."),
'product_uom_qty': fields.float('Quantity', digits_compute= dp.get_precision('Product UoS'), required=True, readonly=True, states={'draft': [('readonly', False)]}),
'product_uom': fields.many2one('product.uom', 'Unit of Measure ', required=True, readonly=True, states={'draft': [('readonly', False)]}),
'product_uos_qty': fields.float('Quantity (UoS)' ,digits_compute= dp.get_precision('Product UoS'), readonly=True, states={'draft': [('readonly', False)]}),
'product_uos': fields.many2one('product.uom', 'Product UoS'),
'discount': fields.float('Discount (%)', digits_compute= dp.get_precision('Discount'), readonly=True, states={'draft': [('readonly', False)]}),
'th_weight': fields.float('Weight', readonly=True, states={'draft': [('readonly', False)]}),
'state': fields.selection(
[('cancel', 'Cancelled'),('draft', 'Draft'),('confirmed', 'Confirmed'),('exception', 'Exception'),('done', 'Done')],
'Status', required=True, readonly=True, copy=False,
help='* The \'Draft\' status is set when the related sales order in draft status. \
\n* The \'Confirmed\' status is set when the related sales order is confirmed. \
\n* The \'Exception\' status is set when the related sales order is set as exception. \
\n* The \'Done\' status is set when the sales order line has been picked. \
\n* The \'Cancelled\' status is set when a user cancel the sales order related.'),
'order_partner_id': fields.related('order_id', 'partner_id', type='many2one', relation='res.partner', store=True, string='Customer'),
'salesman_id':fields.related('order_id', 'user_id', type='many2one', relation='res.users', store=True, string='Salesperson'),
'company_id': fields.related('order_id', 'company_id', type='many2one', relation='res.company', string='Company', store=True, readonly=True),
'delay': fields.float('Delivery Lead Time', required=True, help="Number of days between the order confirmation and the shipping of the products to the customer", readonly=True, states={'draft': [('readonly', False)]}),
'procurement_ids': fields.one2many('procurement.order', 'sale_line_id', 'Procurements'),
}
_order = 'order_id desc, sequence, id'
_defaults = {
'product_uom' : _get_uom_id,
'discount': 0.0,
'product_uom_qty': 1,
'product_uos_qty': 1,
'sequence': 10,
'state': 'draft',
'price_unit': 0.0,
'delay': 0.0,
}
def _get_line_qty(self, cr, uid, line, context=None):
if line.product_uos:
return line.product_uos_qty or 0.0
return line.product_uom_qty
def _get_line_uom(self, cr, uid, line, context=None):
if line.product_uos:
return line.product_uos.id
return line.product_uom.id
def _prepare_order_line_invoice_line(self, cr, uid, line, account_id=False, context=None):
"""Prepare the dict of values to create the new invoice line for a
sales order line. This method may be overridden to implement custom
invoice generation (making sure to call super() to establish
a clean extension chain).
:param browse_record line: sale.order.line record to invoice
:param int account_id: optional ID of a G/L account to force
(this is used for returning products including service)
:return: dict of values to create() the invoice line
"""
res = {}
if not line.invoiced:
if not account_id:
if line.product_id:
account_id = line.product_id.property_account_income.id
if not account_id:
account_id = line.product_id.categ_id.property_account_income_categ.id
if not account_id:
raise osv.except_osv(_('Error!'),
_('Please define income account for this product: "%s" (id:%d).') % \
(line.product_id.name, line.product_id.id,))
else:
prop = self.pool.get('ir.property').get(cr, uid,
'property_account_income_categ', 'product.category',
context=context)
account_id = prop and prop.id or False
uosqty = self._get_line_qty(cr, uid, line, context=context)
uos_id = self._get_line_uom(cr, uid, line, context=context)
pu = 0.0
if uosqty:
pu = round(line.price_unit * line.product_uom_qty / uosqty,
self.pool.get('decimal.precision').precision_get(cr, uid, 'Product Price'))
fpos = line.order_id.fiscal_position or False
account_id = self.pool.get('account.fiscal.position').map_account(cr, uid, fpos, account_id)
if not account_id:
raise osv.except_osv(_('Error!'),
_('There is no Fiscal Position defined or Income category account defined for default properties of Product categories.'))
res = {
'name': line.name,
'sequence': line.sequence,
'origin': line.order_id.name,
'account_id': account_id,
'price_unit': pu,
'quantity': uosqty,
'discount': line.discount,
'uos_id': uos_id,
'product_id': line.product_id.id or False,
'invoice_line_tax_id': [(6, 0, [x.id for x in line.tax_id])],
'account_analytic_id': line.order_id.project_id and line.order_id.project_id.id or False,
}
return res
def invoice_line_create(self, cr, uid, ids, context=None):
if context is None:
context = {}
create_ids = []
sales = set()
for line in self.browse(cr, uid, ids, context=context):
vals = self._prepare_order_line_invoice_line(cr, uid, line, False, context)
if vals:
inv_id = self.pool.get('account.invoice.line').create(cr, uid, vals, context=context)
self.write(cr, uid, [line.id], {'invoice_lines': [(4, inv_id)]}, context=context)
sales.add(line.order_id.id)
create_ids.append(inv_id)
# Trigger workflow events
for sale_id in sales:
workflow.trg_write(uid, 'sale.order', sale_id, cr)
return create_ids
def button_cancel(self, cr, uid, ids, context=None):
for line in self.browse(cr, uid, ids, context=context):
if line.invoiced:
raise osv.except_osv(_('Invalid Action!'), _('You cannot cancel a sales order line that has already been invoiced.'))
return self.write(cr, uid, ids, {'state': 'cancel'})
def button_confirm(self, cr, uid, ids, context=None):
return self.write(cr, uid, ids, {'state': 'confirmed'})
def button_done(self, cr, uid, ids, context=None):
res = self.write(cr, uid, ids, {'state': 'done'})
for line in self.browse(cr, uid, ids, context=context):
workflow.trg_write(uid, 'sale.order', line.order_id.id, cr)
return res
def uos_change(self, cr, uid, ids, product_uos, product_uos_qty=0, product_id=None):
product_obj = self.pool.get('product.product')
if not product_id:
return {'value': {'product_uom': product_uos,
'product_uom_qty': product_uos_qty}, 'domain': {}}
product = product_obj.browse(cr, uid, product_id)
value = {
'product_uom': product.uom_id.id,
}
# FIXME must depend on uos/uom of the product and not only of the coeff.
try:
value.update({
'product_uom_qty': product_uos_qty / product.uos_coeff,
'th_weight': product_uos_qty / product.uos_coeff * product.weight
})
except ZeroDivisionError:
pass
return {'value': value}
def create(self, cr, uid, values, context=None):
if values.get('order_id') and values.get('product_id') and any(f not in values for f in ['name', 'price_unit', 'type', 'product_uom_qty', 'product_uom']):
order = self.pool['sale.order'].read(cr, uid, values['order_id'], ['pricelist_id', 'partner_id', 'date_order', 'fiscal_position'], context=context)
defaults = self.product_id_change(cr, uid, [], order['pricelist_id'][0], values['product_id'],
qty=float(values.get('product_uom_qty', False)),
uom=values.get('product_uom', False),
qty_uos=float(values.get('product_uos_qty', False)),
uos=values.get('product_uos', False),
name=values.get('name', False),
partner_id=order['partner_id'][0],
date_order=order['date_order'],
fiscal_position=order['fiscal_position'][0] if order['fiscal_position'] else False,
flag=False, # Force name update
context=context
)['value']
if defaults.get('tax_id'):
defaults['tax_id'] = [[6, 0, defaults['tax_id']]]
values = dict(defaults, **values)
return super(sale_order_line, self).create(cr, uid, values, context=context)
def product_id_change(self, cr, uid, ids, pricelist, product, qty=0,
uom=False, qty_uos=0, uos=False, name='', partner_id=False,
lang=False, update_tax=True, date_order=False, packaging=False, fiscal_position=False, flag=False, context=None):
context = context or {}
lang = lang or context.get('lang', False)
if not partner_id:
raise osv.except_osv(_('No Customer Defined!'), _('Before choosing a product,\n select a customer in the sales form.'))
warning = False
product_uom_obj = self.pool.get('product.uom')
partner_obj = self.pool.get('res.partner')
product_obj = self.pool.get('product.product')
context = {'lang': lang, 'partner_id': partner_id}
partner = partner_obj.browse(cr, uid, partner_id)
lang = partner.lang
context_partner = {'lang': lang, 'partner_id': partner_id}
if not product:
            return {'value': {'th_weight': 0, 'product_uos_qty': qty},
                    'domain': {'product_uom': [], 'product_uos': []}}
if not date_order:
date_order = time.strftime(DEFAULT_SERVER_DATE_FORMAT)
result = {}
warning_msgs = ''
product_obj = product_obj.browse(cr, uid, product, context=context_partner)
uom2 = False
if uom:
uom2 = product_uom_obj.browse(cr, uid, uom)
if product_obj.uom_id.category_id.id != uom2.category_id.id:
uom = False
if uos:
if product_obj.uos_id:
uos2 = product_uom_obj.browse(cr, uid, uos)
if product_obj.uos_id.category_id.id != uos2.category_id.id:
uos = False
else:
uos = False
fpos = False
if not fiscal_position:
fpos = partner.property_account_position or False
else:
fpos = self.pool.get('account.fiscal.position').browse(cr, uid, fiscal_position)
        if update_tax:  # update_tax is False when only the quantity has changed
result['tax_id'] = self.pool.get('account.fiscal.position').map_tax(cr, uid, fpos, product_obj.taxes_id)
if not flag:
result['name'] = self.pool.get('product.product').name_get(cr, uid, [product_obj.id], context=context_partner)[0][1]
if product_obj.description_sale:
result['name'] += '\n'+product_obj.description_sale
domain = {}
if (not uom) and (not uos):
result['product_uom'] = product_obj.uom_id.id
if product_obj.uos_id:
result['product_uos'] = product_obj.uos_id.id
result['product_uos_qty'] = qty * product_obj.uos_coeff
uos_category_id = product_obj.uos_id.category_id.id
else:
result['product_uos'] = False
result['product_uos_qty'] = qty
uos_category_id = False
result['th_weight'] = qty * product_obj.weight
domain = {'product_uom':
[('category_id', '=', product_obj.uom_id.category_id.id)],
'product_uos':
[('category_id', '=', uos_category_id)]}
elif uos and not uom: # only happens if uom is False
result['product_uom'] = product_obj.uom_id and product_obj.uom_id.id
result['product_uom_qty'] = qty_uos / product_obj.uos_coeff
result['th_weight'] = result['product_uom_qty'] * product_obj.weight
elif uom: # whether uos is set or not
default_uom = product_obj.uom_id and product_obj.uom_id.id
q = product_uom_obj._compute_qty(cr, uid, uom, qty, default_uom)
if product_obj.uos_id:
result['product_uos'] = product_obj.uos_id.id
result['product_uos_qty'] = qty * product_obj.uos_coeff
else:
result['product_uos'] = False
result['product_uos_qty'] = qty
            result['th_weight'] = q * product_obj.weight  # weight based on qty in the product's default UoM
if not uom2:
uom2 = product_obj.uom_id
# get unit price
if not pricelist:
warn_msg = _('You have to select a pricelist or a customer in the sales form !\n'
'Please set one before choosing a product.')
warning_msgs += _("No Pricelist ! : ") + warn_msg +"\n\n"
else:
price = self.pool.get('product.pricelist').price_get(cr, uid, [pricelist],
product, qty or 1.0, partner_id, {
'uom': uom or result.get('product_uom'),
'date': date_order,
})[pricelist]
if price is False:
warn_msg = _("Cannot find a pricelist line matching this product and quantity.\n"
"You have to change either the product, the quantity or the pricelist.")
warning_msgs += _("No valid pricelist line found ! :") + warn_msg +"\n\n"
else:
result.update({'price_unit': price})
if warning_msgs:
warning = {
'title': _('Configuration Error!'),
'message' : warning_msgs
}
return {'value': result, 'domain': domain, 'warning': warning}
def product_uom_change(self, cursor, user, ids, pricelist, product, qty=0,
uom=False, qty_uos=0, uos=False, name='', partner_id=False,
lang=False, update_tax=True, date_order=False, context=None):
context = context or {}
lang = lang or ('lang' in context and context['lang'])
if not uom:
return {'value': {'price_unit': 0.0, 'product_uom' : uom or False}}
return self.product_id_change(cursor, user, ids, pricelist, product,
qty=qty, uom=uom, qty_uos=qty_uos, uos=uos, name=name,
partner_id=partner_id, lang=lang, update_tax=update_tax,
date_order=date_order, context=context)
    def unlink(self, cr, uid, ids, context=None):
        """Allows to delete sales order lines in draft and cancel states"""
        if context is None:
            context = {}
for rec in self.browse(cr, uid, ids, context=context):
if rec.state not in ['draft', 'cancel']:
raise osv.except_osv(_('Invalid Action!'), _('Cannot delete a sales order line which is in state \'%s\'.') %(rec.state,))
return super(sale_order_line, self).unlink(cr, uid, ids, context=context)
class mail_compose_message(osv.Model):
_inherit = 'mail.compose.message'
def send_mail(self, cr, uid, ids, context=None):
context = context or {}
if context.get('default_model') == 'sale.order' and context.get('default_res_id') and context.get('mark_so_as_sent'):
context = dict(context, mail_post_autofollow=True)
self.pool.get('sale.order').signal_workflow(cr, uid, [context['default_res_id']], 'quotation_sent')
return super(mail_compose_message, self).send_mail(cr, uid, ids, context=context)
class account_invoice(osv.Model):
_inherit = 'account.invoice'
def _get_default_section_id(self, cr, uid, context=None):
""" Gives default section by checking if present in the context """
section_id = self._resolve_section_id_from_context(cr, uid, context=context) or False
if not section_id:
section_id = self.pool.get('res.users').browse(cr, uid, uid, context).default_section_id.id or False
return section_id
def _resolve_section_id_from_context(self, cr, uid, context=None):
""" Returns ID of section based on the value of 'section_id'
context key, or None if it cannot be resolved to a single
Sales Team.
"""
if context is None:
context = {}
if type(context.get('default_section_id')) in (int, long):
return context.get('default_section_id')
if isinstance(context.get('default_section_id'), basestring):
section_ids = self.pool.get('crm.case.section').name_search(cr, uid, name=context['default_section_id'], context=context)
if len(section_ids) == 1:
return int(section_ids[0][0])
return None
_columns = {
'section_id': fields.many2one('crm.case.section', 'Sales Team'),
}
_defaults = {
'section_id': lambda self, cr, uid, c=None: self._get_default_section_id(cr, uid, context=c)
}
def confirm_paid(self, cr, uid, ids, context=None):
sale_order_obj = self.pool.get('sale.order')
res = super(account_invoice, self).confirm_paid(cr, uid, ids, context=context)
so_ids = sale_order_obj.search(cr, uid, [('invoice_ids', 'in', ids)], context=context)
for so_id in so_ids:
sale_order_obj.message_post(cr, uid, so_id, body=_("Invoice paid"), context=context)
return res
def unlink(self, cr, uid, ids, context=None):
""" Overwrite unlink method of account invoice to send a trigger to the sale workflow upon invoice deletion """
invoice_ids = self.search(cr, uid, [('id', 'in', ids), ('state', 'in', ['draft', 'cancel'])], context=context)
#if we can't cancel all invoices, do nothing
if len(invoice_ids) == len(ids):
#Cancel invoice(s) first before deleting them so that if any sale order is associated with them
#it will trigger the workflow to put the sale order in an 'invoice exception' state
for id in ids:
workflow.trg_validate(uid, 'account.invoice', id, 'invoice_cancel', cr)
return super(account_invoice, self).unlink(cr, uid, ids, context=context)
class procurement_order(osv.osv):
_inherit = 'procurement.order'
_columns = {
'sale_line_id': fields.many2one('sale.order.line', string='Sale Order Line'),
}
def write(self, cr, uid, ids, vals, context=None):
if isinstance(ids, (int, long)):
ids = [ids]
res = super(procurement_order, self).write(cr, uid, ids, vals, context=context)
from openerp import workflow
if vals.get('state') in ['done', 'cancel', 'exception']:
for proc in self.browse(cr, uid, ids, context=context):
if proc.sale_line_id and proc.sale_line_id.order_id:
order_id = proc.sale_line_id.order_id.id
if self.pool.get('sale.order').test_procurements_done(cr, uid, [order_id], context=context):
workflow.trg_validate(uid, 'sale.order', order_id, 'ship_end', cr)
if self.pool.get('sale.order').test_procurements_except(cr, uid, [order_id], context=context):
workflow.trg_validate(uid, 'sale.order', order_id, 'ship_except', cr)
return res
class product_product(osv.Model):
_inherit = 'product.product'
def _sales_count(self, cr, uid, ids, field_name, arg, context=None):
r = dict.fromkeys(ids, 0)
domain = [
('state', 'in', ['waiting_date','progress','manual', 'shipping_except', 'invoice_except', 'done']),
('product_id', 'in', ids),
]
for group in self.pool['sale.report'].read_group(cr, uid, domain, ['product_id','product_uom_qty'], ['product_id'], context=context):
r[group['product_id'][0]] = group['product_uom_qty']
return r
def action_view_sales(self, cr, uid, ids, context=None):
result = self.pool['ir.model.data'].xmlid_to_res_id(cr, uid, 'sale.action_order_line_product_tree', raise_if_not_found=True)
result = self.pool['ir.actions.act_window'].read(cr, uid, [result], context=context)[0]
result['domain'] = "[('product_id','in',[" + ','.join(map(str, ids)) + "])]"
return result
_columns = {
'sales_count': fields.function(_sales_count, string='# Sales', type='integer'),
}
class product_template(osv.Model):
_inherit = 'product.template'
def _sales_count(self, cr, uid, ids, field_name, arg, context=None):
res = dict.fromkeys(ids, 0)
for template in self.browse(cr, uid, ids, context=context):
res[template.id] = sum([p.sales_count for p in template.product_variant_ids])
return res
def action_view_sales(self, cr, uid, ids, context=None):
act_obj = self.pool.get('ir.actions.act_window')
mod_obj = self.pool.get('ir.model.data')
product_ids = []
for template in self.browse(cr, uid, ids, context=context):
product_ids += [x.id for x in template.product_variant_ids]
result = mod_obj.xmlid_to_res_id(cr, uid, 'sale.action_order_line_product_tree',raise_if_not_found=True)
result = act_obj.read(cr, uid, [result], context=context)[0]
result['domain'] = "[('product_id','in',[" + ','.join(map(str, product_ids)) + "])]"
return result
_columns = {
'sales_count': fields.function(_sales_count, string='# Sales', type='integer'),
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
| 5,077,323,756,699,645,000
| 52.384207
| 318
| 0.579615
| false
| 3.812951
| false
| false
| false
|
Vutshi/qutip
|
qutip/examples/ex_52.py
|
1
|
2978
|
#
# Landau-Zener-Stuckelberg interferometry: steady state of repeated
# Landau-Zener like avoided-level crossing, as a function of driving amplitude
# and bias.
#
# Note: In order to get this example to work properly in the demos window,
# we have had to pass many more variables to parfor than is typically
# necessary.
from qutip import *
from pylab import *
# a task function for the for-loop parallelization:
# the m-index is parallelized in loop over the elements of p_mat[m,n]
def task(args):
m, H_td, c_op_list, sn, A_list, T, w, eps = args
p_mat_m = zeros(len(A_list))
for n, A in enumerate(A_list):
# change args sent to solver, w is really a constant though.
Hargs = {'w': w, 'eps': eps, 'A': A}
# settings (for reusing list-str format Hamiltonian)
U = propagator(H_td, T, c_op_list, Hargs, Odeoptions(rhs_reuse=True))
rho_ss = propagator_steadystate(U)
p_mat_m[n] = expect(sn, rho_ss)
return [m, p_mat_m]
def run():
# set up the parameters and start calculation
delta = 0.1 * 2 * pi # qubit sigma_x coefficient
w = 2.0 * 2 * pi # driving frequency
T = 2 * pi / w # driving period
gamma1 = 0.00001 # relaxation rate
gamma2 = 0.005 # dephasing rate
eps_list = linspace(-10.0, 10.0, 101) * 2 * pi
A_list = linspace(0.0, 20.0, 101) * 2 * pi
# pre-calculate the necessary operators
sx = sigmax()
sz = sigmaz()
sm = destroy(2)
sn = num(2)
# collapse operators: relaxation and dephasing
c_op_list = [sqrt(gamma1) * sm, sqrt(gamma2) * sz]
# setup time-dependent Hamiltonian (list-string format)
H0 = -delta / 2.0 * sx
H1 = [sz, '-eps/2.0+A/2.0*sin(w * t)']
H_td = [H0, H1]
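    # i.e. H(t) = H0 + (-eps/2 + (A/2)*sin(w*t)) * sz, with the string
    # coefficient evaluated against the Hargs dict at every time step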
Hargs = {'w': w, 'eps': eps_list[0], 'A': A_list[0]}
# pre-generate RHS so we can use parfor
rhs_generate(H_td, c_op_list, Hargs, name='lz_func')
# start a parallel for loop over bias point values (eps_list)
parfor_args = [[k, H_td, c_op_list, sn, A_list, T, w, eps_list[k]]
for k in range(len(eps_list))]
p_mat_list = parfor(task, parfor_args)
# assemble a matrix p_mat from list of (index,array) tuples returned by
# parfor
p_mat = zeros((len(eps_list), len(A_list)))
for m, p_mat_m in p_mat_list:
p_mat[m, :] = p_mat_m
# Plot the results
A_mat, eps_mat = meshgrid(A_list / (2 * pi), eps_list / (2 * pi))
fig = figure()
ax = fig.add_axes([0.1, 0.1, 0.9, 0.8])
c = ax.pcolor(eps_mat, A_mat, p_mat)
c.set_cmap('RdYlBu_r')
cbar = fig.colorbar(c)
cbar.set_label("Probability")
ax.set_xlabel(r'Bias point $\epsilon$')
ax.set_ylabel(r'Amplitude $A$')
ax.autoscale(tight=True)
title('Steadystate excitation probability\n' +
r'$H = -\frac{1}{2}\Delta\sigma_x - \frac{1}{2}\epsilon\sigma_z' +
r' - \frac{1}{2}A\sin(\omega t)$' + '\\n')
show()
close()
if __name__ == '__main__':
run()
|
gpl-3.0
| -7,503,352,612,644,134,000
| 34.035294
| 78
| 0.595366
| false
| 2.801505
| false
| false
| false
|
listyque/TACTIC-Handler
|
thlib/side/console/ui/output_window.py
|
1
|
2451
|
from thlib.side.Qt import QtWidgets as QtGui
from thlib.side.Qt import QtGui as Qt4Gui
class OutputWindow(QtGui.QPlainTextEdit):
def __init__(self, parent=None):
"""
Initialize default settings.
"""
QtGui.QPlainTextEdit.__init__(self, parent)
self.setTabStopWidth(4 * self.fontMetrics().width(" "))
self.__current_write_state = "output"
def scroll_to_bottom(self):
"""
Scroll to bottom.
"""
scrollbar = self.verticalScrollBar()
scrollbar.setValue(scrollbar.maximum())
self.moveCursor(Qt4Gui.QTextCursor.End)
def write_input(self, text):
if self.__current_write_state != "input":
self.__current_write_state = "input"
# text = unicode(text)
text = text.replace("\\r", "\r")
text = text.replace("\\n", "\n")
        text = text.replace(" ", "&nbsp;")
        text = text.replace("<", "&lt;")
        text = text.replace(">", "&gt;")
for line in text.splitlines():
line = '<font color="#A9A9A9">' + line + '</font><br>'
self.__write_html_output(line)
# QtCore.QCoreApplication.processEvents(QtCore.QEventLoop.ExcludeUserInputEvents)
self.scroll_to_bottom()
def write_output(self, text):
if self.__current_write_state != "output":
self.__current_write_state = "output"
text = text.replace("\\r", "\r")
text = text.replace("\\n", "\n")
self.moveCursor(Qt4Gui.QTextCursor.End)
self.insertPlainText(text)
self.moveCursor(Qt4Gui.QTextCursor.End)
self.scroll_to_bottom()
def write_error(self, text):
if self.__current_write_state != "error":
self.__current_write_state = "error"
# text = unicode(text)
text = text.replace("\\r", "\r")
text = text.replace("\\n", "\n")
        text = text.replace(" ", "&nbsp;")
        text = text.replace("<", "&lt;")
        text = text.replace(">", "&gt;")
for line in text.splitlines():
line = '<font color="#ff9999">' + line + '</font><br>'
self.__write_html_output(line)
self.scroll_to_bottom()
def __write_html_output(self, text):
"""
Write text as html output.
"""
self.moveCursor(Qt4Gui.QTextCursor.End)
self.textCursor().insertHtml(text)
self.moveCursor(Qt4Gui.QTextCursor.End)
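# Minimal usage sketch (hypothetical; assumes a running QApplication event loop):
#   win = OutputWindow()
#   win.write_input('>>> print("hi")')
#   win.write_output('hi\n')
#   win.write_error('Traceback (most recent call last): ...')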
|
epl-1.0
| -5,648,691,627,177,380,000
| 28.178571
| 93
| 0.555692
| false
| 3.753446
| false
| false
| false
|
Sterncat/opticspy
|
opticspy/lens/aberration.py
|
1
|
2204
|
import numpy as __np__
import matplotlib.pyplot as __plt__
def third(s1,s2,s3,s4,s5):
"""
Third order aberrations:
Ray aberrations
Field curve
Distortion
input: third order aberration coefficient
sigma 1~5
output: third order aberration graph
"""
print("third order aberration")
py = __np__.linspace(-1,1,100)
px = __np__.linspace(0,1,50)
height = [1,0.7,0]
count = 0
ax = []
maxTan = 0
maxSag = 0
fig = __plt__.figure(1)
for h in height:
Tan = s1*py**3+3*s2*h*py**2+(3*s3+s4)*h**2.*py+s5*h**3
ax.append(__plt__.subplot2grid((3, 3), (count, 0), colspan=2))
__plt__.plot(py, Tan)
if maxTan < max(abs(Tan)): maxTan = max(abs(Tan))
if count == 0: __plt__.title('TANGENTIAL')
__plt__.axis([-1, 1, -maxTan, maxTan])
if count == len(height)-1: __plt__.xlabel('\n' + r'$\rho_y$',fontsize=20)
__plt__.ylabel('h = '+str(h),fontsize=15)
__plt__.grid(True)
Sag = s1*px**3+(s3+s4)*h**2*px
ax.append(__plt__.subplot2grid((3, 3), (count, 2)))
__plt__.plot(px, Sag)
if maxSag < max(abs(Sag)): maxSag = max(abs(Sag))
__plt__.axis([0, 1, -maxSag, maxSag])
if count == 0: __plt__.title('SAGITTAL')
if count == len(height)-1: __plt__.xlabel('\n' + r'$\rho_x$',fontsize=20)
__plt__.grid(True)
count = count + 1
fig.set_tight_layout(True)
__plt__.show()
def fieldcurve(sigma3 = 0.05, sigma4 = -0.05, FNO = 10, H = 20):
"""
sigma3 Astigmatism Coefficient
sigma4 Petzval Coefficient
FNO F-number
H Image Height
"""
    uak = -1.00/(2*FNO) # marginal ray angle
h = __np__.linspace(0,1,40)
XP = -sigma4/uak*h**2
XT = -(3*sigma3+sigma4)/uak*h**2
XS = -(sigma3+sigma4)/uak*h**2
fig = __plt__.figure(figsize=(6, 8), dpi=80)
__plt__.plot(XP, h*H, 'b-*', label='P')
__plt__.plot(XT, h*H, 'b--', label='T')
__plt__.plot(XS, h*H, 'b', label='S')
__plt__.xlabel('Surface sag(mm)',fontsize=18)
__plt__.ylabel('Real image height(mm)',fontsize=18)
legend = __plt__.legend(loc='lower left', shadow=True, fontsize='x-large')
__plt__.title(r'$\sigma3 = $'+str(round(sigma3,4))+' '+r'$\sigma4 = $'+str(sigma4),fontsize=18)
#__plt__.axis([-16, 5, 0, H])
__plt__.grid(b=True, which='both', color='0.65',linestyle='--')
__plt__.show()
return 0
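# Example calls with illustrative aberration coefficients (each opens a plot):
#   third(0.1, 0.05, 0.02, 0.01, 0.005)
#   fieldcurve(sigma3=0.05, sigma4=-0.05, FNO=10, H=20)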
|
mit
| -6,850,834,233,485,839,000
| 27.636364
| 96
| 0.585299
| false
| 2.298227
| false
| false
| false
|
xraywu/wegene-python-sdk
|
wegene/Controllers/Psychology.py
|
1
|
2949
|
# -*- coding: utf-8 -*-
"""
wegene.Controllers.PsychologyController
This file was automatically generated by APIMATIC BETA v2.0 on 02/22/2016
"""
import requests
from wegene.APIHelper import APIHelper
from wegene.Configuration import Configuration
from wegene.APIException import APIException
from wegene.Models.Report import Report
class Psychology(object):
"""A Controller to access Endpoints in the WeGeneAPILib API."""
def get_psychology(self,
profile_id,
report_id):
"""Does a POST request to /psychology/{profile_id}.
Psychology profile based on genetic information
Args:
profile_id (string): Genetic profile id
            report_id (string): Report id for the specific psychology
                report to look up
Returns:
Report: Response from the API.
Raises:
APIException: When an error occurs while fetching the data from
the remote API. This exception includes the HTTP Response
code, an error message, and the HTTP body that was received in
the request.
"""
# The base uri for api requests
query_builder = Configuration.BASE_URI
# Prepare query string for API call
query_builder += "/psychology/{profile_id}"
# Process optional template parameters
query_builder = APIHelper.append_url_with_template_parameters(query_builder, {
"profile_id": profile_id
})
# Validate and preprocess url
query_url = APIHelper.clean_url(query_builder)
# Prepare headers
headers = {
"Authorization": "Bearer " + Configuration.o_auth_access_token,
"user-agent": "WeGene SDK",
"accept": "application/json",
}
# Prepare parameters
parameters = {
"report_id": report_id
}
# Prepare and invoke the API call request to fetch the response
response = requests.post(query_url, headers=headers, data=parameters)
# Error handling using HTTP status codes
        if response.status_code < 200 or response.status_code > 206:  # outside the 2xx success range
raise APIException("HTTP Response Not OK",
response.status_code, response.json())
# Try to cast response to desired type
if isinstance(response.json(), dict):
# Response is already in a dictionary, return the object
try:
return Report(**response.json())
except TypeError:
raise APIException("Invalid JSON returned",
response.status_code, response.json())
# If we got here then an error occured while trying to parse the response
raise APIException("Invalid JSON returned",
response.status_code, response.json())
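# Minimal usage sketch (hypothetical token and ids; requires a valid API account):
#   Configuration.o_auth_access_token = '<access-token>'
#   report = Psychology().get_psychology('<profile-id>', '<report-id>')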
|
mit
| -6,103,705,326,501,828,000
| 32.134831
| 86
| 0.602238
| false
| 4.802932
| false
| false
| false
|
wphicks/Writing3D
|
pyw3d/blender_actions/visibility.py
|
1
|
3616
|
# Copyright (C) 2016 William Hicks
#
# This file is part of Writing3D.
#
# Writing3D is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
"""Tools for changing the visibility of a Blender object"""
class VisibilityAction(object):
"""Generate Python logic for how visibility should change when action first
starts, as it continues, and when it ends
:param bool visibility: The visibility to transition to
:param float duration: Time for action to complete in seconds
:param int offset: A number of tabs (4 spaces) to add before Python logic
strings"""
@property
def start_string(self):
script_text = []
# TODO: Fade out timing appears to be mucked
script_text.extend([
"blender_object.color[3] = int(blender_object.visible)",
"blender_object.setVisible(True)",
"delta_alpha = {} - blender_object.color[3]".format(
int(self.visible)),
"W3D_LOG.debug(",
" 'object {} visibility set to {}'.format(",
" blender_object.name, delta_alpha > 0",
" )",
")",
"blender_object['visible_tag'] = 'delta_alpha > 0'",
"blender_object['visV'] = delta_alpha/{}".format(
("({}*bge.logic.getLogicTicRate())".format(self.duration), 1)[
self.duration == 0])]
)
try:
script_text[0] = "{}{}".format(" "*self.offset, script_text[0])
except IndexError:
return ""
return "\n{}".format(" "*self.offset).join(script_text)
@property
def continue_string(self):
script_text = [
"new_color = blender_object.color",
"new_color[3] += blender_object['visV']",
"blender_object.color = new_color"
]
try:
script_text[0] = "{}{}".format(" "*self.offset, script_text[0])
except IndexError:
return ""
return "\n{}".format(" "*self.offset).join(script_text)
@property
def end_string(self):
script_text = [
"new_color = blender_object.color",
"new_color[3] = {}".format(int(self.visible)),
"blender_object.color = new_color",
"blender_object.setVisible({})".format(self.visible),
"if 'clicks' in blender_object:",
" if blender_object.visible:",
" blender_object['clickable'] = True",
" else:",
" try:",
" del blender_object['clickable']",
" except KeyError:",
" pass # Already unclickable",
]
try:
script_text[0] = "{}{}".format(" "*self.offset, script_text[0])
except IndexError:
return ""
return "\n{}".format(" "*self.offset).join(script_text)
def __init__(self, visibility, duration, offset=0):
self.visible = visibility
self.duration = duration
self.offset = offset
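# Minimal usage sketch (illustrative values): build the logic strings that
# fade an object in over two seconds, indented one tab level:
#   action = VisibilityAction(True, 2.0, offset=1)
#   print(action.start_string)
#   print(action.continue_string)
#   print(action.end_string)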
|
gpl-3.0
| 4,423,988,806,805,680,000
| 37.468085
| 79
| 0.568584
| false
| 4.040223
| false
| false
| false
|
igrlas/CentralHub
|
CHPackage/src/centralhub/helpers/data_operations.py
|
1
|
1674
|
def element_dict_to_tuple(my_dict):
    """Of type Element"""
    keys = ('hid', 'name', 'type', 'state', 'override', 'temperature',
            'defaultTemperature', 'desiredTemperature', 'address')
    # Fill in any missing Element fields with None, then emit a fixed-order tuple
    for key in keys:
        my_dict.setdefault(key, None)
    return tuple(my_dict[key] for key in keys)
def tuple_to_dict(properties, values):
"""Any tuple to any dict"""
    if len(properties) != len(values):
raise Exception('number of properties does not match number of values supplied')
final_dict = {}
for i, prop in enumerate(properties):
final_dict[prop] = values[i]
return final_dict
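# Example (illustrative values):
#   >>> tuple_to_dict(('hid', 'name'), (1, 'boiler'))
#   {'hid': 1, 'name': 'boiler'}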
def create_element(hid=None, name=None, type=None, state=None, override=None, temperature=None, defaultTemperature=None, desiredTemperature=None, address=None):
"""of type Element"""
return {'hid': hid, 'name': name, 'type': type, 'state': state, 'override': override, 'temperature': temperature,
'defaultTemperature': defaultTemperature, 'desiredTemperature': desiredTemperature, 'address': address}
|
gpl-2.0
| 4,153,254,492,931,863,600
| 37.045455
| 160
| 0.630824
| false
| 3.72
| false
| false
| false
|
Jbkwok/is210_lesson_02
|
identity.py
|
1
|
1489
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Provides the is_empty() method."""
def get_member_count(my_sequence):
"""Returns the number of members of a list object.
Args:
my_sequence (sequence): The sequence object being measured.
Returns:
mixed: If the object can be measured it returns an integer. If not it
returns ``False``
Examples:
>>> get_member_count(42)
False
>>> get_member_count('duck')
4
>>> get_member_count(['knights', 'who', 'say', 'ni'])
4
"""
try:
length = len(my_sequence)
except TypeError:
length = False
return length
def is_empty(my_sequence):
"""Tests whether or not the passed sequence is empty.
Args:
my_sequence (sequence): The sequence object being measured.
Returns:
bool: If empty, returns True, otherwise, False.
Raises:
TypeError: If my_sequence is not a sequence object type.
Examples:
>>> is_empty('')
True
>>> is_empty('apple')
False
>>> is_empty([])
True
>>> is_empty(42)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
TypeError: object has no len()
"""
    count = get_member_count(my_sequence)
    if count is not False:
        return count == 0
    else:
        raise TypeError('object has no len()')
TEST = ''
print len(TEST)
print is_empty(TEST)
|
mpl-2.0
| 3,495,747,279,157,622,000
| 19.971831
| 77
| 0.556749
| false
| 4.11326
| false
| false
| false
|
bschug/poe-loot-gen
|
uniques.py
|
1
|
2853
|
import requests
from collections import defaultdict
import sys
FATED_UNIQUES = {
'Amplification Rod',
'Cragfall',
'Death\'s Opus',
'Deidbellow',
'Doomfletch\'s Prism',
'Ezomyte Hold',
'Hrimburn',
'Hrimnor\'s Dirge',
'Kaltensoul',
'Kaom\'s Way',
'Karui Charge',
'Martyr\'s Crown',
'Ngamahu Tiki',
'Queen\'s Escape',
'Realm Ender',
'Shavronne\'s Gambit',
'Silverbough',
'The Cauteriser',
'The Gryphon',
'The Oak',
'The Signal Fire',
'The Tempest',
'Thirst for Horrors',
'Wall of Brambles',
'Voidheart'
}
def get_unique_prices(league):
unique_prices = defaultdict(lambda: 0)
get_unique_prices_from_url('http://poeninja.azureedge.net/api/Data/GetUniqueWeaponOverview', league, unique_prices)
get_unique_prices_from_url('http://poeninja.azureedge.net/api/Data/GetUniqueArmourOverview', league, unique_prices)
get_unique_prices_from_url('http://poeninja.azureedge.net/api/Data/GetUniqueAccessoryOverview', league, unique_prices)
get_unique_prices_from_url('http://poeninja.azureedge.net/api/Data/GetUniqueFlaskOverview', league, unique_prices)
return unique_prices
def get_unique_prices_from_url(url, league, unique_prices):
response = requests.get(url, {'league': league}).json()
for item in response['lines']:
if item['name'] in FATED_UNIQUES:
continue
unique_prices[item['baseType']] = max(unique_prices[item['baseType']], item['chaosValue'])
def build_filter_code(unique_prices):
worthless, mediocre, valuable, awesome = [], [], [], []
for k, v in unique_prices.items():
if v < 0.5:
worthless.append(k)
elif v < 2:
mediocre.append(k)
elif v < 15:
valuable.append(k)
else:
awesome.append(k)
code = """
# Top Tier Uniques (15c+)
Show
Rarity Unique
BaseType {}
SetBackgroundColor 175 78 17
SetTextColor 0 0 0
SetBorderColor 0 0 0
SetFontSize 45
PlayAlertSound 6 300
# Decent Uniques (2c+)
Show
Rarity Unique
BaseType {}
SetFontSize 45
SetBackgroundColor 70 35 14 220
SetBorderColor 0 0 0
PlayAlertSound 6 300
# Mediocre Uniques (~1c)
Show
Rarity Unique
BaseType {}
SetFontSize 38
# Worthless Uniques (< 2 alch)
Show
Rarity Unique
BaseType {}
SetFontSize 30
# Draw pink border around unknown Uniques
Show
Rarity Unique
SetBorderColor 255 100 255
""".format(
' '.join('"{}"'.format(x) for x in awesome),
' '.join('"{}"'.format(x) for x in valuable),
' '.join('"{}"'.format(x) for x in mediocre),
' '.join('"{}"'.format(x) for x in worthless),
)
return code
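# Example (hypothetical prices, a sketch of how chaosValue maps to the four
# tiers above):
#     code = build_filter_code({'Leather Belt': 20.0, 'Coral Ring': 1.2, 'Iron Ring': 0.1})
#     # 'Leather Belt' lands in the 15c+ block, 'Coral Ring' in the ~1c block,
#     # 'Iron Ring' in the worthless block.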
if __name__ == '__main__':
league = sys.argv[1]
print(build_filter_code(get_unique_prices(league)))
|
mit
| -7,418,833,182,215,221,000
| 24.702703
| 122
| 0.625657
| false
| 3.128289
| false
| false
| false
|
gblanchard4/viamics
|
framework/modules/blast.py
|
1
|
6151
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (C) 2010 - 2011, University of New Orleans
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation; either version 2 of the License, or (at your option)
# any later version.
#
# Please read the COPYING file.
#
# --
#Contains the process for running a viamics analysis using BLAST, using functions and classes from
#framework.tools.blast. This depends on blast databases being stored at constants.blastdb_dir, and having blastn
#blastn and makeblastdb executables on the path
#
#If the blastn or makeblastdb programs are throwing errors, one possible cause is spaces in the path to input
#or output files. I could not for the life of me figure this out (I think the blastn and makeblastdb programs just
#can't handle it), so I just stick underscores in the name the user gives. If Viamics is installed at say
#/home/username/Desktop/My bioinformatics folder/viamics, there could be a problem.
import os
import cPickle
from framework.tools.helper_functions import SerializeToFile, DeserializeFromFile
from framework.tools.logger import debug
from framework.tools import fasta
import framework.constants as c
import framework.tools.blast
import framework.tools.helper_functions as helper_functions
def _preprocess(p, request_dict):
    #fasta.stripped expects an open keyfile object, but all it does is
#"for line in keys" so a list of strings works here. Using a list avoids all
#the nonsense of sending another file from the client.
mode = request_dict.get("qa_mode")
try:
return fasta.fasta_qa_preprocess(
mode,
request_dict.get("data_file_path"),
request_dict.get("codes_primers"),#keyfile. see above
homopolymer_length = request_dict.get("homopolymer_length"))
except:
debug(helper_functions.formatExceptionInfo(), p.files.log_file)
raise
def _exec(p, request_dict):
p.set_analysis_type('blast')
p.threshold = request_dict.get('threshold_dict')
separator = request_dict['seperator']#sic
debug("storing separator: '%s'" % separator, p.files.log_file)
open(p.files.seperator_file_path, 'w').write(separator)
debug("storing DB name: '%s'" % request_dict['db_name'], p.files.log_file)
open(p.files.blast_db_name_path, 'w').write(request_dict['db_name'])
if p.threshold:
debug("storing confidence threshold", p.files.log_file)
with open(p.files.threshold_path,'w') as f:
f.write(cPickle.dumps(p.threshold))
#add length info to legend
num_seqs = helper_functions.get_number_of_lines(p.files.data_file_path) / 2
name = request_dict['db_name']
#run blast on data
blast_db = os.path.join(c.blastdb_dir,name,name)
debug("Extracting QA info", p.files.log_file)
cmt = open(p.files.data_comment_file_path,'w')
for line in open(p.files.data_file_path):
if line.startswith(';'):
cmt.write(line)
cmt.close()
debug(("running blast on %d sequences against database: %s " % (num_seqs, request_dict['db_name'])), p.files.log_file)
framework.tools.blast.run_blastn(p.files.data_file_path, p.files.blast_output_file_path, blast_db,num=1)
samples_dictionary(p)
samples = DeserializeFromFile(p.files.samples_serialized_file_path).keys()
if len(samples) == 0:
        msg = 'error: samples dict contains no samples. perhaps no sequences in the query matched the database'
debug(msg,p.files.log_file)
raise ValueError(msg)
else:
open(p.files.all_unique_samples_file_path, 'w').write('\n'.join(samples) + '\n')
debug("%d unique sample names stored" % len(samples), p.files.log_file)
otu_library(p)
if hasattr(p,'threshold'):
separate_low_confidence(p)
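# A sketch of the pipeline wired up above (p is the analysis object the
# framework passes in): _preprocess() QA-filters the FASTA input, _exec()
# runs blastn against the chosen database, then samples_dictionary(),
# otu_library() and, when thresholds are given, separate_low_confidence()
# derive the per-sample results from the blast output.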
def samples_dictionary(p):
debug("Computing sample dictionary", p.files.log_file)
db_name = open(p.files.blast_db_name_path).read()
legend_path = os.path.join(c.blastdb_dir,
db_name,db_name+c.blast_legend_file_extension)
samples_dict = framework.tools.blast.create_samples_dictionary(p.files.blast_output_file_path,
legend_path,
open(p.files.seperator_file_path).read(),
thresholds=p.threshold)
debug("Serializing samples dictionary object", p.files.log_file)
SerializeToFile(samples_dict, p.files.samples_serialized_file_path)
def otu_library(p):
debug("Generating OTU Library", p.files.log_file)
db_name = open(p.files.blast_db_name_path).read()
legend_path = os.path.join(c.blastdb_dir,
db_name,db_name+c.blast_legend_file_extension)
otu_library = framework.tools.blast.get_otu_library(p.files.blast_output_file_path,
legend_path,
open(p.files.seperator_file_path).read())
SerializeToFile(otu_library, p.files.otu_library_file_path)
def separate_low_confidence(p):
debug("Separating low confidence sequences", p.files.log_file)
separator = open(p.files.seperator_file_path).read()
lo_seqs = framework.tools.blast.low_confidence_seqs(open(p.files.data_file_path),
open(p.files.blast_output_file_path),
p.threshold,
separator)
with open(p.files.low_confidence_seqs_path,'w') as o:
for s in lo_seqs:
o.write(s)
def _module_functions(p, request_dict):
return {
        'samples_dictionary': {'func': samples_dictionary, 'desc': 'Samples dictionary'},
        'otu_library': {'func': otu_library, 'desc': 'OTU library'}
}
def _sample_map_functions(p, request_dict):
return {}
|
gpl-2.0
| 185,121,696,036,604,600
| 42.624113
| 122
| 0.63323
| false
| 3.631051
| false
| false
| false
|
colonelqubit/libreconverter
|
libreconverter.py
|
1
|
6539
|
#!/usr/bin/python3
#
# Requires Python3
# *Please* make sure to use the version of Python included with
# your copy of LibreOffice.
#
# Convert spreadsheet to CSV file.
#
# Based on:
# PyODConverter (Python OpenDocument Converter) v1.0.0 - 2008-05-05
# Copyright (C) 2008 Mirko Nasato <mirko@artofsolving.com>
# Licensed under the GNU LGPL v2.1 - or any later version.
# http://www.gnu.org/licenses/lgpl-2.1.html
#
import os
import re
import loutils
import uno
from com.sun.star.task import ErrorCodeIOException
class LibreConverter:
"""
Spreadsheet converter class.
Converts spreadsheets to CSV files.
"""
def __init__(self, lorunner=None):
self.desktop = None
        self.lorunner = lorunner
def convert(self, inputFile, outputFile, verbose=False):
"""
Convert the input file (a spreadsheet) to a CSV file.
The input file name can contain a sheet specification to specify a particular sheet.
The sheet specification is either a number or a sheet name.
The sheet specification is appended to the file name separated by a colon
or an at sign: ":" or "@".
If the output file name contains a %d or %s format specifier, then all the sheets
in the input file are converted, otherwise only the first sheet is converted.
If the output file name contains a %d format specifier then the sheet number
is used when formatting the output file name.
The format can contain a width specifier (eg %02d).
If the output file name contains a %s specifier then the sheet name is used
when formatting the output file name.
"""
# Start openoffice if needed.
if not self.desktop:
if not self.lorunner:
self.lorunner = loutils.LORunner()
self.desktop = self.lorunner.connect()
# Check for sheet specification in input file name.
match = re.search(r'^(.*)[@:](.*)$', inputFile)
if os.path.exists(inputFile) or not match:
inputUrl = uno.systemPathToFileUrl(os.path.abspath(inputFile))
            inputSheet = '1' # Convert first sheet.
else:
inputUrl = uno.systemPathToFileUrl(os.path.abspath(match.group(1)))
inputSheet = match.group(2)
# NOTE:
# Sheet activation does not work properly when Hidden is specified.
# Although the sheet does become the active sheet, it's not the sheet that
# gets saved if the spreadsheet is loaded with Hidden=True.
#
# Removing Hidden=True doesn't seem to change anything: nothing appears
# on the screen regardless of the Hidden value.
#
# document = self.desktop.loadComponentFromURL(inputUrl, "_blank", 0, loutils.lo_properties(Hidden=True))
document = self.desktop.loadComponentFromURL(inputUrl, "_blank", 0, loutils.lo_properties())
try:
props = loutils.lo_properties(FilterName="Text - txt - csv (StarCalc)")
#
# Another useful property option:
# FilterOptions="59,34,0,1"
# 59 - Field separator (semicolon), this is the ascii value.
# 34 - Text delimiter (double quote), this is the ascii value.
# 0 - Character set (system).
# 1 - First line number to export.
#
# For more information see:
# http://wiki.services.openoffice.org/wiki/Documentation/DevGuide/Spreadsheets/Filter_Options
# To convert a particular sheet, the sheet needs to be active.
# To activate a sheet we need the spreadsheet-view, to get the spreadsheet-view
# we need the spreadsheet-controller, to get the spreadsheet-controller
# we need the spreadsheet-model.
#
# The spreadsheet-model interface is available from the document object.
# The spreadsheet-view interface is available from the controller.
#
controller = document.getCurrentController()
sheets = document.getSheets()
# If the output file name contains a %d or %s format specifier, convert all sheets.
# Use the sheet number if the format is %d, otherwise the sheet name.
dfmt = re.search(r'%[0-9]*d', outputFile)
sfmt = re.search(r'%s', outputFile)
if dfmt or sfmt:
i = 0
while i < sheets.getCount():
# Activate the sheet.
sheet = sheets.getByIndex(i)
controller.setActiveSheet(sheet)
# Create output file name.
if dfmt:
ofile = outputFile % (i+1)
else:
ofile = outputFile % sheet.getName().replace(' ', '_')
if verbose: print( " %s" % ofile)
# Save the sheet to the output file.
outputUrl = uno.systemPathToFileUrl(os.path.abspath(ofile))
document.storeToURL(outputUrl, props)
i += 1
else:
# Activate the sheet to be converted.
if re.search(r'^\d+$', inputSheet):
sheet = sheets.getByIndex(int(inputSheet)-1)
else:
sheet = sheets.getByName(inputSheet)
controller.setActiveSheet(sheet)
outputUrl = uno.systemPathToFileUrl(os.path.abspath(outputFile))
document.storeToURL(outputUrl, props)
finally:
if document: document.close(True)
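# Example (hypothetical file names, a minimal sketch of the sheet-spec and
# format-specifier behaviour described in convert()'s docstring):
#     converter = LibreConverter()
#     converter.convert('report.ods:Totals', 'totals.csv')  # one named sheet
#     converter.convert('report.ods', 'sheet_%02d.csv')     # all sheets, by number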
if __name__ == "__main__":
from sys import argv
from os.path import isfile
if len(argv) == 2 and argv[1] == '--shutdown':
loutils.lo_shutdown_if_running()
else:
if len(argv) < 3 or len(argv) % 2 != 1:
print("USAGE:")
print(" python %s INPUT-FILE[:SHEET] OUTPUT-FILE ..." % argv[0])
print("OR")
print(" python %s --shutdown" % argv[0])
exit(255)
try:
i = 1
converter = LibreConverter()
while i+1 < len(argv):
print('%s => %s' % (argv[i], argv[i+1]))
converter.convert(argv[i], argv[i+1], True)
i += 2
except ErrorCodeIOException as exception:
print("ERROR! ErrorCodeIOException %d" % exception.ErrCode)
exit(1)
|
lgpl-2.1
| 6,492,071,722,650,483,000
| 37.017442
| 114
| 0.577764
| false
| 4.21599
| false
| false
| false
|
wait4pumpkin/tmall
|
solution/analysis/single_repeat.py
|
1
|
5277
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import csv
import random
import glob
import os
import sys
import time
import math
import numpy
import pylab
from collections import Counter
from svmutil import *
N_MONTH = 4
N_DAY_PER_MONTH = 31
BASE_MONTH = 4
TYPE_LENGTH = 4
class User(object):
def __init__(self, id, info):
self.id = id;
self.brands = info.keys()
self.data = dict()
self.day = dict()
self.label = set()
for brandID in self.brands:
brand = info[brandID]
for month, day, action in brand:
p = (month - BASE_MONTH) * 12
                if day > 20:
                    p += 8
                elif day > 10:
                    p += 4
if action == 1:
if month >= BASE_MONTH + N_MONTH - 1:
self.label.add(brandID)
else:
if brandID not in self.data:
self.data[brandID] = 0
self.day[brandID] = []
self.data[brandID] += 1
self.day[brandID].append(day + (month - BASE_MONTH) * N_DAY_PER_MONTH)
self.data = sorted(self.data.items(), key=lambda e: e[1], reverse=True)
self.period_brand = set()
for brand, days in self.day.items():
days.sort()
wait = [days[idx+1] - days[idx] for idx in range(len(days)-1)]
repeat = [num for num in wait if num > 0]
if len(repeat) > 0:
if days[-1] < (N_MONTH - 2) * N_DAY_PER_MONTH:
if len(repeat) > 2 or sum(repeat) > 10:
self.period_brand.add(brand)
print repeat
else:
self.period_brand.add(brand)
print '!', repeat
def __str__(self):
        return str(self.id) + ' ' + str(len(self.brands))
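# Example (hypothetical purchase days, a sketch of the repeat-interval logic
# in User.__init__: zero gaps are same-day buys and are filtered out):
#     days = [3, 3, 10, 40]
#     wait = [days[idx+1] - days[idx] for idx in range(len(days)-1)]  # [0, 7, 30]
#     repeat = [num for num in wait if num > 0]                       # [7, 30]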
if __name__ == '__main__':
userInfo = dict()
with open('/home/pumpkin/Documents/project/tmall/dataset/t_alibaba_data.csv', 'rb') as csvfile:
user_table = dict()
brand_table = dict()
user_counter = 0
brand_counter = 0
reader = csv.reader(csvfile, delimiter=',')
for row in reader:
userID, brandID, actionType, month, day = [int(field) for field in row]
if not userID in user_table:
user_table[userID] = user_counter
user_counter += 1
if not brandID in brand_table:
brand_table[brandID] = brand_counter
brand_counter += 1
userID = user_table[userID]
brandID = brand_table[brandID]
if not userID in userInfo:
userInfo[userID] = dict()
user = userInfo[userID]
if brandID not in user:
user[brandID] = []
if month in (4, 5, 6):
day = day - 14
else:
day = day - 15
if day <= 0:
month -= 1
day += 31
band = user[brandID]
band.append((month, day, actionType))
users = []
for (userID, info) in userInfo.iteritems():
users.append(User(userID, info))
counter = 0
for user in users:
if len(user.data) <= 0:
continue
if user.data[0][1] > 1:
counter += 1
print counter, '{:.2f}%'.format(float(counter) / len(users) * 100)
# counter = 0
# for user in users:
# if len(user.data) <= 0 or user.data[0][1] < 2:
# continue
# flag = False
# for brand, time in user.data:
# if time < 2:
# break
# day = sorted(user.day[brand])
# wait = [day[idx+1] - day[idx] for idx in range(len(day)-1)]
# if len([num for num in wait if num > 0]) > 0:
# flag = True
# repeat = [num for num in wait if num > 0]
# if day[-1] < (N_MONTH - 1) * N_DAY_PER_MONTH:
# if len(repeat) < 3 and sum(repeat) < 10:
# flag = False
# else:
# print repeat
# if flag:
# counter += 1
# print '================================================================'
# print counter, '{:.2f}%'.format(float(counter) / len(users) * 100)
pBands = []
bBands = []
hitBands = []
for user in users:
bBands.append(len(user.label))
hit = 0
total = len(user.period_brand)
for predict in user.period_brand:
if predict in user.label:
hit += 1
hitBands.append(hit)
pBands.append(total)
print sum(hitBands), ' ', sum(pBands), ' ', sum(bBands)
precision = float(sum(hitBands)) / sum(pBands) if not sum(pBands) == 0 else 0
recall = float(sum(hitBands)) / sum(bBands) if not sum(bBands) == 0 else 0
f1 = (2 * precision * recall) / (precision + recall) if not precision + recall == 0 else 0
print 'All: %.02f%% (Precision) %.02f%% (Recall) %.02f%% (F1)' % (precision * 100, recall * 100, f1 * 100)
|
mit
| -2,505,034,783,997,640,000
| 29.686047
| 111
| 0.465795
| false
| 3.761226
| false
| false
| false
|
listyque/TACTIC-Handler
|
thlib/tactic_server.py
|
1
|
2467
|
# tactic_server.py
# Start here to run the server for the tactic api
import sys
import datetime
from thlib.side.Qt import QtWidgets as QtGui
from thlib.side.Qt import QtCore as QtCore
from thlib.side.Qt import QtNetwork as QtNetwork
import main_standalone
import thlib.global_functions as gf
from thlib.environment import env_mode, env_inst, dl
import thlib.ui_classes.ui_tactic_server_classes as ui_tactic_server_classes
class QSingleApplication(QtGui.QApplication):
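    """Single-instance QApplication.
    Tries to connect to a named QLocalSocket first; if a previous instance
    already listens on that name this process exits, otherwise it becomes
    the local server and starts the UI (see start_single/start_app).
    """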
def start_single(self, main_window):
self.main_window = main_window
# Creating local Socket
self.socket = QtNetwork.QLocalSocket()
# socket Actions
self.socket.connected.connect(self.connected_to_server)
self.socket.error.connect(self.start_app)
        # Trying to connect to an existing, previously executed server
self.socket.connectToServer(self.applicationName(), QtCore.QIODevice.ReadOnly)
def connected_to_server(self):
sys.exit()
def start_app(self):
self.server = QtNetwork.QLocalServer()
listen = self.server.listen(self.applicationName())
if listen:
self.server.newConnection.connect(self.handle_new_connections)
else:
QtGui.QMessageBox.critical(None, self.tr('Error'), self.tr('Error listening the socket.'))
self.main_window.create_ui()
def handle_new_connections(self):
        print('Checking whether the server is up')
incom_socket = self.server.nextPendingConnection()
incom_socket.readyRead.connect(lambda: self.readSocket(incom_socket))
def readSocket(self, new_socket):
new_socket.waitForReadyRead(20000)
new_socket.readAll()
@gf.catch_error
def startup():
env_inst.ui_super = QSingleApplication(sys.argv)
env_inst.ui_super.setApplicationName('TacticHandler_TacticApiServer')
if env_mode.qt5:
env_inst.ui_super.setStyle('fusion')
else:
env_inst.ui_super.setStyle('plastique')
env_mode.set_mode('api_server')
date_str = datetime.date.strftime(dl.session_start, '%d_%m_%Y_%H_%M_%S')
stdout_path = u'{0}/log/api_server_stdout_{1}.log'.format(env_mode.get_current_path(), date_str)
sys.stdout = open(stdout_path, 'w')
main_standalone.setPaletteFromDct(main_standalone.palette)
env_inst.ui_super.start_single(ui_tactic_server_classes.Ui_TacticServer())
sys.exit(env_inst.ui_super.exec_())
if __name__ == '__main__':
startup()
|
epl-1.0
| 4,327,361,219,604,347,400
| 29.45679
| 102
| 0.691528
| false
| 3.509246
| false
| false
| false
|
srio/shadow3-scripts
|
HIGHLIGHTS/occupation.py
|
1
|
3634
|
from orangecontrib.comsyl.util.CompactAFReader import CompactAFReader
# from CompactAFReader import CompactAFReader
import numpy
from srxraylib.plot.gol import plot_image, plot
# from plot_color import plot_with_transparency_one
import pylab as plt
from matplotlib.colors import Normalize, ListedColormap
import matplotlib.patches as patches
def convert_to_h5(file_from,file_to):
af = CompactAFReader.initialize_from_file(file_from)
af.write_h5(file_to)
print("File written to disk: ",file_to)
if __name__ == "__main__":
# filename_ebs = "/scisoft/data/srio/COMSYL/ID16/id16s_ebs_u18_1400mm_1h_new_s1.0.npy"
# filename_ebs = "/scisoft/data/srio/COMSYL/CALCULATIONS/cs_new_u18_2m_1h_s2.5.h5" # NOT GOOD
# convert_to_h5("/scisoft/users/glass/Documents/sources/Orange-SRW/comsyl/calculations/cs_new_u18_2m_1h_s2.5.npz",
# "cs_new_u18_2m_1h_s2.5.h5")
# convert_to_h5("/scisoft/users/glass/Documents/sources/Orange-SRW/comsyl/calculations/cl_low_beta_u18_2m_1h_s6.5.npy",
# "cl_low_beta_u18_2m_1h_s6.5.h5")
# filename_ebs = "cs_new_u18_2m_1h_s2.5.h5"
# filename_ebs = "cl_low_beta_u18_2m_1h_s6.5.h5"
# filename_ebs = "/scisoft/users/glass/Documents/sources/Orange-SRW/comsyl/calculations/new_u18_2m_1h_ts_s2.0.npz"
filename_ebs = "/scisoft/users/glass/Documents/sources/Orange-SRW/comsyl/calculations/cs_new_u18_2m_1h_s2.5.npz" # OK EBS
filename_lb = "/scisoft/users/glass/Documents/sources/Orange-SRW/comsyl/calculations/cl_low_beta_u18_2m_1h_s6.5.npy" # OK LB
filename_hb = "/scisoft/users/glass/Documents/sources/Orange-SRW/comsyl/calculations/cl_high_beta_u18_2m_1h_s2.0.npy"
#
# load CSD
#
af_ebs = CompactAFReader.initialize_from_file(filename_ebs)
cumulated_occupation_ebs = af_ebs.cumulated_occupation_array()
occupation_ebs = af_ebs.occupation_array()
af_lb = CompactAFReader.initialize_from_file(filename_lb)
cumulated_occupation_lb = af_lb.cumulated_occupation_array()
occupation_lb = af_lb.occupation_array()
af_hb = CompactAFReader.initialize_from_file(filename_hb)
cumulated_occupation_hb = af_hb.cumulated_occupation_array()
occupation_hb = af_hb.occupation_array()
#
print("Coherent fraction EBS: ",cumulated_occupation_ebs[0])
print("Coherent fraction LB: ",cumulated_occupation_lb[0])
print("Coherent fraction HB: ",cumulated_occupation_hb[0])
extensions = ["ebs","lb","hb"]
data = [cumulated_occupation_ebs,cumulated_occupation_lb,cumulated_occupation_hb]
data_occ = [occupation_ebs,occupation_lb,occupation_hb]
plot(numpy.arange(cumulated_occupation_ebs.size),cumulated_occupation_ebs,
numpy.arange(cumulated_occupation_lb.size),cumulated_occupation_lb,
numpy.arange(cumulated_occupation_hb.size),cumulated_occupation_hb,
legend=extensions)
for i,extension in enumerate(extensions):
f = open("cumulated_occupation_%s.dat"%extension,'w')
data_i = data[i]
for j in range(data_i.size):
f.write("%d %g \n"%(j,data_i[j]))
f.close()
print("File written to disk: cumulated_occupation_%s.dat"%extension)
f = open("occupation_%s.dat"%extension,'w')
data_i = data_occ[i]
for j in range(data_i.size):
f.write("%d %g \n"%(j,data_i[j]))
f.close()
print("File written to disk: occupation_%s.dat"%extension)
#
# get indices
#
# first propagate a few modes only to check there are no errors
# afp = AFpropagated.propagate(af,distance=distance,index_max=1,zoom=zoom)
|
mit
| 2,380,184,108,273,730,000
| 34.281553
| 128
| 0.682168
| false
| 2.691852
| false
| false
| false
|
erilyth/PyGame-Learning-Environment
|
ple/games/flappybird/__init__.py
|
1
|
13533
|
import os
import sys
import numpy as np
import pygame
from pygame.constants import K_w
from .. import base
class BirdPlayer(pygame.sprite.Sprite):
def __init__(self,
SCREEN_WIDTH, SCREEN_HEIGHT, init_pos,
image_assets, rng, color="red", scale=1.0):
self.SCREEN_WIDTH = SCREEN_WIDTH
self.SCREEN_HEIGHT = SCREEN_HEIGHT
self.image_order = [0, 1, 2, 1]
#done image stuff
pygame.sprite.Sprite.__init__(self)
self.image_assets = image_assets
self.init(init_pos, color)
self.height = self.image.get_height()
self.scale = scale
#all in terms of y
self.vel = 0
self.FLAP_POWER = 9*self.scale
self.MAX_DROP_SPEED = 10.0
self.GRAVITY = 1.0*self.scale
self.rng = rng
self._oscillateStartPos() #makes the direction and position random
self.rect.center = (self.pos_x, self.pos_y) #could be done better
def init(self, init_pos, color):
#set up the surface we draw the bird too
self.flapped = True #start off w/ a flap
self.current_image = 0
self.color = color
self.image = self.image_assets[self.color][self.current_image]
self.rect = self.image.get_rect()
self.thrust_time = 0.0
self.tick = 0
self.pos_x = init_pos[0]
self.pos_y = init_pos[1]
def _oscillateStartPos(self):
offset = 8*np.sin( self.rng.rand() * np.pi )
self.pos_y += offset
def flap(self):
if self.pos_y > -2.0*self.image.get_height():
self.vel = 0.0
self.flapped = True
def update(self, dt):
self.tick += 1
#image cycle
if (self.tick + 1) % 15 == 0:
self.current_image += 1
if self.current_image >= 3:
self.current_image = 0
#set the image to draw with.
self.image = self.image_assets[self.color][self.current_image]
self.rect = self.image.get_rect()
if self.vel < self.MAX_DROP_SPEED and self.thrust_time == 0.0:
self.vel += self.GRAVITY
        #the whole point is to spread the flap thrust over the same span of time it would take at 30fps.
if self.thrust_time+dt <= (1.0/30.0) and self.flapped:
self.thrust_time += dt
self.vel += -1.0*self.FLAP_POWER
else:
self.thrust_time = 0.0
self.flapped = False
self.pos_y += self.vel
self.rect.center = (self.pos_x, self.pos_y)
def draw(self, screen):
screen.blit(self.image, self.rect.center)
class Pipe(pygame.sprite.Sprite):
def __init__(self,
SCREEN_WIDTH, SCREEN_HEIGHT, gap_start, gap_size, image_assets, scale,
offset=0, color="green"):
self.speed = 4.0*scale
self.SCREEN_WIDTH = SCREEN_WIDTH
self.SCREEN_HEIGHT = SCREEN_HEIGHT
self.image_assets = image_assets
#done image stuff
self.width = self.image_assets["green"]["lower"].get_width()
pygame.sprite.Sprite.__init__(self)
self.image = pygame.Surface((self.width, self.SCREEN_HEIGHT))
self.image.set_colorkey((0,0,0))
self.init(gap_start, gap_size, offset, color)
def init(self, gap_start, gap_size, offset, color):
self.image.fill((0,0,0))
self.gap_start = gap_start
self.x = self.SCREEN_WIDTH+self.width+offset
self.lower_pipe = self.image_assets[color]["lower"]
self.upper_pipe = self.image_assets[color]["upper"]
top_bottom = gap_start-self.upper_pipe.get_height()
bottom_top = gap_start+gap_size
self.image.blit(self.upper_pipe, (0, top_bottom ))
self.image.blit(self.lower_pipe, (0, bottom_top ))
self.rect = self.image.get_rect()
self.rect.center = (self.x, self.SCREEN_HEIGHT/2)
def update(self, dt):
self.x -= self.speed
self.rect.center = (self.x, self.SCREEN_HEIGHT/2)
class Backdrop():
def __init__(self, SCREEN_WIDTH, SCREEN_HEIGHT, image_background, image_base, scale):
self.SCREEN_WIDTH = SCREEN_WIDTH
self.SCREEN_HEIGHT = SCREEN_HEIGHT
self.background_image = image_background
self.base_image = image_base
self.x = 0
self.speed = 4.0*scale
self.max_move = self.base_image.get_width() - self.background_image.get_width()
def update_draw_base(self, screen, dt):
#the extra is on the right
if self.x > -1*self.max_move:
self.x -= self.speed
else:
self.x = 0
screen.blit(self.base_image, (self.x, self.SCREEN_HEIGHT*0.79))
def draw_background(self, screen):
screen.blit(self.background_image, (0,0))
class FlappyBird(base.Game):
"""
Used physics values from sourabhv's `clone`_.
.. _clone: https://github.com/sourabhv/FlapPyBird
Parameters
----------
width : int (default: 288)
Screen width. Consistent gameplay is not promised for different widths or heights, therefore the width and height should not be altered.
    height : int (default: 512)
Screen height.
pipe_gap : int (default: 100)
The gap in pixels left between the top and bottom pipes.
"""
def __init__(self, width=288, height=512, pipe_gap=100):
actions = {
"up": K_w
}
fps = 30
base.Game.__init__(self, width, height, actions=actions)
self.scale = 30.0/fps
self.allowed_fps = 30 #restrict the fps
        self.pipe_gap = pipe_gap
self.pipe_color = "red"
self.images = {}
#so we can preload images
pygame.display.set_mode((1,1), pygame.NOFRAME)
self._dir_ = os.path.dirname(os.path.abspath(__file__))
self._asset_dir = os.path.join( self._dir_, "assets/" )
self._load_images()
self.pipe_offsets = [0, self.width*0.5, self.width]
self.init_pos = (
int( self.width * 0.2),
int( self.height / 2 )
)
self.pipe_min = int(self.pipe_gap/4)
self.pipe_max = int(self.height*0.79*0.6 - self.pipe_gap/2)
self.backdrop = None
self.player = None
self.pipe_group = None
def _load_images(self):
#preload and convert all the images so its faster when we reset
self.images["player"] = {}
for c in ["red", "blue", "yellow"]:
image_assets = [
os.path.join( self._asset_dir, "%sbird-upflap.png" % c ),
os.path.join( self._asset_dir, "%sbird-midflap.png" % c ),
os.path.join( self._asset_dir, "%sbird-downflap.png" % c ),
]
self.images["player"][c] = [ pygame.image.load(im).convert_alpha() for im in image_assets ]
self.images["background"] = {}
for b in ["day", "night"]:
path = os.path.join( self._asset_dir, "background-%s.png" % b )
self.images["background"][b] = pygame.image.load(path).convert()
self.images["pipes"] = {}
for c in ["red", "green"]:
path = os.path.join( self._asset_dir, "pipe-%s.png" % c )
self.images["pipes"][c] = {}
self.images["pipes"][c]["lower"] = pygame.image.load(path).convert_alpha()
self.images["pipes"][c]["upper"] = pygame.transform.rotate(self.images["pipes"][c]["lower"], 180)
path = os.path.join( self._asset_dir, "base.png" )
self.images["base"] = pygame.image.load(path).convert()
def init(self):
if self.backdrop is None:
self.backdrop = Backdrop(
self.width,
self.height,
self.images["background"]["day"],
self.images["base"],
self.scale
)
if self.player is None:
self.player = BirdPlayer(
self.width,
self.height,
self.init_pos,
self.images["player"],
self.rng,
color="red",
scale=self.scale
)
if self.pipe_group is None:
self.pipe_group = pygame.sprite.Group([
self._generatePipes(offset=-75),
self._generatePipes(offset=-75+self.width/2),
self._generatePipes(offset=-75+self.width*1.5)
])
color = self.rng.choice(["day", "night"])
self.backdrop.background_image = self.images["background"][color]
#instead of recreating
color = self.rng.choice(["red", "blue", "yellow"])
self.player.init(self.init_pos, color)
self.pipe_color = self.rng.choice(["red", "green"])
for i,p in enumerate(self.pipe_group):
self._generatePipes(offset=self.pipe_offsets[i], pipe=p)
self.score = 0.0
self.lives = 1
self.tick = 0
def getGameState(self):
"""
Gets a non-visual state representation of the game.
Returns
-------
dict
* player y position.
* players velocity.
* next pipe distance to player
* next pipe top y position
* next pipe bottom y position
* next next pipe distance to player
* next next pipe top y position
* next next pipe bottom y position
See code for structure.
"""
pipes = []
for p in self.pipe_group:
if p.x > self.player.pos_x:
pipes.append((p, p.x - self.player.pos_x))
        pipes.sort(key=lambda p: p[1])
next_pipe = pipes[1][0]
next_next_pipe = pipes[0][0]
if next_next_pipe.x < next_pipe.x:
next_pipe, next_next_pipe = next_next_pipe, next_pipe
state = {
"player_y": self.player.pos_y,
"player_vel": self.player.vel,
"next_pipe_dist_to_player": next_pipe.x - self.player.pos_x,
"next_pipe_top_y": next_pipe.gap_start,
"next_pipe_bottom_y": next_pipe.gap_start+self.pipe_gap,
"next_next_pipe_dist_to_player": next_next_pipe.x - self.player.pos_x,
"next_next_pipe_top_y": next_next_pipe.gap_start,
"next_next_pipe_bottom_y": next_next_pipe.gap_start+self.pipe_gap
}
return state
def getScore(self):
return self.score
def _generatePipes(self, offset=0, pipe=None):
start_gap = self.rng.random_integers(
self.pipe_min,
self.pipe_max
)
        if pipe is None:
pipe = Pipe(
self.width,
self.height,
start_gap,
self.pipe_gap,
self.images["pipes"],
self.scale,
color=self.pipe_color,
offset=offset
)
return pipe
else:
pipe.init(start_gap, self.pipe_gap, offset, self.pipe_color)
def _handle_player_events(self):
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
sys.exit()
if event.type == pygame.KEYDOWN:
key = event.key
if key == self.actions['up']:
self.player.flap()
def game_over(self):
return self.lives <= 0
def step(self, dt):
self.tick += 1
dt = dt / 1000.0
self.score += self.rewards["tick"]
#handle player movement
self._handle_player_events()
for p in self.pipe_group:
hit = pygame.sprite.spritecollide(self.player, self.pipe_group, False)
            for h in hit:    #do check to see if it's within the gap.
top_pipe_check = ((self.player.pos_y - self.player.height/2) <= h.gap_start)
bot_pipe_check = ((self.player.pos_y + self.player.height) > h.gap_start+self.pipe_gap)
if top_pipe_check:
self.lives -= 1
if bot_pipe_check:
self.lives -= 1
#is it past the player?
if (p.x - p.width/2) <= self.player.pos_x < (p.x - p.width/2 + 4):
self.score += self.rewards["positive"]
#is out out of the screen?
if p.x < -p.width:
self._generatePipes(offset=self.width*0.2, pipe=p)
#fell on the ground
if self.player.pos_y >= 0.79*self.height - self.player.height:
self.lives -= 1
#went above the screen
if self.player.pos_y < -self.player.height:
self.lives -= 1
self.player.update(dt)
self.pipe_group.update(dt)
if self.lives <= 0:
self.score += self.rewards["loss"]
self.backdrop.draw_background(self.screen)
self.pipe_group.draw(self.screen)
self.backdrop.update_draw_base(self.screen, dt)
self.player.draw(self.screen)
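# Example (a minimal driver sketch; the fps/rewards wiring is normally done
# by PLE itself, so this usage is an assumption, not part of this module):
#     from ple import PLE
#     game = FlappyBird(pipe_gap=120)
#     env = PLE(game, fps=30, display_screen=True)
#     env.init()
#     reward = env.act(game.actions['up'])  # flap once, advance one frame
#     state = game.getGameState()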
|
mit
| -4,636,007,687,599,830,000
| 30.767606
| 144
| 0.522205
| false
| 3.699563
| false
| false
| false
|
12425/pac-maker
|
pac-maker.py
|
1
|
5031
|
#!/usr/bin/env python3
# vim: fileencoding=utf-8
import os
import re
from base64 import standard_b64decode as b64decode
from os.path import dirname, isfile, expanduser
from configparser import ConfigParser
from urllib.request import urlopen
HOST_PAT = re.compile(r'^[\w-]+(\.[\w-]+)+$')
PORT_PAT = re.compile(r':\d+$')
def fetch_list(path, decode=False):
if path.startswith('http'):
with urlopen(path) as res:
content = res.read()
else:
with open(expanduser(path), 'rb') as f:
content = f.read()
if decode:
content = b64decode(content)
return content.decode('utf8')
def parse_list(content, gfw=True):
exc = set()
inc = set()
for line in content.split('\n'):
line = line.strip()
if not line:
continue
add_line(line, gfw, inc, exc)
inc -= exc
return inc, exc
def merge_list(inc1, exc1, inc2, exc2):
inc = ((inc1 - exc1) | inc2) - exc2
exc = (exc1 - inc2) | exc2
inc -= exc
return inc, exc
def add_line(line, gfw, inc, exc):
if line.startswith('!'):
return
negative = False
if line.startswith('@@'):
negative = True
line = line[2:]
if line.startswith('||'):
parse_double_pipe(line[2:], negative, gfw, inc, exc)
elif line.startswith('|'):
parse_single_pipe(line[1:], negative, gfw, inc, exc)
else:
parse_normal(line, negative, gfw, inc, exc)
def parse_double_pipe(line, negative, gfw, inc, exc):
line = line.replace('*', '')
if line.startswith('.'):
line = line[1:]
if line.endswith('.^'):
return
if line.endswith('/') or line.endswith('^') or line.endswith('.'):
line = line[:-1]
if '..' in line:
return
if not gfw:
if '/' in line or '^' in line or '$' in line:
return
if PORT_PAT.search(line):
return
if HOST_PAT.match(line):
if negative:
exc.add(line)
else:
inc.add(line)
return
print('|| Format not recognized:', line)
def parse_single_pipe(line, negative, gfw, inc, exc):
if line.startswith('http://'):
line = line[7:]
elif line.startswith('https://'):
line = line[8:]
if not gfw:
if '$' in line:
return
line = line.replace('*', '')
if line.startswith('/') or '..' in line:
return
if line.startswith('.'):
line = line[1:]
if line.endswith('.'):
line = line[:-1]
if line.endswith('/') or line.endswith('^'):
line = line[:-1]
if gfw:
line = line.split('/', 1)[0]
else:
if '/' in line:
return
if '.' not in line:
return
if HOST_PAT.match(line):
if negative:
exc.add(line)
else:
inc.add(line)
return
print('| Format not recognized:', line)
def parse_normal(line, negative, gfw, inc, exc):
line = line.replace('*', '')
if line.startswith('.'):
line = line[1:]
if line.endswith('/') or line.endswith('^'):
line = line[:-1]
elif line.endswith('%2F'):
line = line[:-3]
if line.startswith('?') or '&' in line:
return
if gfw:
line = line.split('/', 1)[0]
else:
if line.endswith('.'):
line = line[:-1]
if '/' in line or '#' in line or '$' in line or '?' in line:
return
if HOST_PAT.match(line):
if negative:
exc.add(line)
else:
inc.add(line)
return
if line == 'http:':
return
if line.startswith('[AutoProxy ') or line.startswith('[Adblock Plus '):
return
if '.' not in line:
return
if PORT_PAT.search(line):
return
print('Format not recognized:', line)
def generate_pac_file(adblist, gfwlist):
global conf
pacfile = next(iter(conf['pac_file']))
with open('pac.js', encoding='utf8') as fi:
pac = fi.read()
pac = pac.replace('$ADBLIST', dict_to_object(adblist))
pac = pac.replace('$GFWLIST', dict_to_object(gfwlist))
with open(expanduser(pacfile), 'w', encoding='utf8') as fo:
fo.write(pac)
print(pacfile, 'generated.')
def dict_to_object(l):
return ',\n '.join(('"%s":1' % x for x in l))
def load_conf(ini):
if not isfile(ini):
        print('Config file does not exist: %s' % ini)
return
conf = ConfigParser(delimiters=('='), allow_no_value=True)
conf.read(ini, 'utf8')
return dict(conf)
if __name__ == '__main__':
os.chdir(dirname(__file__))
global conf
conf = load_conf('conf-pac-maker.ini')
if not conf:
exit()
# gfwlist
inc = set()
exc = set()
for f in conf['gfwlist'].keys():
l = fetch_list(f, decode=True)
inc2, exc2 = parse_list(l, True)
inc, exc = merge_list(inc, exc, inc2, exc2)
for f in conf['my_gfwlist'].keys():
l = fetch_list(f)
inc2, exc2 = parse_list(l, True)
inc, exc = merge_list(inc, exc, inc2, exc2)
gfwlist = inc - exc
# adblocklist
inc.clear()
exc.clear()
for f in conf['adblocklist'].keys():
l = fetch_list(f)
inc2, exc2 = parse_list(l, False)
inc, exc = merge_list(inc, exc, inc2, exc2)
for f in conf['my_adblocklist'].keys():
l = fetch_list(f)
inc2, exc2 = parse_list(l, False)
inc, exc = merge_list(inc, exc, inc2, exc2)
adblist = inc - exc
generate_pac_file(adblist, gfwlist - adblist)
|
bsd-3-clause
| -103,067,667,032,438,510
| 24.538071
| 73
| 0.595309
| false
| 3.111317
| false
| false
| false
|
mjs/juju
|
acceptancetests/update_lxc_cache.py
|
1
|
7813
|
#!/usr/bin/python
"""Update the lxc 'download' template cache for hosts on closed networks."""
from __future__ import print_function
from argparse import ArgumentParser
from collections import namedtuple
import errno
import os
import sys
import traceback
import shutil
import subprocess
import urllib2
SITE = 'https://images.linuxcontainers.org'
INDEX_PATH = 'meta/1.0'
INDEX = 'index-system'
ROOTFS = 'rootfs.tar.xz'
META = 'meta.tar.xz'
LXC_CACHE = '/var/cache/lxc/download'
System = namedtuple(
'System', ['dist', 'release', 'arch', 'variant', 'version', 'path'])
PUT_SCRIPT = """\
scp {rootfs_path} {meta_path} {user_host}:~/
"""
INSTALL_SCRIPT = """\
ssh {user_host} bash <<"EOT"
sudo mkdir -p {lxc_cache}
sudo mv ~/{rootfs} ~/{meta} {lxc_cache}
sudo chown -R root:root {lxc_cache}
sudo tar -C {lxc_cache} -xf {lxc_cache}/meta.tar.xz
EOT
"""
class LxcCache:
"""Manage the LXC download template cache."""
def __init__(self, workspace, verbose=False, dry_run=False):
"""Set the workspace for the local cache."""
self.workspace = os.path.abspath(workspace)
self.verbose = verbose
self.dry_run = dry_run
local_path = os.path.join(self.workspace, INDEX_PATH, INDEX)
self.systems, ignore = self.init_systems(local_path)
def init_systems(self, location):
"""Return a tuple of the dict of lxc Systems and the source data.
A System has these attributes: 'dist', 'release', 'arch', 'variant',
'version', and 'path'. The dict keys are a tuple of
(dist, release, arch, variant).
"""
systems = {}
if location.startswith('http'):
request = urllib2.Request(location)
response = urllib2.urlopen(request)
data = response.read()
else:
try:
with open(location) as f:
data = f.read()
except IOError as e:
if e.errno == errno.ENOENT:
if self.verbose:
print('Local cache is empty.')
return systems, None
for line in data.splitlines():
system = System(*line.split(';'))
key = (system.dist, system.release, system.arch, system.variant)
systems[key] = system
return systems, data
def get_updates(self, dist, release, arch, variant):
"""Return a tuple of the new system and the source data that match.
The new system and source data will be None when there are
no updates. The dist, release, arch, and variant args identify the
system to return.
"""
key = (dist, release, arch, variant)
old_system = self.systems.get(key)
url = '%s/%s/%s' % (SITE, INDEX_PATH, INDEX)
new_systems, data = self.init_systems(url)
new_system = new_systems[key]
if not old_system or new_system.version > old_system.version:
if self.verbose:
print('Found new version for %s' % str(key))
print(new_system.version)
return new_system, data
if self.verbose:
print('Version is current for %s' % str(key))
print(old_system.version)
return None, None
def get_lxc_data(self, system):
"""Download the system image and meta data.
Return a tuple of the image and meta data paths.
"""
image_path = os.path.join(self.workspace, system.path[1:])
if not self.dry_run:
if self.verbose:
print('creating %s' % image_path)
if not os.path.isdir(image_path):
os.makedirs(image_path)
rootfs_path = os.path.join(image_path, ROOTFS)
rootfs_url = '%s%s%s' % (SITE, system.path, ROOTFS)
self.download(rootfs_url, rootfs_path)
meta_path = os.path.join(image_path, META)
meta_url = '%s%s%s' % (SITE, system.path, META)
self.download(meta_url, meta_path)
return rootfs_path, meta_path
def download(self, location, path):
"""Download a large binary from location to the specified path."""
chunk = 16 * 1024
if not self.dry_run:
request = urllib2.Request(location)
response = urllib2.urlopen(request)
if response.getcode() == 200:
with open(path, 'wb') as f:
shutil.copyfileobj(response, f, chunk)
if self.verbose:
print('Downloaded %s' % location)
def put_lxc_data(self, user_host, system, rootfs_path, meta_path):
"""Install the lxc image and meta data on the host.
The user on the host must have password-less sudo.
"""
lxc_cache = os.path.join(
LXC_CACHE, system.dist, system.release, system.arch,
system.variant)
put_script = PUT_SCRIPT.format(
user_host=user_host, rootfs_path=rootfs_path, meta_path=meta_path)
if not self.dry_run:
subprocess.check_call([put_script], shell=True)
if self.verbose:
print("Uploaded %s and %s" % (ROOTFS, META))
install_script = INSTALL_SCRIPT.format(
user_host=user_host, lxc_cache=lxc_cache, rootfs=ROOTFS, meta=META)
if not self.dry_run:
subprocess.check_call([install_script], shell=True)
if self.verbose:
print("Installed %s and %s" % (ROOTFS, META))
def save_index(self, data):
"Save the (current) index data for future calls to get_updates()."
index_dir = os.path.join(self.workspace, INDEX_PATH)
if not os.path.isdir(index_dir):
os.makedirs(index_dir)
index_path = os.path.join(self.workspace, INDEX_PATH, INDEX)
with open(index_path, 'w') as f:
f.write(data)
if self.verbose:
print('saved index: %s' % INDEX)
def parse_args(argv=None):
"""Return the argument parser for this program."""
parser = ArgumentParser(
"Update a remote host's download lxc template cache.")
parser.add_argument(
'-d', '--dry-run', action='store_true', default=False,
help='Do not make changes.')
parser.add_argument(
'-v', '--verbose', action='store_true', default=False,
help='Increase verbosity.')
parser.add_argument(
'--dist', default="ubuntu", help="The distribution to update.")
parser.add_argument(
'--variant', default="default", help="The variant to update.")
parser.add_argument(
'user_host', help='The user@host to update.')
parser.add_argument(
'release', help='The release to update.')
parser.add_argument(
'arch', help='The architecture of the remote host')
parser.add_argument(
'workspace', help='The path to the local dir to stage the update.')
args = parser.parse_args(argv)
return args
def main(argv):
"""Update the lxc download template cache for hosts on closed networks."""
args = parse_args(argv)
try:
lxc_cache = LxcCache(
args.workspace, verbose=args.verbose, dry_run=args.dry_run)
new_system, data = lxc_cache.get_updates(
args.dist, args.release, args.arch, args.variant)
if new_system:
rootfs_path, meta_path = lxc_cache.get_lxc_data(new_system)
lxc_cache.put_lxc_data(
args.user_host, new_system, rootfs_path, meta_path)
lxc_cache.save_index(data)
except Exception as e:
print(e)
print(getattr(e, 'output', ''))
if args.verbose:
traceback.print_tb(sys.exc_info()[2])
return 2
if args.verbose:
print("Done.")
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
|
agpl-3.0
| -5,232,036,418,743,799,000
| 34.83945
| 79
| 0.588634
| false
| 3.758057
| false
| false
| false
|
fjacob21/MAX
|
service/src/features/tv/eg_tv_feature.py
|
1
|
1266
|
from eg_networksender import Send
class eg_tv_feature(object):
def __init__(self, device):
self._device = device
@property
def name(self):
return 'tv'
@property
def version(self):
return 1
@property
def description(self):
return "Control TV using evenghost receiver"
@property
def functions(self):
return ['open', 'close', 'state']
def execute(self, cmd, params):
if cmd == 'open':
return self.open(params)
if cmd == 'close':
return self.close(params)
if cmd == 'state':
return self.state(params)
def open(self, params):
return {'device':self._device.json, 'feature':self.name, 'result': Send('OpenTV', self._device.ip)}
def close(self, params):
return {'device':self._device.json, 'feature':self.name, 'result': Send('CloseTV', self._device.ip)}
def state(self, params):
        if not self._device.is_online()['isonline']:
            return {'device':self._device.json, 'feature':self.name, 'result': True, 'state': 0}
        result = Send('GetState', self._device.ip)
        return {'device':self._device.json, 'feature':self.name, 'result': result}
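# Example (hypothetical device object, a sketch of how execute() dispatches;
# the device is assumed to expose .ip, .json and .is_online()):
#     feature = eg_tv_feature(device)
#     feature.execute('open', {})   # -> {'device': ..., 'feature': 'tv', 'result': ...}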
|
mit
| 1,546,015,063,897,016,300
| 29.142857
| 109
| 0.587678
| false
| 3.801802
| false
| false
| false
|
TheAlgorithms/Python
|
graphs/minimum_spanning_tree_kruskal.py
|
1
|
1393
|
from typing import List, Tuple
def kruskal(
    num_nodes: int, num_edges: int, edges: List[Tuple[int, int, int]]
) -> List[Tuple[int, int, int]]:
"""
>>> kruskal(4, 3, [(0, 1, 3), (1, 2, 5), (2, 3, 1)])
[(2, 3, 1), (0, 1, 3), (1, 2, 5)]
>>> kruskal(4, 5, [(0, 1, 3), (1, 2, 5), (2, 3, 1), (0, 2, 1), (0, 3, 2)])
[(2, 3, 1), (0, 2, 1), (0, 1, 3)]
>>> kruskal(4, 6, [(0, 1, 3), (1, 2, 5), (2, 3, 1), (0, 2, 1), (0, 3, 2),
... (2, 1, 1)])
[(2, 3, 1), (0, 2, 1), (2, 1, 1)]
"""
edges = sorted(edges, key=lambda edge: edge[2])
parent = list(range(num_nodes))
def find_parent(i):
if i != parent[i]:
parent[i] = find_parent(parent[i])
return parent[i]
minimum_spanning_tree_cost = 0
minimum_spanning_tree = []
for edge in edges:
parent_a = find_parent(edge[0])
parent_b = find_parent(edge[1])
if parent_a != parent_b:
minimum_spanning_tree_cost += edge[2]
minimum_spanning_tree.append(edge)
parent[parent_a] = parent_b
return minimum_spanning_tree
if __name__ == "__main__": # pragma: no cover
num_nodes, num_edges = list(map(int, input().strip().split()))
edges = []
for _ in range(num_edges):
node1, node2, cost = [int(x) for x in input().strip().split()]
edges.append((node1, node2, cost))
kruskal(num_nodes, num_edges, edges)
|
mit
| 2,064,725,714,210,795,800
| 28.638298
| 86
| 0.498923
| false
| 2.7154
| false
| false
| false
|
andela-ooshodi/django-bucketlist-application
|
djangobucketlist/djangobucketlist/settings/base.py
|
1
|
4073
|
"""
Django settings for djangobucketlist project.
Generated by 'django-admin startproject' using Django 1.8.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
from django.contrib.messages import constants as message_constants
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'm(%x1m*2!qs9(l(s&n0nft&$9%3dbpcrc_v#*3cxd7#thj0zbb'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'bucketlist',
'apiv1',
'bootstrapform',
'djangobower',
'rest_framework',
'rest_framework.authtoken',
'rest_framework_swagger'
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'djangobucketlist.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'djangobucketlist.wsgi.application'
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
APPEND_SLASH = False
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATICFILES_DIRS = (
os.path.join(BASE_DIR, '..', 'bucketlist/static'),
)
STATIC_URL = '/static/'
STATIC_ROOT = 'staticfiles'
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
'djangobower.finders.BowerFinder'
)
STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage'
# custom message tag for django messaging middleware
MESSAGE_TAGS = {
message_constants.ERROR: 'danger'
}
# Django REST_FRAMEWORK global settings
REST_FRAMEWORK = {
'DEFAULT_PERMISSION_CLASSES': (
'rest_framework.permissions.IsAuthenticated',
),
'DEFAULT_AUTHENTICATION_CLASSES': (
'rest_framework.authentication.BasicAuthentication',
'rest_framework.authentication.TokenAuthentication',
),
'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.PageNumberPagination',
'PAGE_SIZE': 10,
'TEST_REQUEST_DEFAULT_FORMAT': 'json'
}
# Swagger settings
SWAGGER_SETTINGS = {
'exclude_namespaces': [],
'api_version': 'version 1',
}
# Bower configurations
BOWER_INSTALLED_APPS = (
'mdi',
'jquery',
'bootstrap',
)
BOWER_COMPONENTS_ROOT = os.path.join(BASE_DIR, '..', 'bucketlist/static')
# Default database configuration
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'bucketlist-db',
}
}
|
gpl-2.0
| -2,429,033,043,398,538,000
| 24.45625
| 81
| 0.69752
| false
| 3.544822
| false
| false
| false
|
spktklr/kansalaisrajoite
|
python/vote.py
|
1
|
1105
|
# coding=utf-8
from bottle import Bottle, HTTPError
from sqlalchemy.orm.exc import NoResultFound
from sqlalchemy.orm import joinedload
import model
from utils import jsonplugin
import auth
app = Bottle()
app.install(model.plugin)
app.install(jsonplugin)
@app.get('/<id:int>')
@auth.require_login
def read_one(db, user, id):
try:
item = db.query(model.Restriction) \
.options(joinedload(model.Restriction.voters)) \
.filter_by(id=id).one()
return {'voted': user in item.voters}
except NoResultFound:
return HTTPError(404, 'Not found')
@app.post('/<id:int>')
@auth.require_login
def create(db, user, id):
try:
item = db.query(model.Restriction).filter_by(id=id).one()
item.voters.add(user)
except NoResultFound:
return HTTPError(404, 'Not found')
# Disabled
# @app.delete('/<id:int>')
# @auth.require_login
def delete(db, user, id):
try:
item = db.query(model.Restriction).filter_by(id=id).one()
item.voters.remove(user)
except NoResultFound:
return HTTPError(404, 'Not found')
|
agpl-3.0
| -5,562,420,966,119,401,000
| 23.021739
| 65
| 0.657919
| false
| 3.269231
| false
| false
| false
|
michaelneuder/image_quality_analysis
|
bin/nets/old/pixel_diff_conv_net_double_feed.py
|
1
|
6455
|
#!/usr/bin/env python3
import os
os.environ['TF_CPP_MIN_LOG_LEVEL']='2'
import numpy as np
np.set_printoptions(threshold=np.nan)
import tensorflow as tf
import time
def convolve_inner_layers(x, W, b):
y = tf.nn.conv2d(x, W, strides = [1,1,1,1], padding='SAME')
y = tf.nn.bias_add(y, b)
return tf.nn.tanh(y)
def convolve_ouput_layer(x, W, b):
y = tf.nn.conv2d(x, W, strides = [1,1,1,1], padding='SAME')
y = tf.nn.bias_add(y, b)
return y
def conv_net(x, W, b):
conv1 = convolve_inner_layers(x, W['weights1'], b['bias1'])
conv2 = convolve_inner_layers(conv1, W['weights2'], b['bias2'])
conv3 = convolve_inner_layers(conv2, W['weights3'], b['bias3'])
output_feed = tf.concat([conv1, conv2, conv3],3)
output = convolve_ouput_layer(output_feed, W['weights_out'], b['bias_out'])
return output
def get_variance(training_target):
all_pixels = training_target.flatten()
return all_pixels.var()
def get_epoch(x, y, n):
input_size = x.shape[0]
number_batches = input_size // n
extra_examples = input_size % n
batches = {}
batch_indices = np.arange(input_size)
np.random.shuffle(batch_indices)
for i in range(number_batches):
temp_indices = batch_indices[n*i:n*(i+1)]
temp_x = []
temp_y = []
for j in temp_indices:
temp_x.append(x[j])
temp_y.append(y[j])
batches[i] = [np.asarray(temp_x), np.asarray(temp_y)]
if extra_examples != 0:
extra_indices = batch_indices[input_size-extra_examples:input_size]
temp_x = []
temp_y = []
for k in extra_indices:
temp_x.append(x[k])
temp_y.append(y[k])
        batches[number_batches] = [np.asarray(temp_x), np.asarray(temp_y)]
return batches
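# Example (tiny hypothetical arrays, a sketch of get_epoch's shuffled
# mini-batching; 5 examples with n=2 yield two full batches plus a remainder):
#     xs = np.arange(5); ys = np.arange(5)
#     batches = get_epoch(xs, ys, 2)
#     # len(batches) == 3; batches[2] holds the single leftover example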
def main():
# parameters
filter_dim = 7
filter_dim2 = 1
number_images = 100
batch_size = 4
image_dim = 96
input_layer = 2
first_layer = 50
second_layer = 25
third_layer = 10
output_layer = 1
initializer_scale = 10.0
learning_rate = .001
epochs = 130
    # seeding for debug purposes --- don't forget to remove
SEED = 12345
np.random.seed(SEED)
tf.set_random_seed(SEED)
print('generating random images ... ')
# train images
rand_img_train_1 = np.random.random_sample((number_images,image_dim**2))
rand_img_train_2 = np.random.random_sample((number_images,image_dim**2))
difference_train = abs(rand_img_train_1 - rand_img_train_2)
# test image
rand_img_test_1 = np.random.random_sample((number_images,image_dim**2))
rand_img_test_2 = np.random.random_sample((number_images,image_dim**2))
difference_test = abs(rand_img_test_1 - rand_img_test_2)
# stacking & reshaping images
train_data = np.reshape(np.dstack((rand_img_train_1, rand_img_train_2)), [number_images,image_dim,image_dim,2])
test_data = np.reshape(np.dstack((rand_img_test_1, rand_img_test_2)), [number_images,image_dim,image_dim,2])
target_data_train = np.reshape(difference_train, [number_images,image_dim,image_dim,1])
target_data_test = np.reshape(difference_test, [number_images,image_dim,image_dim,1])
# initializing variables --- fan in
weights = {
'weights1': tf.Variable(tf.random_normal([filter_dim,filter_dim,input_layer,first_layer],stddev=(1.0/(initializer_scale*filter_dim*filter_dim*input_layer)))),
'weights2': tf.Variable(tf.random_normal([filter_dim2,filter_dim2,first_layer,second_layer],stddev=(1.0/(initializer_scale*filter_dim2*filter_dim2*first_layer)))),
'weights3': tf.Variable(tf.random_normal([filter_dim2,filter_dim2,second_layer,third_layer],stddev=(1.0/(initializer_scale*filter_dim2*filter_dim2*second_layer)))),
'weights_out': tf.Variable(tf.random_normal([filter_dim2,filter_dim2,third_layer+second_layer+first_layer,output_layer],stddev=(1.0/(initializer_scale*filter_dim2*filter_dim2*third_layer))))
}
biases = {
'bias1': tf.Variable(tf.random_normal([first_layer],stddev=(1.0/(initializer_scale*filter_dim*filter_dim*input_layer)))),
'bias2': tf.Variable(tf.random_normal([second_layer],stddev=(1.0/(initializer_scale*filter_dim2*filter_dim2*first_layer)))),
'bias3': tf.Variable(tf.random_normal([third_layer],stddev=(1.0/(initializer_scale*filter_dim2*filter_dim2*second_layer)))),
'bias_out': tf.Variable(tf.random_normal([output_layer],stddev=(1.0/(initializer_scale*filter_dim2*filter_dim2*third_layer))))
}
# tf Graph input
x = tf.placeholder(tf.float32, [None, image_dim, image_dim, 2])
y = tf.placeholder(tf.float32, [None, image_dim, image_dim, 1])
# model
prediction = conv_net(x, weights, biases)
# get variance to normalize error terms during training
variance = get_variance(difference_train)
# loss and optimization
cost = tf.reduce_mean(tf.square(tf.subtract(prediction, y)))
optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate).minimize(cost)
# session
init = tf.global_variables_initializer()
with tf.Session() as sess:
sess.run(init)
epoch_count = 0
global_step = 0
start_time = time.time()
print("starting training ... ")
while epoch_count < epochs:
print('---------------------------------------------------------')
print('beginning epoch {} ...'.format(epoch_count))
epoch = get_epoch(train_data, target_data_train, batch_size)
for i in epoch:
x_data_train, y_data_train = np.asarray(epoch[i][0]), np.asarray(epoch[i][1])
sess.run(optimizer, feed_dict={x : x_data_train, y : y_data_train})
loss = sess.run(cost, feed_dict={x : x_data_train, y : y_data_train})
percent_error = 100*loss/variance
print(" - training global_step {0:4d} error: {1:8.4f} {2:8.2f}%".format(global_step, loss, percent_error))
global_step += 1
epoch_count+=1
print('optimization finished!')
print('\nstarting testing...')
score = sess.run(cost, feed_dict={x: test_data, y: target_data_test})
pred = sess.run(prediction, feed_dict={x: test_data})
for i in range(image_dim):
print(rand_img_test_1[0][i],rand_img_test_2[0][i], pred[0][0][i], difference_test[0][i])
print('---- score : {} ----'.format(score))
if __name__ == '__main__':
main()
|
mit
| 1,550,266,799,215,808,500
| 41.748344
| 198
| 0.623857
| false
| 3.139591
| true
| false
| false
|
gimli-org/gimli
|
doc/tutorials/dev/plot_XX_mod_fv_laplace-2d.py
|
1
|
2873
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
"""
import pygimli as pg
import pygimli.solver as solver
from pygimli.viewer import showMesh
from pygimli.viewer.mpl import drawMesh, drawStreams
from pygimli.meshtools import createMesh
import matplotlib.pyplot as plt
import numpy as np
from solverFVM import solveFiniteVolume, createFVPostProzessMesh
# build domain
nSteps = 20
dPhi = (0.6 * np.pi)/nSteps
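# note: the outer boundary samples a 0.6*pi arc of the unit circle in
# nSteps segments; the reversed loop below adds the same arc scaled by
# 0.1, so the polygon closes into an annular-sector domain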
boundaries = []
for i in range(1, nSteps+1):
boundaries.append([np.cos(dPhi*i), np.sin(dPhi*i)])
poly = pg.Mesh(2)
nodes = []
for b in boundaries:
nodes.append(poly.createNode(b))
for b in boundaries[::-1]:
nodes.append(poly.createNode(pg.RVector3(b)*0.1))
for i in range(len(nodes)):
poly.createEdge(nodes[i], nodes[(i+1)%len(nodes)], 1)
mesh = createMesh(poly, quality=34, area=0.001, smooth=[0,10])
f = pg.Vector(mesh.cellCount(), 10)
a = pg.Vector(mesh.cellCount(), 0.1)
#Start FEM solution
swatch = pg.core.Stopwatch(True)
uDirichlet = [1, lambda p_: np.sin(np.arctan2(p_.center()[1],
p_.center()[0]))/p_.center().abs()]
uFEM = solver.solvePoisson(mesh, a=a, f=f, uBoundary=uDirichlet)
print('FEM:', swatch.duration(True))
ax1, cbar = showMesh(mesh, data=uFEM,
nLevs=12, cMin=0, cMax=10, colorBar=True,
showLater=True)
drawMesh(ax1, mesh)
#print(min(u), max(u))
uAna = np.array(list(map(lambda p_: np.sin(np.arctan2(p_[1],
p_[0]))/p_.abs(),
mesh.positions())))
#drawStreamLines2(ax1, mesh, data=u)
#ax2,cbar = showMesh(mesh, data=(u+1e-6)/(ua+1e-6), filled=True, colorBar=True, showLater=True)
#showMesh(amesh)
print('---:', swatch.duration(True))
uFV = solveFiniteVolume(mesh, a=a, f=f, uBoundary=uDirichlet)
print('FVM:', swatch.duration(True))
ax2, cbar = showMesh(mesh,
data=uFV,
cMin=0, cMax=10, logScale=False,
interpolate=False, shading='gouraud',
tri=1,
nLevs=12,
colorBar=True, showLater=True)
drawMesh(ax2, mesh)
#allBounds = pg.solver.parseArgToBoundaries(uDirichlet, mesh)
#bounds, vals = zip(*allBounds)
#uDirVals = pg.solver.generateBoundaryValue(bounds, vals)
mesh2, u2 = createFVPostProzessMesh(mesh, uFV, uDirichlet)
print('---:', swatch.duration(True))
ax3, cbar = showMesh(mesh2, data=u2,
nLevs=12, cMin=0, cMax=10, colorBar=True,
showLater=True)
drawMesh(ax3, mesh2)
#ax3,cbar = showMesh(mesh,
#data=np.array(list(map(lambda p_: np.sin(np.arctan2(p_[1],p_[0]))/p_.abs(), mesh.cellCenter()))),
#cMin=0, cMax=10, logScale=False,
#showLater=True)
#drawMesh(ax3, mesh)
plt.show()
#drawMesh(ax, grid)
|
apache-2.0
| -7,926,348,419,567,565,000
| 28.030303
| 118
| 0.604595
| false
| 2.873
| false
| false
| false
|
open-mmlab/mmdetection
|
mmdet/models/dense_heads/corner_head.py
|
1
|
46890
|
from logging import warning
from math import ceil, log
import torch
import torch.nn as nn
from mmcv.cnn import ConvModule, bias_init_with_prob
from mmcv.ops import CornerPool, batched_nms
from mmcv.runner import BaseModule
from mmdet.core import multi_apply
from ..builder import HEADS, build_loss
from ..utils import gaussian_radius, gen_gaussian_target
from ..utils.gaussian_target import (gather_feat, get_local_maximum,
get_topk_from_heatmap,
transpose_and_gather_feat)
from .base_dense_head import BaseDenseHead
from .dense_test_mixins import BBoxTestMixin
class BiCornerPool(BaseModule):
"""Bidirectional Corner Pooling Module (TopLeft, BottomRight, etc.)
Args:
in_channels (int): Input channels of module.
out_channels (int): Output channels of module.
feat_channels (int): Feature channels of module.
directions (list[str]): Directions of two CornerPools.
norm_cfg (dict): Dictionary to construct and config norm layer.
init_cfg (dict or list[dict], optional): Initialization config dict.
Default: None
"""
def __init__(self,
in_channels,
directions,
feat_channels=128,
out_channels=128,
norm_cfg=dict(type='BN', requires_grad=True),
init_cfg=None):
super(BiCornerPool, self).__init__(init_cfg)
self.direction1_conv = ConvModule(
in_channels, feat_channels, 3, padding=1, norm_cfg=norm_cfg)
self.direction2_conv = ConvModule(
in_channels, feat_channels, 3, padding=1, norm_cfg=norm_cfg)
self.aftpool_conv = ConvModule(
feat_channels,
out_channels,
3,
padding=1,
norm_cfg=norm_cfg,
act_cfg=None)
self.conv1 = ConvModule(
in_channels, out_channels, 1, norm_cfg=norm_cfg, act_cfg=None)
self.conv2 = ConvModule(
in_channels, out_channels, 3, padding=1, norm_cfg=norm_cfg)
self.direction1_pool = CornerPool(directions[0])
self.direction2_pool = CornerPool(directions[1])
self.relu = nn.ReLU(inplace=True)
def forward(self, x):
"""Forward features from the upstream network.
Args:
x (tensor): Input feature of BiCornerPool.
Returns:
conv2 (tensor): Output feature of BiCornerPool.
"""
direction1_conv = self.direction1_conv(x)
direction2_conv = self.direction2_conv(x)
direction1_feat = self.direction1_pool(direction1_conv)
direction2_feat = self.direction2_pool(direction2_conv)
aftpool_conv = self.aftpool_conv(direction1_feat + direction2_feat)
conv1 = self.conv1(x)
relu = self.relu(aftpool_conv + conv1)
conv2 = self.conv2(relu)
return conv2
@HEADS.register_module()
class CornerHead(BaseDenseHead, BBoxTestMixin):
"""Head of CornerNet: Detecting Objects as Paired Keypoints.
Code is modified from the `official github repo
<https://github.com/princeton-vl/CornerNet/blob/master/models/py_utils/
kp.py#L73>`_ .
More details can be found in the `paper
<https://arxiv.org/abs/1808.01244>`_ .
Args:
num_classes (int): Number of categories excluding the background
category.
in_channels (int): Number of channels in the input feature map.
num_feat_levels (int): Levels of feature from the previous module. 2
for HourglassNet-104 and 1 for HourglassNet-52. Because
HourglassNet-104 outputs the final feature and intermediate
supervision feature and HourglassNet-52 only outputs the final
feature. Default: 2.
corner_emb_channels (int): Channel of embedding vector. Default: 1.
train_cfg (dict | None): Training config. Useless in CornerHead,
but we keep this variable for SingleStageDetector. Default: None.
test_cfg (dict | None): Testing config of CornerHead. Default: None.
loss_heatmap (dict | None): Config of corner heatmap loss. Default:
GaussianFocalLoss.
loss_embedding (dict | None): Config of corner embedding loss. Default:
AssociativeEmbeddingLoss.
loss_offset (dict | None): Config of corner offset loss. Default:
SmoothL1Loss.
init_cfg (dict or list[dict], optional): Initialization config dict.
Default: None
"""
def __init__(self,
num_classes,
in_channels,
num_feat_levels=2,
corner_emb_channels=1,
train_cfg=None,
test_cfg=None,
loss_heatmap=dict(
type='GaussianFocalLoss',
alpha=2.0,
gamma=4.0,
loss_weight=1),
loss_embedding=dict(
type='AssociativeEmbeddingLoss',
pull_weight=0.25,
push_weight=0.25),
loss_offset=dict(
type='SmoothL1Loss', beta=1.0, loss_weight=1),
init_cfg=None):
assert init_cfg is None, 'To prevent abnormal initialization ' \
'behavior, init_cfg is not allowed to be set'
super(CornerHead, self).__init__(init_cfg)
self.num_classes = num_classes
self.in_channels = in_channels
self.corner_emb_channels = corner_emb_channels
self.with_corner_emb = self.corner_emb_channels > 0
self.corner_offset_channels = 2
self.num_feat_levels = num_feat_levels
self.loss_heatmap = build_loss(
loss_heatmap) if loss_heatmap is not None else None
self.loss_embedding = build_loss(
loss_embedding) if loss_embedding is not None else None
self.loss_offset = build_loss(
loss_offset) if loss_offset is not None else None
self.train_cfg = train_cfg
self.test_cfg = test_cfg
self._init_layers()
def _make_layers(self, out_channels, in_channels=256, feat_channels=256):
"""Initialize conv sequential for CornerHead."""
return nn.Sequential(
ConvModule(in_channels, feat_channels, 3, padding=1),
ConvModule(
feat_channels, out_channels, 1, norm_cfg=None, act_cfg=None))
def _init_corner_kpt_layers(self):
"""Initialize corner keypoint layers.
Including corner heatmap branch and corner offset branch. Each branch
has two parts: prefix `tl_` for top-left and `br_` for bottom-right.
"""
self.tl_pool, self.br_pool = nn.ModuleList(), nn.ModuleList()
self.tl_heat, self.br_heat = nn.ModuleList(), nn.ModuleList()
self.tl_off, self.br_off = nn.ModuleList(), nn.ModuleList()
for _ in range(self.num_feat_levels):
self.tl_pool.append(
BiCornerPool(
self.in_channels, ['top', 'left'],
out_channels=self.in_channels))
self.br_pool.append(
BiCornerPool(
self.in_channels, ['bottom', 'right'],
out_channels=self.in_channels))
self.tl_heat.append(
self._make_layers(
out_channels=self.num_classes,
in_channels=self.in_channels))
self.br_heat.append(
self._make_layers(
out_channels=self.num_classes,
in_channels=self.in_channels))
self.tl_off.append(
self._make_layers(
out_channels=self.corner_offset_channels,
in_channels=self.in_channels))
self.br_off.append(
self._make_layers(
out_channels=self.corner_offset_channels,
in_channels=self.in_channels))
def _init_corner_emb_layers(self):
"""Initialize corner embedding layers.
Only include corner embedding branch with two parts: prefix `tl_` for
top-left and `br_` for bottom-right.
"""
self.tl_emb, self.br_emb = nn.ModuleList(), nn.ModuleList()
for _ in range(self.num_feat_levels):
self.tl_emb.append(
self._make_layers(
out_channels=self.corner_emb_channels,
in_channels=self.in_channels))
self.br_emb.append(
self._make_layers(
out_channels=self.corner_emb_channels,
in_channels=self.in_channels))
def _init_layers(self):
"""Initialize layers for CornerHead.
Including two parts: corner keypoint layers and corner embedding layers
"""
self._init_corner_kpt_layers()
if self.with_corner_emb:
self._init_corner_emb_layers()
def init_weights(self):
super(CornerHead, self).init_weights()
bias_init = bias_init_with_prob(0.1)
for i in range(self.num_feat_levels):
            # The initialization of parameters is different between
# nn.Conv2d and ConvModule. Our experiments show that
# using the original initialization of nn.Conv2d increases
# the final mAP by about 0.2%
self.tl_heat[i][-1].conv.reset_parameters()
self.tl_heat[i][-1].conv.bias.data.fill_(bias_init)
self.br_heat[i][-1].conv.reset_parameters()
self.br_heat[i][-1].conv.bias.data.fill_(bias_init)
self.tl_off[i][-1].conv.reset_parameters()
self.br_off[i][-1].conv.reset_parameters()
if self.with_corner_emb:
self.tl_emb[i][-1].conv.reset_parameters()
self.br_emb[i][-1].conv.reset_parameters()
def forward(self, feats):
"""Forward features from the upstream network.
Args:
feats (tuple[Tensor]): Features from the upstream network, each is
a 4D-tensor.
Returns:
tuple: Usually a tuple of corner heatmaps, offset heatmaps and
embedding heatmaps.
- tl_heats (list[Tensor]): Top-left corner heatmaps for all
levels, each is a 4D-tensor, the channels number is
num_classes.
- br_heats (list[Tensor]): Bottom-right corner heatmaps for all
levels, each is a 4D-tensor, the channels number is
num_classes.
- tl_embs (list[Tensor] | list[None]): Top-left embedding
heatmaps for all levels, each is a 4D-tensor or None.
If not None, the channels number is corner_emb_channels.
- br_embs (list[Tensor] | list[None]): Bottom-right embedding
heatmaps for all levels, each is a 4D-tensor or None.
If not None, the channels number is corner_emb_channels.
- tl_offs (list[Tensor]): Top-left offset heatmaps for all
levels, each is a 4D-tensor. The channels number is
corner_offset_channels.
- br_offs (list[Tensor]): Bottom-right offset heatmaps for all
levels, each is a 4D-tensor. The channels number is
corner_offset_channels.
"""
lvl_ind = list(range(self.num_feat_levels))
return multi_apply(self.forward_single, feats, lvl_ind)
def forward_single(self, x, lvl_ind, return_pool=False):
"""Forward feature of a single level.
Args:
x (Tensor): Feature of a single level.
lvl_ind (int): Level index of current feature.
return_pool (bool): Return corner pool feature or not.
Returns:
tuple[Tensor]: A tuple of CornerHead's output for current feature
level. Containing the following Tensors:
- tl_heat (Tensor): Predicted top-left corner heatmap.
- br_heat (Tensor): Predicted bottom-right corner heatmap.
- tl_emb (Tensor | None): Predicted top-left embedding heatmap.
None for `self.with_corner_emb == False`.
- br_emb (Tensor | None): Predicted bottom-right embedding
heatmap. None for `self.with_corner_emb == False`.
- tl_off (Tensor): Predicted top-left offset heatmap.
- br_off (Tensor): Predicted bottom-right offset heatmap.
                - tl_pool (Tensor): Top-left corner pool feature. Only
                  returned when ``return_pool`` is True.
                - br_pool (Tensor): Bottom-right corner pool feature. Only
                  returned when ``return_pool`` is True.
"""
tl_pool = self.tl_pool[lvl_ind](x)
tl_heat = self.tl_heat[lvl_ind](tl_pool)
br_pool = self.br_pool[lvl_ind](x)
br_heat = self.br_heat[lvl_ind](br_pool)
tl_emb, br_emb = None, None
if self.with_corner_emb:
tl_emb = self.tl_emb[lvl_ind](tl_pool)
br_emb = self.br_emb[lvl_ind](br_pool)
tl_off = self.tl_off[lvl_ind](tl_pool)
br_off = self.br_off[lvl_ind](br_pool)
result_list = [tl_heat, br_heat, tl_emb, br_emb, tl_off, br_off]
if return_pool:
result_list.append(tl_pool)
result_list.append(br_pool)
return result_list
def get_targets(self,
gt_bboxes,
gt_labels,
feat_shape,
img_shape,
with_corner_emb=False,
with_guiding_shift=False,
with_centripetal_shift=False):
"""Generate corner targets.
Including corner heatmap, corner offset.
Optional: corner embedding, corner guiding shift, centripetal shift.
For CornerNet, we generate corner heatmap, corner offset and corner
embedding from this function.
For CentripetalNet, we generate corner heatmap, corner offset, guiding
shift and centripetal shift from this function.
Args:
gt_bboxes (list[Tensor]): Ground truth bboxes of each image, each
has shape (num_gt, 4).
gt_labels (list[Tensor]): Ground truth labels of each box, each has
shape (num_gt,).
feat_shape (list[int]): Shape of output feature,
[batch, channel, height, width].
img_shape (list[int]): Shape of input image,
[height, width, channel].
with_corner_emb (bool): Generate corner embedding target or not.
Default: False.
with_guiding_shift (bool): Generate guiding shift target or not.
Default: False.
with_centripetal_shift (bool): Generate centripetal shift target or
not. Default: False.
Returns:
dict: Ground truth of corner heatmap, corner offset, corner
embedding, guiding shift and centripetal shift. Containing the
following keys:
- topleft_heatmap (Tensor): Ground truth top-left corner
heatmap.
- bottomright_heatmap (Tensor): Ground truth bottom-right
corner heatmap.
- topleft_offset (Tensor): Ground truth top-left corner offset.
- bottomright_offset (Tensor): Ground truth bottom-right corner
offset.
                - corner_embedding (list[list[list[int]]]): Ground truth
                  corner embedding. Only returned when ``with_corner_emb``
                  is True.
                - topleft_guiding_shift (Tensor): Ground truth top-left
                  corner guiding shift. Only returned when
                  ``with_guiding_shift`` is True.
                - bottomright_guiding_shift (Tensor): Ground truth
                  bottom-right corner guiding shift. Only returned when
                  ``with_guiding_shift`` is True.
                - topleft_centripetal_shift (Tensor): Ground truth top-left
                  corner centripetal shift. Only returned when
                  ``with_centripetal_shift`` is True.
                - bottomright_centripetal_shift (Tensor): Ground truth
                  bottom-right corner centripetal shift. Only returned when
                  ``with_centripetal_shift`` is True.
"""
batch_size, _, height, width = feat_shape
img_h, img_w = img_shape[:2]
width_ratio = float(width / img_w)
height_ratio = float(height / img_h)
gt_tl_heatmap = gt_bboxes[-1].new_zeros(
[batch_size, self.num_classes, height, width])
gt_br_heatmap = gt_bboxes[-1].new_zeros(
[batch_size, self.num_classes, height, width])
gt_tl_offset = gt_bboxes[-1].new_zeros([batch_size, 2, height, width])
gt_br_offset = gt_bboxes[-1].new_zeros([batch_size, 2, height, width])
if with_corner_emb:
match = []
# Guiding shift is a kind of offset, from center to corner
if with_guiding_shift:
gt_tl_guiding_shift = gt_bboxes[-1].new_zeros(
[batch_size, 2, height, width])
gt_br_guiding_shift = gt_bboxes[-1].new_zeros(
[batch_size, 2, height, width])
# Centripetal shift is also a kind of offset, from center to corner
# and normalized by log.
if with_centripetal_shift:
gt_tl_centripetal_shift = gt_bboxes[-1].new_zeros(
[batch_size, 2, height, width])
gt_br_centripetal_shift = gt_bboxes[-1].new_zeros(
[batch_size, 2, height, width])
for batch_id in range(batch_size):
# Ground truth of corner embedding per image is a list of coord set
corner_match = []
for box_id in range(len(gt_labels[batch_id])):
left, top, right, bottom = gt_bboxes[batch_id][box_id]
center_x = (left + right) / 2.0
center_y = (top + bottom) / 2.0
label = gt_labels[batch_id][box_id]
# Use coords in the feature level to generate ground truth
scale_left = left * width_ratio
scale_right = right * width_ratio
scale_top = top * height_ratio
scale_bottom = bottom * height_ratio
scale_center_x = center_x * width_ratio
scale_center_y = center_y * height_ratio
# Int coords on feature map/ground truth tensor
left_idx = int(min(scale_left, width - 1))
right_idx = int(min(scale_right, width - 1))
top_idx = int(min(scale_top, height - 1))
bottom_idx = int(min(scale_bottom, height - 1))
# Generate gaussian heatmap
scale_box_width = ceil(scale_right - scale_left)
scale_box_height = ceil(scale_bottom - scale_top)
radius = gaussian_radius((scale_box_height, scale_box_width),
min_overlap=0.3)
radius = max(0, int(radius))
gt_tl_heatmap[batch_id, label] = gen_gaussian_target(
gt_tl_heatmap[batch_id, label], [left_idx, top_idx],
radius)
gt_br_heatmap[batch_id, label] = gen_gaussian_target(
gt_br_heatmap[batch_id, label], [right_idx, bottom_idx],
radius)
# Generate corner offset
left_offset = scale_left - left_idx
top_offset = scale_top - top_idx
right_offset = scale_right - right_idx
bottom_offset = scale_bottom - bottom_idx
gt_tl_offset[batch_id, 0, top_idx, left_idx] = left_offset
gt_tl_offset[batch_id, 1, top_idx, left_idx] = top_offset
gt_br_offset[batch_id, 0, bottom_idx, right_idx] = right_offset
gt_br_offset[batch_id, 1, bottom_idx,
right_idx] = bottom_offset
# Generate corner embedding
if with_corner_emb:
corner_match.append([[top_idx, left_idx],
[bottom_idx, right_idx]])
# Generate guiding shift
if with_guiding_shift:
gt_tl_guiding_shift[batch_id, 0, top_idx,
left_idx] = scale_center_x - left_idx
gt_tl_guiding_shift[batch_id, 1, top_idx,
left_idx] = scale_center_y - top_idx
gt_br_guiding_shift[batch_id, 0, bottom_idx,
right_idx] = right_idx - scale_center_x
gt_br_guiding_shift[
batch_id, 1, bottom_idx,
right_idx] = bottom_idx - scale_center_y
# Generate centripetal shift
if with_centripetal_shift:
gt_tl_centripetal_shift[batch_id, 0, top_idx,
left_idx] = log(scale_center_x -
scale_left)
gt_tl_centripetal_shift[batch_id, 1, top_idx,
left_idx] = log(scale_center_y -
scale_top)
gt_br_centripetal_shift[batch_id, 0, bottom_idx,
right_idx] = log(scale_right -
scale_center_x)
gt_br_centripetal_shift[batch_id, 1, bottom_idx,
right_idx] = log(scale_bottom -
scale_center_y)
if with_corner_emb:
match.append(corner_match)
target_result = dict(
topleft_heatmap=gt_tl_heatmap,
topleft_offset=gt_tl_offset,
bottomright_heatmap=gt_br_heatmap,
bottomright_offset=gt_br_offset)
if with_corner_emb:
target_result.update(corner_embedding=match)
if with_guiding_shift:
target_result.update(
topleft_guiding_shift=gt_tl_guiding_shift,
bottomright_guiding_shift=gt_br_guiding_shift)
if with_centripetal_shift:
target_result.update(
topleft_centripetal_shift=gt_tl_centripetal_shift,
bottomright_centripetal_shift=gt_br_centripetal_shift)
return target_result
def loss(self,
tl_heats,
br_heats,
tl_embs,
br_embs,
tl_offs,
br_offs,
gt_bboxes,
gt_labels,
img_metas,
gt_bboxes_ignore=None):
"""Compute losses of the head.
Args:
tl_heats (list[Tensor]): Top-left corner heatmaps for each level
with shape (N, num_classes, H, W).
br_heats (list[Tensor]): Bottom-right corner heatmaps for each
level with shape (N, num_classes, H, W).
tl_embs (list[Tensor]): Top-left corner embeddings for each level
with shape (N, corner_emb_channels, H, W).
br_embs (list[Tensor]): Bottom-right corner embeddings for each
level with shape (N, corner_emb_channels, H, W).
tl_offs (list[Tensor]): Top-left corner offsets for each level
with shape (N, corner_offset_channels, H, W).
br_offs (list[Tensor]): Bottom-right corner offsets for each level
with shape (N, corner_offset_channels, H, W).
gt_bboxes (list[Tensor]): Ground truth bboxes for each image with
shape (num_gts, 4) in [left, top, right, bottom] format.
gt_labels (list[Tensor]): Class indices corresponding to each box.
img_metas (list[dict]): Meta information of each image, e.g.,
image size, scaling factor, etc.
gt_bboxes_ignore (list[Tensor] | None): Specify which bounding
boxes can be ignored when computing the loss.
Returns:
dict[str, Tensor]: A dictionary of loss components. Containing the
following losses:
- det_loss (list[Tensor]): Corner keypoint losses of all
feature levels.
- pull_loss (list[Tensor]): Part one of AssociativeEmbedding
losses of all feature levels.
- push_loss (list[Tensor]): Part two of AssociativeEmbedding
losses of all feature levels.
- off_loss (list[Tensor]): Corner offset losses of all feature
levels.
"""
targets = self.get_targets(
gt_bboxes,
gt_labels,
tl_heats[-1].shape,
img_metas[0]['pad_shape'],
with_corner_emb=self.with_corner_emb)
mlvl_targets = [targets for _ in range(self.num_feat_levels)]
det_losses, pull_losses, push_losses, off_losses = multi_apply(
self.loss_single, tl_heats, br_heats, tl_embs, br_embs, tl_offs,
br_offs, mlvl_targets)
loss_dict = dict(det_loss=det_losses, off_loss=off_losses)
if self.with_corner_emb:
loss_dict.update(pull_loss=pull_losses, push_loss=push_losses)
return loss_dict
def loss_single(self, tl_hmp, br_hmp, tl_emb, br_emb, tl_off, br_off,
targets):
"""Compute losses for single level.
Args:
tl_hmp (Tensor): Top-left corner heatmap for current level with
shape (N, num_classes, H, W).
br_hmp (Tensor): Bottom-right corner heatmap for current level with
shape (N, num_classes, H, W).
tl_emb (Tensor): Top-left corner embedding for current level with
shape (N, corner_emb_channels, H, W).
br_emb (Tensor): Bottom-right corner embedding for current level
with shape (N, corner_emb_channels, H, W).
tl_off (Tensor): Top-left corner offset for current level with
shape (N, corner_offset_channels, H, W).
br_off (Tensor): Bottom-right corner offset for current level with
shape (N, corner_offset_channels, H, W).
targets (dict): Corner target generated by `get_targets`.
Returns:
            tuple[torch.Tensor]: Losses of the head's different branches
containing the following losses:
- det_loss (Tensor): Corner keypoint loss.
- pull_loss (Tensor): Part one of AssociativeEmbedding loss.
- push_loss (Tensor): Part two of AssociativeEmbedding loss.
- off_loss (Tensor): Corner offset loss.
"""
gt_tl_hmp = targets['topleft_heatmap']
gt_br_hmp = targets['bottomright_heatmap']
gt_tl_off = targets['topleft_offset']
gt_br_off = targets['bottomright_offset']
gt_embedding = targets['corner_embedding']
# Detection loss
tl_det_loss = self.loss_heatmap(
tl_hmp.sigmoid(),
gt_tl_hmp,
avg_factor=max(1,
gt_tl_hmp.eq(1).sum()))
br_det_loss = self.loss_heatmap(
br_hmp.sigmoid(),
gt_br_hmp,
avg_factor=max(1,
gt_br_hmp.eq(1).sum()))
det_loss = (tl_det_loss + br_det_loss) / 2.0
# AssociativeEmbedding loss
if self.with_corner_emb and self.loss_embedding is not None:
pull_loss, push_loss = self.loss_embedding(tl_emb, br_emb,
gt_embedding)
else:
pull_loss, push_loss = None, None
# Offset loss
        # We only compute the offset loss at the real corner positions.
        # The value of a real corner is 1 in the heatmap ground truth.
        # The mask is computed in class-agnostic mode and its shape is
        # batch * 1 * height * width.
tl_off_mask = gt_tl_hmp.eq(1).sum(1).gt(0).unsqueeze(1).type_as(
gt_tl_hmp)
br_off_mask = gt_br_hmp.eq(1).sum(1).gt(0).unsqueeze(1).type_as(
gt_br_hmp)
tl_off_loss = self.loss_offset(
tl_off,
gt_tl_off,
tl_off_mask,
avg_factor=max(1, tl_off_mask.sum()))
br_off_loss = self.loss_offset(
br_off,
gt_br_off,
br_off_mask,
avg_factor=max(1, br_off_mask.sum()))
off_loss = (tl_off_loss + br_off_loss) / 2.0
return det_loss, pull_loss, push_loss, off_loss
def get_bboxes(self,
tl_heats,
br_heats,
tl_embs,
br_embs,
tl_offs,
br_offs,
img_metas,
rescale=False,
with_nms=True):
"""Transform network output for a batch into bbox predictions.
Args:
tl_heats (list[Tensor]): Top-left corner heatmaps for each level
with shape (N, num_classes, H, W).
br_heats (list[Tensor]): Bottom-right corner heatmaps for each
level with shape (N, num_classes, H, W).
tl_embs (list[Tensor]): Top-left corner embeddings for each level
with shape (N, corner_emb_channels, H, W).
br_embs (list[Tensor]): Bottom-right corner embeddings for each
level with shape (N, corner_emb_channels, H, W).
tl_offs (list[Tensor]): Top-left corner offsets for each level
with shape (N, corner_offset_channels, H, W).
br_offs (list[Tensor]): Bottom-right corner offsets for each level
with shape (N, corner_offset_channels, H, W).
img_metas (list[dict]): Meta information of each image, e.g.,
image size, scaling factor, etc.
rescale (bool): If True, return boxes in original image space.
Default: False.
with_nms (bool): If True, do nms before return boxes.
Default: True.
"""
assert tl_heats[-1].shape[0] == br_heats[-1].shape[0] == len(img_metas)
result_list = []
for img_id in range(len(img_metas)):
result_list.append(
self._get_bboxes_single(
tl_heats[-1][img_id:img_id + 1, :],
br_heats[-1][img_id:img_id + 1, :],
tl_offs[-1][img_id:img_id + 1, :],
br_offs[-1][img_id:img_id + 1, :],
img_metas[img_id],
tl_emb=tl_embs[-1][img_id:img_id + 1, :],
br_emb=br_embs[-1][img_id:img_id + 1, :],
rescale=rescale,
with_nms=with_nms))
if torch.onnx.is_in_onnx_export():
assert len(
img_metas
) == 1, 'Only support one input image while in exporting to ONNX'
detections, labels = result_list[0]
# batch_size 1 here, [1, num_det, 5], [1, num_det]
return detections.unsqueeze(0), labels.unsqueeze(0)
return result_list
def _get_bboxes_single(self,
tl_heat,
br_heat,
tl_off,
br_off,
img_meta,
tl_emb=None,
br_emb=None,
tl_centripetal_shift=None,
br_centripetal_shift=None,
rescale=False,
with_nms=True):
"""Transform outputs for a single batch item into bbox predictions.
Args:
tl_heat (Tensor): Top-left corner heatmap for current level with
shape (N, num_classes, H, W).
br_heat (Tensor): Bottom-right corner heatmap for current level
with shape (N, num_classes, H, W).
tl_off (Tensor): Top-left corner offset for current level with
shape (N, corner_offset_channels, H, W).
br_off (Tensor): Bottom-right corner offset for current level with
shape (N, corner_offset_channels, H, W).
img_meta (dict): Meta information of current image, e.g.,
image size, scaling factor, etc.
tl_emb (Tensor): Top-left corner embedding for current level with
shape (N, corner_emb_channels, H, W).
br_emb (Tensor): Bottom-right corner embedding for current level
with shape (N, corner_emb_channels, H, W).
tl_centripetal_shift: Top-left corner's centripetal shift for
current level with shape (N, 2, H, W).
br_centripetal_shift: Bottom-right corner's centripetal shift for
current level with shape (N, 2, H, W).
rescale (bool): If True, return boxes in original image space.
Default: False.
with_nms (bool): If True, do nms before return boxes.
Default: True.
"""
if isinstance(img_meta, (list, tuple)):
img_meta = img_meta[0]
batch_bboxes, batch_scores, batch_clses = self.decode_heatmap(
tl_heat=tl_heat.sigmoid(),
br_heat=br_heat.sigmoid(),
tl_off=tl_off,
br_off=br_off,
tl_emb=tl_emb,
br_emb=br_emb,
tl_centripetal_shift=tl_centripetal_shift,
br_centripetal_shift=br_centripetal_shift,
img_meta=img_meta,
k=self.test_cfg.corner_topk,
kernel=self.test_cfg.local_maximum_kernel,
distance_threshold=self.test_cfg.distance_threshold)
if rescale:
batch_bboxes /= batch_bboxes.new_tensor(img_meta['scale_factor'])
bboxes = batch_bboxes.view([-1, 4])
scores = batch_scores.view([-1, 1])
clses = batch_clses.view([-1, 1])
# use `sort` instead of `argsort` here, since currently exporting
# `argsort` to ONNX opset version 11 is not supported
scores, idx = scores.sort(dim=0, descending=True)
bboxes = bboxes[idx].view([-1, 4])
scores = scores.view(-1)
clses = clses[idx].view(-1)
detections = torch.cat([bboxes, scores.unsqueeze(-1)], -1)
keepinds = (detections[:, -1] > -0.1)
detections = detections[keepinds]
labels = clses[keepinds]
if with_nms:
detections, labels = self._bboxes_nms(detections, labels,
self.test_cfg)
return detections, labels
def _bboxes_nms(self, bboxes, labels, cfg):
if labels.numel() == 0:
return bboxes, labels
if 'nms_cfg' in cfg:
            warning('nms_cfg in test_cfg will be deprecated. '
                    'Please rename it as nms')
if 'nms' not in cfg:
cfg.nms = cfg.nms_cfg
out_bboxes, keep = batched_nms(bboxes[:, :4], bboxes[:, -1], labels,
cfg.nms)
out_labels = labels[keep]
if len(out_bboxes) > 0:
            # use `sort` instead of `argsort` here
_, idx = torch.sort(out_bboxes[:, -1], descending=True)
max_per_img = out_bboxes.new_tensor(cfg.max_per_img).to(torch.long)
nms_after = max_per_img
if torch.onnx.is_in_onnx_export():
# Always keep topk op for dynamic input in onnx
from mmdet.core.export import get_k_for_topk
nms_after = get_k_for_topk(max_per_img, out_bboxes.shape[0])
idx = idx[:nms_after]
out_bboxes = out_bboxes[idx]
out_labels = out_labels[idx]
return out_bboxes, out_labels
def decode_heatmap(self,
tl_heat,
br_heat,
tl_off,
br_off,
tl_emb=None,
br_emb=None,
tl_centripetal_shift=None,
br_centripetal_shift=None,
img_meta=None,
k=100,
kernel=3,
distance_threshold=0.5,
num_dets=1000):
"""Transform outputs for a single batch item into raw bbox predictions.
Args:
tl_heat (Tensor): Top-left corner heatmap for current level with
shape (N, num_classes, H, W).
br_heat (Tensor): Bottom-right corner heatmap for current level
with shape (N, num_classes, H, W).
tl_off (Tensor): Top-left corner offset for current level with
shape (N, corner_offset_channels, H, W).
br_off (Tensor): Bottom-right corner offset for current level with
shape (N, corner_offset_channels, H, W).
tl_emb (Tensor | None): Top-left corner embedding for current
level with shape (N, corner_emb_channels, H, W).
br_emb (Tensor | None): Bottom-right corner embedding for current
level with shape (N, corner_emb_channels, H, W).
tl_centripetal_shift (Tensor | None): Top-left centripetal shift
for current level with shape (N, 2, H, W).
br_centripetal_shift (Tensor | None): Bottom-right centripetal
shift for current level with shape (N, 2, H, W).
img_meta (dict): Meta information of current image, e.g.,
image size, scaling factor, etc.
k (int): Get top k corner keypoints from heatmap.
kernel (int): Max pooling kernel for extract local maximum pixels.
distance_threshold (float): Distance threshold. Top-left and
bottom-right corner keypoints with feature distance less than
the threshold will be regarded as keypoints from same object.
num_dets (int): Num of raw boxes before doing nms.
Returns:
tuple[torch.Tensor]: Decoded output of CornerHead, containing the
following Tensors:
- bboxes (Tensor): Coords of each box.
- scores (Tensor): Scores of each box.
- clses (Tensor): Categories of each box.
"""
with_embedding = tl_emb is not None and br_emb is not None
with_centripetal_shift = (
tl_centripetal_shift is not None
and br_centripetal_shift is not None)
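        # exactly one corner-grouping scheme must be enabled: associative
        # embedding (CornerNet) or centripetal shift (CentripetalNet)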
assert with_embedding + with_centripetal_shift == 1
batch, _, height, width = tl_heat.size()
if torch.onnx.is_in_onnx_export():
inp_h, inp_w = img_meta['pad_shape_for_onnx'][:2]
else:
inp_h, inp_w, _ = img_meta['pad_shape']
# perform nms on heatmaps
tl_heat = get_local_maximum(tl_heat, kernel=kernel)
br_heat = get_local_maximum(br_heat, kernel=kernel)
tl_scores, tl_inds, tl_clses, tl_ys, tl_xs = get_topk_from_heatmap(
tl_heat, k=k)
br_scores, br_inds, br_clses, br_ys, br_xs = get_topk_from_heatmap(
br_heat, k=k)
        # We use repeat instead of expand here because expand is a
        # shallow-copy function, which could cause unexpected testing
        # results. Using expand decreases mAP by about 10% during testing
        # compared to repeat.
tl_ys = tl_ys.view(batch, k, 1).repeat(1, 1, k)
tl_xs = tl_xs.view(batch, k, 1).repeat(1, 1, k)
br_ys = br_ys.view(batch, 1, k).repeat(1, k, 1)
br_xs = br_xs.view(batch, 1, k).repeat(1, k, 1)
tl_off = transpose_and_gather_feat(tl_off, tl_inds)
tl_off = tl_off.view(batch, k, 1, 2)
br_off = transpose_and_gather_feat(br_off, br_inds)
br_off = br_off.view(batch, 1, k, 2)
tl_xs = tl_xs + tl_off[..., 0]
tl_ys = tl_ys + tl_off[..., 1]
br_xs = br_xs + br_off[..., 0]
br_ys = br_ys + br_off[..., 1]
if with_centripetal_shift:
tl_centripetal_shift = transpose_and_gather_feat(
tl_centripetal_shift, tl_inds).view(batch, k, 1, 2).exp()
br_centripetal_shift = transpose_and_gather_feat(
br_centripetal_shift, br_inds).view(batch, 1, k, 2).exp()
tl_ctxs = tl_xs + tl_centripetal_shift[..., 0]
tl_ctys = tl_ys + tl_centripetal_shift[..., 1]
br_ctxs = br_xs - br_centripetal_shift[..., 0]
br_ctys = br_ys - br_centripetal_shift[..., 1]
# all possible boxes based on top k corners (ignoring class)
tl_xs *= (inp_w / width)
tl_ys *= (inp_h / height)
br_xs *= (inp_w / width)
br_ys *= (inp_h / height)
if with_centripetal_shift:
tl_ctxs *= (inp_w / width)
tl_ctys *= (inp_h / height)
br_ctxs *= (inp_w / width)
br_ctys *= (inp_h / height)
x_off, y_off = 0, 0 # no crop
if not torch.onnx.is_in_onnx_export():
            # Since `RandomCenterCropPad` is done on CPU with numpy and is
            # not dynamically traceable when exporting to ONNX, 'border'
            # does not appear as a key in 'img_meta'. As a temporary
            # solution, we move this 'border' handling to the postprocess
            # step that runs after exporting to ONNX, which is handled in
            # `mmdet/core/export/model_wrappers.py`. Although the pytorch
            # and exported onnx models differ here, the difference can be
            # ignored since they achieve comparable performance (e.g. 40.4
            # vs 40.6 on COCO val2017, for CornerNet without test-time flip)
if 'border' in img_meta:
x_off = img_meta['border'][2]
y_off = img_meta['border'][0]
tl_xs -= x_off
tl_ys -= y_off
br_xs -= x_off
br_ys -= y_off
zeros = tl_xs.new_zeros(*tl_xs.size())
tl_xs = torch.where(tl_xs > 0.0, tl_xs, zeros)
tl_ys = torch.where(tl_ys > 0.0, tl_ys, zeros)
br_xs = torch.where(br_xs > 0.0, br_xs, zeros)
br_ys = torch.where(br_ys > 0.0, br_ys, zeros)
bboxes = torch.stack((tl_xs, tl_ys, br_xs, br_ys), dim=3)
area_bboxes = ((br_xs - tl_xs) * (br_ys - tl_ys)).abs()
if with_centripetal_shift:
tl_ctxs -= x_off
tl_ctys -= y_off
br_ctxs -= x_off
br_ctys -= y_off
tl_ctxs *= tl_ctxs.gt(0.0).type_as(tl_ctxs)
tl_ctys *= tl_ctys.gt(0.0).type_as(tl_ctys)
br_ctxs *= br_ctxs.gt(0.0).type_as(br_ctxs)
br_ctys *= br_ctys.gt(0.0).type_as(br_ctys)
ct_bboxes = torch.stack((tl_ctxs, tl_ctys, br_ctxs, br_ctys),
dim=3)
area_ct_bboxes = ((br_ctxs - tl_ctxs) * (br_ctys - tl_ctys)).abs()
rcentral = torch.zeros_like(ct_bboxes)
# magic nums from paper section 4.1
mu = torch.ones_like(area_bboxes) / 2.4
            mu[area_bboxes > 3500] = 1 / 2.1  # large bboxes get a smaller mu
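            # e.g. (hypothetical numbers): a 100x100 box has area
            # 10000 > 3500, so mu = 1/2.1 and its central region spans
            # roughly 48% of the box width/height around its center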
bboxes_center_x = (bboxes[..., 0] + bboxes[..., 2]) / 2
bboxes_center_y = (bboxes[..., 1] + bboxes[..., 3]) / 2
rcentral[..., 0] = bboxes_center_x - mu * (bboxes[..., 2] -
bboxes[..., 0]) / 2
rcentral[..., 1] = bboxes_center_y - mu * (bboxes[..., 3] -
bboxes[..., 1]) / 2
rcentral[..., 2] = bboxes_center_x + mu * (bboxes[..., 2] -
bboxes[..., 0]) / 2
rcentral[..., 3] = bboxes_center_y + mu * (bboxes[..., 3] -
bboxes[..., 1]) / 2
area_rcentral = ((rcentral[..., 2] - rcentral[..., 0]) *
(rcentral[..., 3] - rcentral[..., 1])).abs()
dists = area_ct_bboxes / area_rcentral
tl_ctx_inds = (ct_bboxes[..., 0] <= rcentral[..., 0]) | (
ct_bboxes[..., 0] >= rcentral[..., 2])
tl_cty_inds = (ct_bboxes[..., 1] <= rcentral[..., 1]) | (
ct_bboxes[..., 1] >= rcentral[..., 3])
br_ctx_inds = (ct_bboxes[..., 2] <= rcentral[..., 0]) | (
ct_bboxes[..., 2] >= rcentral[..., 2])
br_cty_inds = (ct_bboxes[..., 3] <= rcentral[..., 1]) | (
ct_bboxes[..., 3] >= rcentral[..., 3])
if with_embedding:
tl_emb = transpose_and_gather_feat(tl_emb, tl_inds)
tl_emb = tl_emb.view(batch, k, 1)
br_emb = transpose_and_gather_feat(br_emb, br_inds)
br_emb = br_emb.view(batch, 1, k)
dists = torch.abs(tl_emb - br_emb)
tl_scores = tl_scores.view(batch, k, 1).repeat(1, 1, k)
br_scores = br_scores.view(batch, 1, k).repeat(1, k, 1)
scores = (tl_scores + br_scores) / 2 # scores for all possible boxes
# tl and br should have same class
tl_clses = tl_clses.view(batch, k, 1).repeat(1, 1, k)
br_clses = br_clses.view(batch, 1, k).repeat(1, k, 1)
cls_inds = (tl_clses != br_clses)
# reject boxes based on distances
dist_inds = dists > distance_threshold
# reject boxes based on widths and heights
width_inds = (br_xs <= tl_xs)
height_inds = (br_ys <= tl_ys)
        # Do not use `scores[cls_inds]`; use `torch.where` instead. Only
        # 1-D indices of type 'tensor(bool)' are supported when exporting
        # to ONNX; bool indices with more dimensions (e.g. a 2-D bool
        # tensor) are invalid as a node input parameter.
negative_scores = -1 * torch.ones_like(scores)
scores = torch.where(cls_inds, negative_scores, scores)
scores = torch.where(width_inds, negative_scores, scores)
scores = torch.where(height_inds, negative_scores, scores)
scores = torch.where(dist_inds, negative_scores, scores)
if with_centripetal_shift:
scores[tl_ctx_inds] = -1
scores[tl_cty_inds] = -1
scores[br_ctx_inds] = -1
scores[br_cty_inds] = -1
scores = scores.view(batch, -1)
scores, inds = torch.topk(scores, num_dets)
scores = scores.unsqueeze(2)
bboxes = bboxes.view(batch, -1, 4)
bboxes = gather_feat(bboxes, inds)
clses = tl_clses.contiguous().view(batch, -1, 1)
clses = gather_feat(clses, inds).float()
return bboxes, scores, clses
|
apache-2.0
| -2,730,451,611,199,976,400
| 43.572243
| 79
| 0.536362
| false
| 3.827755
| true
| false
| false
|
kg-bot/SupyBot
|
plugins/Linux/__init__.py
|
1
|
2354
|
###
# Copyright (c) 2005, Jeremiah Fincher
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###
"""
Linux-related commands.
"""
import supybot
import supybot.world as world
# Use this for the version of this plugin. You may wish to put a CVS keyword
# in here if you're keeping the plugin in CVS or some similar system.
__version__ = ""
__author__ = supybot.authors.jemfinch
# This is a dictionary mapping supybot.Author instances to lists of
# contributions.
__contributors__ = {}
import config
import plugin
reload(plugin) # In case we're being reloaded.
# Add more reloads here if you add third-party modules and want them to be
# reloaded when this plugin is reloaded. Don't forget to import them as well!
if world.testing:
import test
Class = plugin.Class
configure = config.configure
# vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79:
|
gpl-3.0
| -8,622,309,444,501,066,000
| 38.233333
| 79
| 0.763806
| false
| 4.233813
| false
| false
| false
|
nkmk/python-snippets
|
notebook/pandas_set_index.py
|
1
|
3936
|
import pandas as pd
df = pd.read_csv('data/src/sample_pandas_normal.csv')
print(df)
# name age state point
# 0 Alice 24 NY 64
# 1 Bob 42 CA 92
# 2 Charlie 18 CA 70
# 3 Dave 68 TX 70
# 4 Ellen 24 CA 88
# 5 Frank 30 NY 57
df_i = df.set_index('name')
print(df_i)
# age state point
# name
# Alice 24 NY 64
# Bob 42 CA 92
# Charlie 18 CA 70
# Dave 68 TX 70
# Ellen 24 CA 88
# Frank 30 NY 57
df_id = df.set_index('name', drop=False)
print(df_id)
# name age state point
# name
# Alice Alice 24 NY 64
# Bob Bob 42 CA 92
# Charlie Charlie 18 CA 70
# Dave Dave 68 TX 70
# Ellen Ellen 24 CA 88
# Frank Frank 30 NY 57
df_mi = df.set_index(['state', 'name'])
print(df_mi)
# age point
# state name
# NY Alice 24 64
# CA Bob 42 92
# Charlie 18 70
# TX Dave 68 70
# CA Ellen 24 88
# NY Frank 30 57
df_mi.sort_index(inplace=True)
print(df_mi)
# age point
# state name
# CA Bob 42 92
# Charlie 18 70
# Ellen 24 88
# NY Alice 24 64
# Frank 30 57
# TX Dave 68 70
print(df_i)
# age state point
# name
# Alice 24 NY 64
# Bob 42 CA 92
# Charlie 18 CA 70
# Dave 68 TX 70
# Ellen 24 CA 88
# Frank 30 NY 57
df_ii = df_i.set_index('state')
print(df_ii)
# age point
# state
# NY 24 64
# CA 42 92
# CA 18 70
# TX 68 70
# CA 24 88
# NY 30 57
df_mi = df_i.set_index('state', append=True)
print(df_mi)
# age point
# name state
# Alice NY 24 64
# Bob CA 42 92
# Charlie CA 18 70
# Dave TX 68 70
# Ellen CA 24 88
# Frank NY 30 57
print(df_mi.swaplevel(0, 1))
# age point
# state name
# NY Alice 24 64
# CA Bob 42 92
# Charlie 18 70
# TX Dave 68 70
# CA Ellen 24 88
# NY Frank 30 57
print(df_i)
# age state point
# name
# Alice 24 NY 64
# Bob 42 CA 92
# Charlie 18 CA 70
# Dave 68 TX 70
# Ellen 24 CA 88
# Frank 30 NY 57
df_ri = df_i.reset_index()
print(df_ri)
# name age state point
# 0 Alice 24 NY 64
# 1 Bob 42 CA 92
# 2 Charlie 18 CA 70
# 3 Dave 68 TX 70
# 4 Ellen 24 CA 88
# 5 Frank 30 NY 57
df_change = df_i.reset_index().set_index('state')
print(df_change)
# name age point
# state
# NY Alice 24 64
# CA Bob 42 92
# CA Charlie 18 70
# TX Dave 68 70
# CA Ellen 24 88
# NY Frank 30 57
df.set_index('name', inplace=True)
print(df)
# age state point
# name
# Alice 24 NY 64
# Bob 42 CA 92
# Charlie 18 CA 70
# Dave 68 TX 70
# Ellen 24 CA 88
# Frank 30 NY 57
df = pd.read_csv('data/src/sample_pandas_normal.csv', index_col=0)
print(df)
# age state point
# name
# Alice 24 NY 64
# Bob 42 CA 92
# Charlie 18 CA 70
# Dave 68 TX 70
# Ellen 24 CA 88
# Frank 30 NY 57
print(df.loc['Bob'])
# age 42
# state CA
# point 92
# Name: Bob, dtype: object
print(df.at['Bob', 'age'])
# 42
|
mit
| 7,370,681,852,639,031,000
| 23.754717
| 66
| 0.424035
| false
| 2.825556
| false
| true
| false
|
AlgorithmLover/OJCodes
|
qlcoder/data_mining/topic_model/reference/refered_code.py
|
1
|
3286
|
#!/usr/bin/python
# -*- coding:utf8 -*-
import time
import jieba.analyse
def post_cut(url):
fr = open(url + "/post_data.txt")
fo = open(url + "/post_key.txt", "a+")
for line in fr.readlines():
term = line.strip().split("\t")
if len(term) == 3 and term[2] != "":
key_list = jieba.analyse.extract_tags(term[2], 30) # get keywords
ustr = term[0] + "\t"
for i in key_list:
ustr += i.encode("utf-8") + " "
fo.write(ustr + "\n")
fr.close()
fo.close()
def post_tfidf(url):
from sklearn.feature_extraction.text import HashingVectorizer
fr = open(url + "/post_key.txt")
id_list = []
data_list = []
for line in fr.readlines():
term = line.strip().split("\t")
if len(term) == 2:
id_list.append(term[0])
data_list.append(term[1])
    hv = HashingVectorizer(n_features=10000, non_negative=True)  # implements the hashing trick
post_tfidf = hv.fit_transform(data_list) # return feature vector 'fea_train' [n_samples,n_features]
print 'Size of fea_train:' + repr(post_tfidf.shape)
print post_tfidf.nnz
post_cluster(url, id_list, post_tfidf)
def post_cluster(url, id, tfidf_vec):
from sklearn.cluster import KMeans
kmean = KMeans(n_clusters=300)
print "kmeans"
kmean.fit(tfidf_vec)
pred = kmean.transform(tfidf_vec)
count1 = 0
count2 = 0
pred_str = []
for item in pred:
count1 += 1
vec = ""
for tmp in item:
vec += str(tmp)[0:7] + "\t"
pred_str.append(vec)
print len(pred_str)
print len(id)
pred = kmean.predict(tfidf_vec)
fo = open(url + "/cluster.txt", "a+")
for i in range(len(pred)):
count2 += 1
fo.write(id[i] + "\t" + str(pred[i]) + "\n")
fo.close()
print "%d+%d" % (count1, count2)
def post_lda(url, cluster):
from gensim import corpora, models, matutils
count = 0
fr = open(url + "/post_key.txt")
fo2 = open(url + "/post_vec_lda.txt", "a+")
id_list = []
data_list = []
for line in fr.readlines():
term = line.strip().split("\t")
if len(term) == 2:
count += 1
id_list.append(term[0])
word = term[1].strip().split()
data_list.append(word)
dic = corpora.Dictionary(data_list)
    corpus = [dic.doc2bow(text) for text in data_list]  # bag-of-words corpus
tfidf = models.TfidfModel(corpus)
corpus_tfidf = tfidf[corpus]
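    # note: gensim's LdaModel is typically trained on raw bag-of-words
    # counts; this script feeds it tf-idf weights instead, which is
    # unconventional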
lda = models.LdaModel(corpus_tfidf, id2word=dic, num_topics=200)
corpus_lda = lda[corpus_tfidf]
num = 0
for doc in corpus_lda:
wstr = ""
for i in range(len(doc)):
item = doc[i]
wstr += str(item[0]) + "," + str(item[1])[0:7] + "/"
fo2.write(id_list[num] + "\t" + wstr[0:-1] + "\n")
num += 1
fr.close()
fo2.close()
print num
if cluster:
lda_csc_matrix = matutils.corpus2csc(corpus_lda).transpose() # gensim sparse matrix to scipy sparse matrix
post_cluster(url, id_list, lda_csc_matrix)
if __name__ == "__main__":
url = "path"
    start_time = time.time()  # avoid shadowing the time module
post_cut(url)
post_tfidf(url)
lda_cluster = False
post_lda(url, lda_cluster)
    print time.time() - start_time
|
mit
| 6,242,895,625,385,736,000
| 26.512605
| 115
| 0.550397
| false
| 3.014733
| false
| false
| false
|
RedHatInsights/insights-core
|
insights/parsers/parted.py
|
1
|
8405
|
"""
PartedL - command ``parted -l``
===============================
This module provides processing for the ``parted`` command. The output is parsed
by the ``PartedL`` class. Attributes are provided for each field for the disk,
and a list of ``Partition`` class objects, one for each partition in the output.
Typical content of the ``parted -l`` command output
looks like::
Model: ATA TOSHIBA MG04ACA4 (scsi)
Disk /dev/sda: 4001GB
Sector size (logical/physical): 512B/512B
Partition Table: gpt
Disk Flags: pmbr_boot
Number Start End Size File system Name Flags
1 1049kB 2097kB 1049kB bios_grub
2 2097kB 526MB 524MB xfs
3 526MB 4001GB 4000GB lvm
The columns may vary depending upon the type of device.
Note:
The examples in this module may be executed with the following command:
``python -m insights.parsers.parted``
Examples:
>>> parted_data = '''
... Model: ATA TOSHIBA MG04ACA4 (scsi)
... Disk /dev/sda: 4001GB
... Sector size (logical/physical): 512B/512B
... Partition Table: gpt
... Disk Flags: pmbr_boot
...
... Number Start End Size File system Name Flags
... 1 1049kB 2097kB 1049kB bios_grub
... 2 2097kB 526MB 524MB xfs
... 3 526MB 4001GB 4000GB lvm
... '''.strip()
>>> from insights.tests import context_wrap
>>> shared = {PartedL: PartedL(context_wrap(parted_data))}
>>> parted_info = shared[PartedL]
>>> parted_info.data
{'partition_table': 'gpt', 'sector_size': '512B/512B', 'disk_flags': 'pmbr_boot', 'partitions': [{'end': '2097kB', 'name': 'bios_grub', 'number': '1', 'start': '1049kB', 'flags': 'bios_grub', 'file_system': 'bios_grub', 'size': '1049kB'}, {'start': '2097kB', 'size': '524MB', 'end': '526MB', 'number': '2', 'file_system': 'xfs'}, {'end': '4001GB', 'name': 'lvm', 'number': '3', 'start': '526MB', 'flags': 'lvm', 'file_system': 'lvm', 'size': '4000GB'}], 'model': 'ATA TOSHIBA MG04ACA4 (scsi)', 'disk': '/dev/sda', 'size': '4001GB'}
>>> parted_info.data['model']
'ATA TOSHIBA MG04ACA4 (scsi)'
>>> parted_info.disk
'/dev/sda'
>>> parted_info.logical_sector_size
'512B'
>>> parted_info.physical_sector_size
'512B'
>>> parted_info.boot_partition
>>> parted_info.data['disk_flags']
'pmbr_boot'
>>> len(parted_info.partitions)
3
>>> parted_info.partitions[0].data
{'end': '2097kB', 'name': 'bios_grub', 'number': '1', 'start': '1049kB', 'flags': 'bios_grub', 'file_system': 'bios_grub', 'size': '1049kB'}
>>> parted_info.partitions[0].number
'1'
>>> parted_info.partitions[0].start
'1049kB'
>>> parted_info.partitions[0].end
'2097kB'
>>> parted_info.partitions[0].size
'1049kB'
>>> parted_info.partitions[0].file_system
'bios_grub'
>>> parted_info.partitions[0].type
>>> parted_info.partitions[0].flags
'bios_grub'
"""
from .. import parser, CommandParser
from ..parsers import ParseException, parse_fixed_table
from insights.specs import Specs
class Partition(object):
"""Class to contain information for one partition.
Represents the values from one row of the partition information from the
``parted`` command. Column names have been converted to lowercase and are
provided as attributes. Column names may vary so the ``get`` method may
be used to check for the presence of a column.
Attributes:
data (dict): Dictionary of partition information keyed by column names
in lowercase.
"""
def __init__(self, data):
self.data = data
@property
def number(self):
"""str: Partition number."""
return self.data.get('number')
@property
def start(self):
"""str: Starting location for the partition."""
return self.data.get('start')
@property
def end(self):
"""str: Ending location for the partition."""
return self.data.get('end')
@property
def size(self):
"""str: Size of the partition."""
return self.data.get('size')
@property
def file_system(self):
"""str: File system type."""
return self.data.get('file_system')
@property
def type(self):
"""str: File system type."""
return self.data.get('type')
@property
def flags(self):
"""str: Partition flags."""
return self.data.get('flags')
def get(self, item):
"""Get information for column ``item`` or ``None`` if not present."""
return self.data.get(item)
@parser(Specs.parted__l)
class PartedL(CommandParser):
"""Class to represent attributes of the ``parted`` command output.
The columns may vary depending upon the type of device.
Attributes:
data (dict): Dictionary of information returned by ``parted`` command.
partitions (list): The partitions found in the output, as Partition
objects.
boot_partition (Partition): the first partition marked as bootable,
or ``None`` if one was not found.
Raises:
ParseException: Raised if ``parted`` output indicates "error" or
"warning" in first line, or if "disk" field is not present, or if
there is an error parsing the data.
ValueError: Raised if there is an error parsing the partition table.
"""
@property
def disk(self):
"""str: Disk information."""
return self.data['disk']
@property
def logical_sector_size(self):
"""str: Logical part of sector size."""
if self._sector_size:
return self._sector_size[0]
@property
def physical_sector_size(self):
"""str: Physical part of sector size."""
if self._sector_size:
return self._sector_size[1]
def get(self, item):
"""Returns a value for the specified ``item`` key."""
return self.data.get(item)
def parse_content(self, content):
        # If the device was not present, the output is an error message
if content[0].startswith("Error") or content[0].startswith("Warning"):
raise ParseException("PartedL content indicates an error %s" % content[0])
dev_info = {}
table_lines = []
for line in content:
if not line.strip():
continue
if ':' in line:
label_value = line.split(':')
label = label_value[0].strip().lower()
if len(label_value) == 2:
value = label_value[1].strip()
value = value if value else None
# Single word labels
if ' ' not in label:
dev_info[label] = value
else:
if label.startswith("disk") and '/' in label:
disk_parts = label.split()
dev_info['disk'] = disk_parts[1].strip()
dev_info['size'] = value
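                        # e.g. a line 'Sector size (logical/physical):
                        # 512B/512B' is stored whole as 'sector_size'; the
                        # logical/physical halves are split out later for
                        # the *_sector_size properties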
elif label.startswith("sector"):
dev_info['sector_size'] = value
else:
label = label.replace(' ', '_')
dev_info[label] = value
else:
table_lines.append(line)
if 'disk' not in dev_info:
raise ParseException("PartedL unable to locate Disk in content")
# Now construct the partition table from the fixed table
partitions = []
if table_lines:
table_lines[0] = table_lines[0].replace('File system', 'File_system').lower()
partitions = parse_fixed_table(table_lines)
self.partitions = [Partition(n) for n in partitions]
self.boot_partition = None
self._sector_size = None
# If we got any partitions, find the first boot partition
for part in partitions:
if 'flags' in part and 'boot' in part['flags']:
self.boot_partition = Partition(part)
break
self.data = dev_info
if 'sector_size' in self.data:
self._sector_size = self.data['sector_size'].split('/', 1)
if len(self._sector_size) != 2:
self._sector_size = None
|
apache-2.0
| -4,088,203,973,475,428,400
| 35.385281
| 535
| 0.569423
| false
| 3.820455
| false
| false
| false
|
kfoss/neon
|
neon/models/rnn.py
|
1
|
21431
|
# ----------------------------------------------------------------------------
# Copyright 2014 Nervana Systems Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ----------------------------------------------------------------------------
"""
Simple recurrent neural network with one hidden layer.
"""
import logging
from neon.backends.backend import Block
from neon.diagnostics.visualize_rnn import VisualizeRNN
from neon.models.mlp import MLP
from neon.util.compat import range
from neon.util.param import req_param
logger = logging.getLogger(__name__)
class RNN(MLP):
"""
**Recurrent Neural Network**
Neon supports standard Recurrent Neural Networks (RNNs) as well as RNNs
with Long Short Term Memory cells (LSTMs). These models are trained on
sequence data, and therefore require a separate dataset format. Neon is
distributed with the Moby Dick dataset, which is a character-based encoding
of the book Moby Dick. Each character is represented in a one-hot encoding
as one of the 128 lowest ASCII chars.
*Dataset format and unrolling*
For the purpose of illustration, assume the entire source text is the 30
characters of ``'Your_shoe_fits_only_your_foot.'``. Note spaces have been
replaced by underscores for readability. To create minibatches of size 2,
we split the data into two subsequences ``'Your_shoe_fits_'`` and
``'only_your_foot.'`` which are treated as separate, independent sequences.
The RNN is trained using truncated back-propagation through time (tBPTT),
    which means that the network is unrolled for a set number of steps,
effectively turning it into a deep feed-forward network. To illustrate the
process, consider an unrolling depth of 5 on the text above: The first step
is to break each sequence into short chunks of the unrolling depth:
| ``'Your_' 'shoe_' 'fits_'``
| ``'only_' 'your_' 'foot.'``
The second step is to create minibatches from the columns of this
structure, e.g. the two sequences ``'Your_'`` and ``'only_'`` will form the
first minibatch. This procedure leaves us with 3 minibatches in total.
The reason for using columns rather than rows is that this way we start
processing the independent sequences in parallel. Then, as we move to the
next minibatch, we also move to the next consecutive time step, and
immediately use the hidden state of the network that was computed at the
previous time step.
In the actual neon data format, each letter becomes a one-hot encoded
vector, and thus each chunk is split up into a list over the unrolling
steps, i.e.
| ``'Your_'``
| ``'only_'``
becomes a list of tensors corresponding to the one-hot encodings of
``['Y', 'o'], ['o', 'n'], ['u', 'l'], ['r', 'y'], [' ', ' ']``.
These lists form the elements of another list over the 3 minibatches that
make up the full dataset.
Note that in the more general case of datasets with multiple sequences of
unequal lengths, it would be necessary to pick the minibatch size to be
equal to the number of sequences, and the number of minibatches to be the
length of the sequences. Sequences would need to be padded to the maximum
length with an "empty character" code, e.g. the all-zeros vector rather
than a one-hot encoding.
In the Moby Dick example, the network is trained to predict one character
ahead, so the targets used for training are simply a copy of the inputs
shifted by one character into the future.
"""
def __init__(self, **kwargs):
self.accumulate = True
# Reusing deltas not supported for RNNs yet
self.reuse_deltas = False
super(RNN, self).__init__(**kwargs)
req_param(self, ['unrolls'])
self.rec_layer = self.layers[1]
def link(self, initlayer=None):
"""
link function for the RNN differs from the MLP in that it does not
print the layers
"""
for ll, pl in zip(self.layers, [initlayer] + self.layers[:-1]):
ll.set_previous_layer(pl)
# self.print_layers()
def fit(self, dataset):
viz = VisualizeRNN()
error = self.backend.empty((1, 1))
mb_id = self.backend.empty((1, 1))
self.print_layers()
self.data_layer.init_dataset(dataset)
self.data_layer.use_set('train')
# "output":"input":"rec"
# "lstm_x":"lstm_ih":"lstm_fh":"lstm_oh":"lstm_ch"
self.grad_checker(numgrad="output")
logger.info('commencing model fitting')
errorlist = []
suberrorlist = []
suberror = self.backend.zeros((1, 1))
while self.epochs_complete < self.num_epochs:
self.backend.begin(Block.epoch, self.epochs_complete)
error.fill(0.0)
mb_id = 1
self.data_layer.reset_counter()
while self.data_layer.has_more_data():
self.backend.begin(Block.minibatch, mb_id)
self.reset(mb_id)
self.backend.begin(Block.fprop, mb_id)
                self.fprop(debug=(mb_id == -1))
self.backend.end(Block.fprop, mb_id)
self.backend.begin(Block.bprop, mb_id)
                self.bprop(debug=(mb_id == -1))
self.backend.end(Block.bprop, mb_id)
self.backend.begin(Block.update, mb_id)
self.update(self.epochs_complete)
self.backend.end(Block.update, mb_id)
self.cost_layer.cost.set_outputbuf(
self.class_layer.output_list[-1])
suberror = self.cost_layer.get_cost()
suberrorlist.append(float(suberror.asnumpyarray()))
self.backend.add(error, suberror, error)
if self.step_print > 0 and mb_id % self.step_print == 0:
logger.info('%d.%d logloss=%0.5f', self.epochs_complete,
mb_id / self.step_print - 1,
float(error.asnumpyarray()) /
self.data_layer.num_batches)
self.backend.end(Block.minibatch, mb_id)
mb_id += 1
self.backend.end(Block.epoch, self.epochs_complete)
self.epochs_complete += 1
errorlist.append(float(error.asnumpyarray()) /
self.data_layer.num_batches)
# self.print_layers(debug=True)
logger.info('epoch: %d, total training error: %0.5f',
self.epochs_complete, float(error.asnumpyarray()) /
self.data_layer.num_batches)
if self.make_plots is True:
self.plot_layers(viz, suberrorlist, errorlist)
self.data_layer.cleanup()
def reset(self, batch):
"""
instead of having a separate buffer for hidden_init, we are now
using the last element output_list[-1] for that.
The shuffle is no longer necessary because fprop directly looks
into the output_list buffer.
"""
if (batch % self.reset_period) == 0 or batch == 1:
self.rec_layer.output_list[-1].fill(0) # reset fprop state
self.rec_layer.deltas.fill(0) # reset bprop (for non-truncated)
if 'c_t' in self.rec_layer.__dict__:
self.rec_layer.c_t[-1].fill(0)
self.rec_layer.celtas.fill(0)
def plot_layers(self, viz, suberrorlist, errorlist):
# generic error plot
viz.plot_error(suberrorlist, errorlist)
# LSTM specific plots
if 'c_t' in self.rec_layer.__dict__:
viz.plot_lstm_wts(self.rec_layer, scale=1.1, fig=4)
viz.plot_lstm_acts(self.rec_layer, scale=21, fig=5)
# RNN specific plots
else:
viz.plot_weights(self.rec_layer.weights.asnumpyarray(),
self.rec_layer.weights_rec.asnumpyarray(),
self.class_layer.weights.asnumpyarray())
viz.plot_activations(self.rec_layer.pre_act_list,
self.rec_layer.output_list,
self.class_layer.pre_act_list,
self.class_layer.output_list,
self.cost_layer.targets)
def fprop(self, debug=False, eps_tau=-1, eps=0,
num_target=None, num_i=0, num_j=0):
"""
Adding numerical gradient functionality here to avoid duplicate fprops.
TODO: Make a version where the for tau loop is inside the layer. The
best way is to have a baseclass for both RNN and LSTM for this.
"""
self.data_layer.fprop(None) # get next mini batch
inputs = self.data_layer.output
y = self.rec_layer.output_list # note: just a shorthand, no copy.
c = [None for k in range(len(y))]
if 'c_t' in self.rec_layer.__dict__:
c = self.rec_layer.c_t
# loop for rec_layer
for tau in range(0, self.unrolls):
if tau == eps_tau:
numpy_target = num_target[num_i, num_j].asnumpyarray()
num_target[num_i, num_j] = (numpy_target + eps)
if debug:
logger.debug("in RNNB.fprop, tau %d, input %d" % (tau,
inputs[tau].asnumpyarray().argmax(0)[0]))
self.rec_layer.fprop(y[tau-1], c[tau-1], inputs[tau], tau)
if tau == eps_tau:
num_target[num_i, num_j] = numpy_target
# loop for class_layer
for tau in range(0, self.unrolls):
if tau == eps_tau:
numpy_target = num_target[num_i, num_j].asnumpyarray()
num_target[num_i, num_j] = (numpy_target + eps)
if debug:
logger.debug("in RNNB.fprop, tau %d, input %d" % (tau,
inputs[tau].asnumpyarray().argmax(0)[0]))
self.class_layer.fprop(y[tau], tau)
if tau == eps_tau:
num_target[num_i, num_j] = numpy_target
# cost layer fprop is a pass.
def bprop(self, debug, numgrad=None):
"""
        Parent method for bptt and truncated-bptt. Truncation is necessary
        for the standard RNN as a way to prevent exploding gradients; it is
        applied to the LSTM as well.
        """
if self.truncate:
self.trunc_bprop_tt(debug, numgrad)
else:
self.bprop_tt(debug, numgrad)
def trunc_bprop_tt(self, debug, numgrad=None):
"""
TODO: move the loop over t into the layer class.
"""
if numgrad is None:
min_unroll = 1
else:
logger.debug("MLP.bprop single unrolling for numgrad")
min_unroll = self.unrolls
        for tau in range(min_unroll, self.unrolls+1):
self.cost_layer.cost.set_outputbuf(
self.class_layer.output_list[tau-1])
self.cost_layer.bprop(None, tau-1)
if debug:
tmp = self.cost_layer.targets[tau-1].asnumpyarray()
tmp = tmp.argmax(0)[0]
logger.debug("in RNNB.bprop, tau %d target %d" % (tau-1, tmp))
error = self.cost_layer.deltas
self.class_layer.bprop(error, tau, numgrad=numgrad)
error = self.class_layer.deltas
for t in list(range(0, tau))[::-1]:
if 'c_t' in self.rec_layer.__dict__:
cerror = self.rec_layer.celtas # on t=0, prev batch state
else:
cerror = None # for normal RNN
self.rec_layer.bprop(error, cerror, t, numgrad=numgrad)
error[:] = self.rec_layer.deltas # [TODO] why need deepcopy?
def bprop_tt(self, debug, numgrad=None):
"""
Keep state over consecutive unrollings. Explodes for RNN, and is not
currently used for anything, but future recurrent layers might use it.
"""
temp1 = self.backend.zeros(self.class_layer.deltas.shape)
temp2 = self.backend.zeros(self.class_layer.deltas.shape)
temp1c = self.backend.zeros(self.class_layer.deltas.shape)
temp2c = self.backend.zeros(self.class_layer.deltas.shape)
for tau in list(range(self.unrolls))[::-1]:
self.cost_layer.cost.set_outputbuf(
self.class_layer.output_list[tau])
self.cost_layer.bprop(None, tau)
cost_error = self.cost_layer.deltas
self.class_layer.bprop(cost_error, tau, numgrad=numgrad)
external_error = self.class_layer.deltas
internal_error = self.rec_layer.deltas
if 'c_t' in self.rec_layer.__dict__:
internal_cerror = self.rec_layer.celtas
external_cerror = self.backend.zeros(temp1.shape)
else:
internal_cerror = None
external_cerror = None
self.rec_layer.bprop(external_error, external_cerror, tau,
numgrad=numgrad)
temp1[:] = self.rec_layer.deltas
if 'c_t' in self.rec_layer.__dict__:
temp1c[:] = self.rec_layer.celtas
self.rec_layer.bprop(internal_error, internal_cerror, tau,
numgrad=numgrad)
temp2[:] = self.rec_layer.deltas
if 'c_t' in self.rec_layer.__dict__:
temp2c[:] = self.rec_layer.celtas
self.backend.add(temp1, temp2, out=self.rec_layer.deltas)
if 'c_t' in self.rec_layer.__dict__:
self.backend.add(temp1c, temp2c, out=self.rec_layer.celtas)
def grad_checker(self, numgrad="lstm_ch"):
"""
Check gradients for LSTM layer:
- W is replicated, only inject the eps once, repeat, average.
bProp is only through the full stack, but wrt. the W in each
level. bProp does this through a for t in tau.
Need a special fprop that injects into one unrolling only.
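        The numerical estimate accumulated below is a one-sided finite
        difference of the cost, summed over the unrolling steps at which
        eps is injected (a sketch of what the loop computes):

            numerical ~ sum_tau (cost_tau(W) - cost_tau(W + eps)) / eps

        and is compared against the analytical gradient from bprop.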
"""
for layer in self.layers:
logger.info("%s", str(layer))
        if numgrad == "output":
            num_target = self.class_layer.weights
            anl_target = self.class_layer.weight_updates
            num_i, num_j = 15, 56
        elif numgrad == "input":
            num_target = self.rec_layer.weights
            anl_target = self.rec_layer.weight_updates
            num_i, num_j = 12, 110 # 110 is "n"
        elif numgrad == "rec":
            num_target = self.rec_layer.weights_rec
            anl_target = self.rec_layer.updates_rec
            num_i, num_j = 12, 63
        elif numgrad == "lstm_x":
            num_target = self.rec_layer.Wfx
            anl_target = self.rec_layer.Wfx_updates
            num_i, num_j = 12, 110
        elif numgrad == "lstm_ih":
            num_target = self.rec_layer.Wih
            anl_target = self.rec_layer.Wih_updates
            num_i, num_j = 12, 55
        elif numgrad == "lstm_fh":
            num_target = self.rec_layer.Wfh
            anl_target = self.rec_layer.Wfh_updates
            num_i, num_j = 12, 55
        elif numgrad == "lstm_oh":
            num_target = self.rec_layer.Woh
            anl_target = self.rec_layer.Woh_updates
            num_i, num_j = 12, 55
        elif numgrad == "lstm_ch":
            num_target = self.rec_layer.Wch
            anl_target = self.rec_layer.Wch_updates
            num_i, num_j = 12, 55
eps = 1e-6 # better to use float64 in cpu.py for this
numerical = 0 # initialize buffer
# loop to inject epsilon in different unrolling stages
for eps_tau in range(0, self.unrolls):
self.reset(1) # clear hidden input
self.fprop(debug=False, eps_tau=eps_tau, eps=0,
num_target=num_target, num_i=num_i, num_j=num_j)
self.cost_layer.set_targets()
self.data_layer.reset_counter()
self.cost_layer.cost.set_outputbuf(
self.class_layer.output_list[-1])
suberror_eps = self.cost_layer.get_cost().asnumpyarray()
self.reset(1)
self.fprop(debug=False, eps_tau=eps_tau, eps=eps,
num_target=num_target, num_i=num_i, num_j=num_j)
self.data_layer.reset_counter()
self.cost_layer.cost.set_outputbuf(
self.class_layer.output_list[-1])
suberror_ref = self.cost_layer.get_cost().asnumpyarray()
num_part = (suberror_eps - suberror_ref) / eps
logger.debug("numpart for eps_tau=%d of %d is %e",
eps_tau, self.unrolls, num_part)
numerical += num_part
# bprop for analytical gradient
self.bprop(debug=False, numgrad=numgrad)
analytical = anl_target[num_i, num_j].asnumpyarray()
logger.debug("---------------------------------------------")
logger.debug("RNN grad_checker: suberror_eps %f", suberror_eps)
logger.debug("RNN grad_checker: suberror_ref %f", suberror_ref)
logger.debug("RNN grad_checker: numerical %e", numerical)
logger.debug("RNN grad_checker: analytical %e", analytical)
logger.debug("RNN grad_checker: ratio %e", 1./(numerical/analytical))
logger.debug("---------------------------------------------")
def predict_generator(self, dataset, setname):
"""
        Generate flattened predictions and true labels for the given dataset,
        one mini-batch at a time.
        Arguments:
            dataset: A neon dataset instance
            setname: Which set to compute predictions for (test, train, val)
        Returns:
            tuple: on each call will yield a 2-tuple of outputs and references.
                   The first item is the predicted label for each time step,
                   and the second item is the corresponding ground truth label.
See Also:
predict_fullset
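        Illustrative usage (the surrounding names are placeholders):

            for pred, targ in model.predict_generator(dataset, 'test'):
                process(pred, targ)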
"""
self.data_layer.init_dataset(dataset)
assert self.data_layer.has_set(setname)
self.data_layer.use_set(setname, predict=True)
self.data_layer.reset_counter()
predlabels = self.backend.empty((1, self.batch_size))
labels = self.backend.empty((1, self.batch_size))
# TODO: find some alternate way of re-assembling data that doesn't
# require allocating space for the entire dataset.
outputs_pred = self.backend.zeros((self.data_layer.num_batches *
self.unrolls, self.batch_size))
outputs_targ = self.backend.zeros((self.data_layer.num_batches *
self.unrolls, self.batch_size))
mb_id = 0
self.data_layer.reset_counter()
while self.data_layer.has_more_data():
mb_id += 1
self.reset(mb_id)
self.fprop(debug=False)
# time unrolling loop to disseminate fprop results
for tau in range(self.unrolls):
probs = self.class_layer.output_list[tau]
targets = self.data_layer.targets[tau]
self.backend.argmax(targets, axis=0, out=labels)
self.backend.argmax(probs, axis=0, out=predlabels)
# collect batches to re-assemble continuous data
idx = self.unrolls * (mb_id - 1) + tau
outputs_pred[idx, :] = predlabels
outputs_targ[idx, :] = labels
self.data_layer.cleanup()
# flatten the 2d predictions into our canonical 1D format
pred_flat = outputs_pred.transpose().reshape((1, -1))
targ_flat = outputs_targ.transpose().reshape((1, -1))
for i in range(self.data_layer.num_batches):
start = i * self.unrolls * self.batch_size
end = start + (self.unrolls * self.batch_size)
yield (pred_flat[start:end], targ_flat[start:end])
def write_string(self, pred, targ, setname):
""" For text prediction, reassemble the batches and print out a
        short contiguous segment of target text and predicted text - useful
to check for off-by-one errors and the like"""
import numpy as np
pred_int = pred[0, 2:40].asnumpyarray().ravel().astype(np.int8)
targ_int = targ[0, 2:40].asnumpyarray().ravel().astype(np.int8)
# remove special characters, replace them with '#'
pred_int[pred_int < 32] = 35
targ_int[targ_int < 32] = 35
# create output strings
logging.info("the target for '%s' is: '%s'", setname,
''.join(targ_int.view('c')))
logging.info("prediction for '%s' is: '%s'", setname,
''.join(pred_int.view('c')))
|
apache-2.0
| 9,129,896,781,727,219,000
| 43.741127
| 79
| 0.575101
| false
| 3.84965
| false
| false
| false
|
LamaHamadeh/Microsoft-DAT210x
|
Module 5/assignment4.py
|
1
|
10263
|
'''
author Lama Hamadeh
'''
import numpy as np
import pandas as pd
from sklearn import preprocessing
from sklearn.cluster import KMeans
import matplotlib.pyplot as plt
import matplotlib
#
# TODO: Parameters to play around with
PLOT_TYPE_TEXT = False # If you'd like to see indices
PLOT_VECTORS = True # If you'd like to see your original features in P.C.-Space
matplotlib.style.use('ggplot') # Look Pretty
c = ['red', 'green', 'blue', 'orange', 'yellow', 'brown']
def drawVectors(transformed_features, components_, columns, plt):
num_columns = len(columns)
# This function will project your *original* feature (columns)
# onto your principal component feature-space, so that you can
# visualize how "important" each one was in the
# multi-dimensional scaling
# Scale the principal components by the max value in
# the transformed set belonging to that component
xvector = components_[0] * max(transformed_features[:,0])
yvector = components_[1] * max(transformed_features[:,1])
## Visualize projections
# Sort each column by its length. These are your *original*
# columns, not the principal components.
import math
important_features = { columns[i] : math.sqrt(xvector[i]**2 + yvector[i]**2) for i in range(num_columns) }
important_features = sorted(zip(important_features.values(), important_features.keys()), reverse=True)
print "Projected Features by importance:\n", important_features
ax = plt.axes()
for i in range(num_columns):
# Use an arrow to project each original feature as a
# labeled vector on your principal component axes
plt.arrow(0, 0, xvector[i], yvector[i], color='b', width=0.0005, head_width=0.02, alpha=0.75, zorder=600000)
plt.text(xvector[i]*1.2, yvector[i]*1.2, list(columns)[i], color='b', alpha=0.75, zorder=600000)
return ax
def doPCA(data, dimensions=2):
from sklearn.decomposition import RandomizedPCA
model = RandomizedPCA(n_components=dimensions)
model.fit(data)
return model
def doKMeans(data, clusters=0):
#
# TODO: Do the KMeans clustering here, passing in the # of clusters parameter
# and fit it against your data. Then, return a tuple containing the cluster
# centers and the labels
#
# .. your code here ..
model = KMeans(n_clusters=clusters)
labels = model.fit_predict(data)
return model.cluster_centers_, model.labels_
#
# TODO: Load up the dataset. It may or may not have nans in it. Make
# sure you catch them and destroy them, by setting them to '0'. This is valid
# for this dataset, since if the value is missing, you can assume no $ was spent
# on it.
#
# .. your code here ..
df=pd.read_csv('/Users/ADB3HAMADL/Desktop/Anaconda_Packages/DAT210x-master/Module5/Datasets/Wholesale customers data.csv')
df.dropna(axis = 0, how = 'any', inplace = True)
#
# TODO: As instructed, get rid of the 'Channel' and 'Region' columns, since
# you'll be investigating as if this were a single location wholesaler, rather
# than a national / international one. Leaving these fields in here would cause
# KMeans to examine and give weight to them.
#
# .. your code here ..
df.drop(['Channel','Region'],inplace=True,axis=1)
print(df.dtypes)
print(df.head())
#
# TODO: Before unitizing / standardizing / normalizing your data in preparation for
# K-Means, it's a good idea to get a quick peek at it. You can do this using the
# .describe() method, or even by using the built-in pandas df.plot.hist()
#
# .. your code here ..
print(df.describe())
print(df.plot.hist())
#
# INFO: Having checked out your data, you may have noticed there's a pretty big gap
# between the top customers in each feature category and the rest. Some feature
# scaling algos won't get rid of outliers for you, so it's a good idea to handle that
# manually---particularly if your goal is NOT to determine the top customers. After
# all, you can do that with a simple Pandas .sort_values() and not a machine
# learning clustering algorithm. From a business perspective, you're probably more
# interested in clustering your +/- 2 standard deviation customers, rather than the
# creme de la creme, or bottom of the barrel'ers
#
# Remove top 5 and bottom 5 samples for each column:
drop = {}
for col in df.columns:
# Bottom 5
sort = df.sort_values(by=col, ascending=True)
if len(sort) > 5: sort=sort[:5]
for index in sort.index: drop[index] = True # Just store the index once
# Top 5
sort = df.sort_values(by=col, ascending=False)
if len(sort) > 5: sort=sort[:5]
for index in sort.index: drop[index] = True # Just store the index once
#
# INFO Drop rows by index. We do this all at once in case there is a
# collision. This way, we don't end up dropping more rows than we have
# to, if there is a single row that satisfies the drop for multiple columns.
# Since there are 6 columns, if we end up dropping < 5*6*2 = 60 rows, that means
# there indeed were collisions.
print "Dropping {0} Outliers...".format(len(drop))
df.drop(inplace=True, labels=drop.keys(), axis=0)
print df.describe()
#
# INFO: What are you interested in?
#
# Depending on what you're interested in, you might take a different approach
# to normalizing/standardizing your data.
#
# You should note that all columns left in the dataset are of the same unit.
# You might ask yourself, do I even need to normalize / standardize the data?
# The answer depends on what you're trying to accomplish. For instance, although
# all the units are the same (generic money unit), the price per item in your
# store isn't. There may be some cheap items and some expensive one. If your goal
# is to find out what items people tend to buy together, but you didn't
# unitize properly before running kMeans, the contribution of the lesser priced
# item would be dwarfed by the more expensive item.
#
# For a great overview on a few of the normalization methods supported in SKLearn,
# please check out: https://stackoverflow.com/questions/30918781/right-function-for-normalizing-input-of-sklearn-svm
#
# Suffice to say, at the end of the day, you're going to have to know what question
# you want answered and what data you have available in order to select the best
# method for your purpose. Luckily, SKLearn's interfaces are easy to switch out
# so in the mean time, you can experiment with all of them and see how they alter
# your results.
#
#
# 5-sec summary before you dive deeper online:
#
# NORMALIZATION: Let's say your users spend a LOT. Normalization divides each item by
# the average overall amount of spending. Stated differently, your
# new feature is = the contribution of overall spending going into
# that particular item: $spent on feature / $overall spent by sample
#
# MINMAX: What % in the overall range of $spent by all users on THIS particular
# feature is the current sample's feature at? When you're dealing with
# all the same units, this will produce a near face-value amount. Be
# careful though: if you have even a single outlier, it can cause all
# your data to get squashed up in lower percentages.
# Imagine your buyers usually spend $100 on wholesale milk, but today
# only spent $20. This is the relationship you're trying to capture
# with MinMax. NOTE: MinMax doesn't standardize (std. dev.); it only
# normalizes / unitizes your feature, in the mathematical sense.
# MinMax can be used as an alternative to zero mean, unit variance scaling.
#           [(sampleFeatureValue-min) / (max-min)] * (new_max-new_min) + new_min
#           Where min and max are the overall feature values over all samples, and
#           (new_min, new_max) is the desired output range (default [0, 1]).
#
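# ILLUSTRATIVE sketch (not part of the assignment; the toy array is made up
# here) of how two of the scalers below differ. Uncomment to experiment:
#
# toy = np.array([[1., 10.], [2., 20.], [3., 300.]])
# print preprocessing.MinMaxScaler().fit_transform(toy) # each COLUMN mapped to [0, 1]
# print preprocessing.Normalizer().fit_transform(toy)   # each ROW scaled to unit norm
#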
# TODO: Un-comment just ***ONE*** of the lines at a time and see how it alters your results
# Pay attention to the direction of the arrows, as well as their LENGTHS
#T = preprocessing.StandardScaler().fit_transform(df)
#T = preprocessing.MinMaxScaler().fit_transform(df)
#T = preprocessing.MaxAbsScaler().fit_transform(df)
#T = preprocessing.Normalizer().fit_transform(df)
T = preprocessing.Normalizer().fit_transform(df) # No Change
#
# INFO: Sometimes people perform PCA before doing KMeans, so that KMeans only
# operates on the most meaningful features. In our case, there are so few features
# that doing PCA ahead of time isn't really necessary, and you can do KMeans in
# feature space. But keep in mind you have the option to transform your data to
# bring down its dimensionality. If you take that route, then your Clusters will
# already be in PCA-transformed feature space, and you won't have to project them
# again for visualization.
# Do KMeans
n_clusters = 3
centroids, labels = doKMeans(T, n_clusters)
#
# TODO: Print out your centroids. They're currently in feature-space, which
# is good. Print them out before you transform them into PCA space for viewing
#
# .. your code here ..
print centroids
# Do PCA *after* to visualize the results. Project the centroids as well as
# the samples into the new 2D feature space for visualization purposes.
display_pca = doPCA(T)
T = display_pca.transform(T)
CC = display_pca.transform(centroids)
# Visualize all the samples. Give them the color of their cluster label
fig = plt.figure()
ax = fig.add_subplot(111)
if PLOT_TYPE_TEXT:
# Plot the index of the sample, so you can further investigate it in your dset
for i in range(len(T)): ax.text(T[i,0], T[i,1], df.index[i], color=c[labels[i]], alpha=0.75, zorder=600000)
ax.set_xlim(min(T[:,0])*1.2, max(T[:,0])*1.2)
ax.set_ylim(min(T[:,1])*1.2, max(T[:,1])*1.2)
else:
# Plot a regular scatter plot
sample_colors = [ c[labels[i]] for i in range(len(T)) ]
ax.scatter(T[:, 0], T[:, 1], c=sample_colors, marker='o', alpha=0.2)
# Plot the Centroids as X's, and label them
ax.scatter(CC[:, 0], CC[:, 1], marker='x', s=169, linewidths=3, zorder=1000, c=c)
for i in range(len(centroids)): ax.text(CC[i, 0], CC[i, 1], str(i), zorder=500010, fontsize=18, color=c[i])
# Display feature vectors for investigation:
if PLOT_VECTORS: drawVectors(T, display_pca.components_, df.columns, plt)
# Add the cluster label back into the dataframe and display it:
df['label'] = pd.Series(labels, index=df.index)
print df
plt.show()
|
mit
| -4,313,806,507,492,444,000
| 40.216867
| 122
| 0.714508
| false
| 3.503926
| false
| false
| false
|
hardc0d3/sppy
|
sppy_test/open_env_get_db_cursor_ctl_close.py
|
1
|
1038
|
from sppy.spapi_cffi import SophiaApi
from sppy.spapi_cffi_cdef import sophia_api_cdefs
from sppy.spapi_cffi_codecs import *
sp = SophiaApi( '../../sophia/sophia1.2.2/libsophia.so.1.2.2',sophia_api_cdefs['1.2.2'] )
codec_u32 = U32(sp.ffi)
dbname = 'test'
env = sp.env()
print "get env object",env.cd
typ = sp.type(env)
print "type env env?",typ.decode(0)
ctl = sp.ctl(env)
typ = sp.type(ctl)
print "type of ctl?",typ.decode(0)
rc = sp.set( ctl, "sophia.path", "../test_data/" )
print "set ctl path", rc._(0)
rc = sp.open( env )
print "open env",rc._(0)
rc = sp.set( ctl, "db", dbname )
print "set ctl db name:%s"%dbname,rc
db = sp.get( ctl, "db.%s"%dbname )
print "get ctl db.%s"%dbname,db.cd
cursor = sp.cursor(ctl)
print cursor.cd
o = sp.get(cursor)
szk = sp.ffi.new("uint32_t*")
szv = sp.ffi.new("uint32_t*")
while o.cd != sp.ffi.NULL:
key = sp.get(o,"key",szk)
val = sp.get(o,"value",szv)
print key._(szk[0]),val._(szv[0])
o = sp.get(cursor)
#print o.cd
rc = sp.destroy(env)
print "destroy env",rc
|
bsd-2-clause
| -2,561,781,173,405,216,300
| 18.222222
| 89
| 0.631021
| false
| 2.237069
| false
| false
| false
|
wdmchaft/taskcoach
|
taskcoachlib/mailer/outlook.py
|
1
|
2591
|
'''
Task Coach - Your friendly task manager
Copyright (C) 2004-2010 Task Coach developers <developers@taskcoach.org>
Task Coach is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Task Coach is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import os, stat, tempfile
from taskcoachlib import persistence
if os.name == 'nt':
from win32com.client import GetActiveObject
def getCurrentSelection():
obj = GetActiveObject('Outlook.Application')
exp = obj.ActiveExplorer()
sel = exp.Selection
ret = []
for n in xrange(1, sel.Count + 1):
src = tempfile.NamedTemporaryFile(suffix='.eml') # Will be deleted automagically
src.close()
sel.Item(n).SaveAs(src.name, 0)
src = file(src.name, 'rb')
# Okay. In the case of HTML mails, Outlook doesn't put
# a blank line between the last header line and the
# body. This assumes that the last header is
# Subject:. Hope it's true.
# States:
# 0 still in headers
# 1 subject: header seen, blank line not written
            # 2 all headers seen, blank line written, in body
name = persistence.get_temp_file(suffix='.eml')
dst = file(name, 'wb')
try:
s = 0
for line in src:
if s == 0:
dst.write(line)
if line.lower().startswith('subject:'):
dst.write('X-Outlook-ID: %s\r\n' % str(sel.Item(n).EntryID))
s = 1
elif s == 1:
dst.write('\r\n')
if line.strip() != '':
dst.write(line)
s = 2
else:
dst.write(line)
finally:
dst.close()
if os.name == 'nt':
os.chmod(name, stat.S_IREAD)
ret.append(name)
return ret
|
gpl-3.0
| -6,198,454,282,548,796,000
| 34.986111
| 92
| 0.542648
| false
| 4.413969
| false
| false
| false
|
daq-tools/kotori
|
test/test_vendor_hiveeyes.py
|
1
|
2713
|
# -*- coding: utf-8 -*-
# (c) 2020-2021 Andreas Motl <andreas@getkotori.org>
import logging
import pytest
import pytest_twisted
from bunch import Bunch
from test.conftest import create_machinery
from test.settings.mqttkit import PROCESS_DELAY_MQTT
from test.util import mqtt_json_sensor, sleep, InfluxWrapper, GrafanaWrapper
logger = logging.getLogger(__name__)
settings = Bunch(
influx_database='hiveeyes_itest',
influx_measurement='site_box_sensors',
mqtt_topic='hiveeyes/itest/site/box/data.json',
grafana_username='admin',
grafana_password='admin',
grafana_dashboards=['hiveeyes-itest-site-box', 'hiveeyes-itest'],
)
influx = InfluxWrapper(database=settings.influx_database, measurement=settings.influx_measurement)
grafana = GrafanaWrapper(settings=settings)
machinery_hiveeyes = create_machinery('./etc/test/hiveeyes.ini')
create_influxdb_hiveeyes = influx.make_create_db()
reset_influxdb_hiveeyes = influx.make_reset_measurement()
reset_grafana_hiveeyes = grafana.make_reset()
@pytest_twisted.inlineCallbacks
@pytest.mark.hiveeyes
def test_mqtt_to_grafana(machinery_hiveeyes, create_influxdb_hiveeyes, reset_influxdb_hiveeyes, reset_grafana_hiveeyes):
"""
    Publish a single reading in JSON format to MQTT and prove that
- it is stored in the InfluxDB database.
- a corresponding datasource and dashboards have been created in Grafana.
"""
# Submit a single measurement, without timestamp.
data = {
'temperature': 42.84,
'weight': 33.33,
}
yield mqtt_json_sensor(settings.mqtt_topic, data)
# Wait for some time to process the message.
yield sleep(PROCESS_DELAY_MQTT)
# Wait for Grafana to create its artefacts.
yield sleep(2)
    # Prove that data arrived in InfluxDB.
record = influx.get_first_record()
del record['time']
assert record == {u'temperature': 42.84, u'weight': 33.33}
yield record
    # Prove that Grafana is well provisioned.
logger.info('Grafana: Checking datasource')
datasource_names = []
for datasource in grafana.client.datasources.get():
datasource_names.append(datasource['name'])
assert settings.influx_database in datasource_names
logger.info('Grafana: Checking dashboards')
for dashboard_name in settings.grafana_dashboards:
dashboard = grafana.client.dashboards.db[dashboard_name].get()['dashboard']
if 'rows' in dashboard:
umbrella = dashboard['rows'][0]
else:
umbrella = dashboard
target = umbrella['panels'][0]['targets'][0]
#assert target['measurement'] == settings.influx_measurement
assert 'temperature' in target['query'] or 'weight' in target['query']
|
agpl-3.0
| 8,705,837,564,512,757,000
| 33.782051
| 120
| 0.709915
| false
| 3.429836
| true
| false
| false
|
SRabbelier/Melange
|
thirdparty/google_appengine/google/appengine/dist/py_zipimport.py
|
1
|
9300
|
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Pure Python zipfile importer.
This approximates the standard zipimport module, which isn't supported
by Google App Engine. See PEP 302 for more information about the API
for import hooks.
Usage:
import py_zipimport
As a side effect of importing, the module overrides sys.path_hooks,
and also creates an alias 'zipimport' for itself. When your app is
running in Google App Engine production, you don't even need to import
it, since this is already done for you. In the Google App Engine SDK
this module is not used; instead, the standard zipimport module is
used.
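
A minimal sketch of explicit use (the zipfile name here is hypothetical):

  import sys
  import py_zipimport
  sys.path.append('mylib.zip')
  import foo  # resolved from mylib.zip/foo.py via the zipimporter path hook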
"""
__all__ = ['ZipImportError', 'zipimporter']
import os
import sys
import types
import UserDict
import zipfile
_SEARCH_ORDER = [
('.py', False),
('/__init__.py', True),
]
_zipfile_cache = {}
class ZipImportError(ImportError):
"""Exception raised by zipimporter objects."""
class zipimporter:
"""A PEP-302-style importer that can import from a zipfile.
Just insert or append this class (not an instance) to sys.path_hooks
and you're in business. Instances satisfy both the 'importer' and
'loader' APIs specified in PEP 302.
"""
def __init__(self, path_entry):
"""Constructor.
Args:
path_entry: The entry in sys.path. This should be the name of an
existing zipfile possibly with a path separator and a prefix
path within the archive appended, e.g. /x/django.zip or
/x/django.zip/foo/bar.
Raises:
ZipImportError if the path_entry does not represent a valid
zipfile with optional prefix.
"""
archive = path_entry
prefix = ''
while not os.path.lexists(archive):
head, tail = os.path.split(archive)
if head == archive:
msg = 'Nothing found for %r' % path_entry
raise ZipImportError(msg)
archive = head
prefix = os.path.join(tail, prefix)
if not os.path.isfile(archive):
msg = 'Non-file %r found for %r' % (archive, path_entry)
raise ZipImportError(msg)
self.archive = archive
self.prefix = os.path.join(prefix, '')
self.zipfile = _zipfile_cache.get(archive)
if self.zipfile is None:
try:
self.zipfile = zipfile.ZipFile(self.archive)
except (EnvironmentError, zipfile.BadZipfile), err:
msg = 'Can\'t open zipfile %s: %s: %s' % (self.archive,
err.__class__.__name__, err)
import logging
logging.warn(msg)
raise ZipImportError(msg)
else:
_zipfile_cache[archive] = self.zipfile
import logging
logging.info('zipimporter(%r, %r)', archive, prefix)
def __repr__(self):
"""Return a string representation matching zipimport.c."""
name = self.archive
if self.prefix:
name = os.path.join(name, self.prefix)
return '<zipimporter object "%s">' % name
def _get_info(self, fullmodname):
"""Internal helper for find_module() and load_module().
Args:
fullmodname: The dot-separated full module name, e.g. 'django.core.mail'.
Returns:
A tuple (submodname, is_package, relpath) where:
submodname: The final component of the module name, e.g. 'mail'.
is_package: A bool indicating whether this is a package.
        relpath: The path to the module's source code within the zipfile.
Raises:
ImportError if the module is not found in the archive.
"""
parts = fullmodname.split('.')
submodname = parts[-1]
for suffix, is_package in _SEARCH_ORDER:
relpath = os.path.join(self.prefix,
submodname + suffix.replace('/', os.sep))
try:
self.zipfile.getinfo(relpath.replace(os.sep, '/'))
except KeyError:
pass
else:
return submodname, is_package, relpath
msg = ('Can\'t find module %s in zipfile %s with prefix %r' %
(fullmodname, self.archive, self.prefix))
raise ZipImportError(msg)
def _get_source(self, fullmodname):
"""Internal helper for load_module().
Args:
fullmodname: The dot-separated full module name, e.g. 'django.core.mail'.
Returns:
A tuple (submodname, is_package, fullpath, source) where:
submodname: The final component of the module name, e.g. 'mail'.
is_package: A bool indicating whether this is a package.
fullpath: The path to the module's source code including the
zipfile's filename.
source: The module's source code.
Raises:
ImportError if the module is not found in the archive.
"""
submodname, is_package, relpath = self._get_info(fullmodname)
fullpath = '%s%s%s' % (self.archive, os.sep, relpath)
source = self.zipfile.read(relpath.replace(os.sep, '/'))
source = source.replace('\r\n', '\n')
source = source.replace('\r', '\n')
return submodname, is_package, fullpath, source
def find_module(self, fullmodname, path=None):
"""PEP-302-compliant find_module() method.
Args:
fullmodname: The dot-separated full module name, e.g. 'django.core.mail'.
path: Optional and ignored; present for API compatibility only.
Returns:
None if the module isn't found in the archive; self if it is found.
"""
try:
submodname, is_package, relpath = self._get_info(fullmodname)
except ImportError:
return None
else:
return self
def load_module(self, fullmodname):
"""PEP-302-compliant load_module() method.
Args:
fullmodname: The dot-separated full module name, e.g. 'django.core.mail'.
Returns:
The module object constructed from the source code.
Raises:
SyntaxError if the module's source code is syntactically incorrect.
ImportError if there was a problem accessing the source code.
Whatever else can be raised by executing the module's source code.
"""
submodname, is_package, fullpath, source = self._get_source(fullmodname)
code = compile(source, fullpath, 'exec')
mod = sys.modules.get(fullmodname)
try:
if mod is None:
mod = sys.modules[fullmodname] = types.ModuleType(fullmodname)
mod.__loader__ = self
mod.__file__ = fullpath
mod.__name__ = fullmodname
if is_package:
mod.__path__ = [os.path.dirname(mod.__file__)]
exec code in mod.__dict__
except:
if fullmodname in sys.modules:
del sys.modules[fullmodname]
raise
return mod
def get_data(self, fullpath):
"""Return (binary) content of a data file in the zipfile."""
prefix = os.path.join(self.archive, '')
if fullpath.startswith(prefix):
relpath = fullpath[len(prefix):]
elif os.path.isabs(fullpath):
raise IOError('Absolute path %r doesn\'t start with zipfile name %r' %
(fullpath, prefix))
else:
relpath = fullpath
try:
return self.zipfile.read(relpath.replace(os.sep, '/'))
except KeyError:
raise IOError('Path %r not found in zipfile %r' %
(relpath, self.archive))
def is_package(self, fullmodname):
"""Return whether a module is a package."""
submodname, is_package, relpath = self._get_info(fullmodname)
return is_package
def get_code(self, fullmodname):
"""Return bytecode for a module."""
submodname, is_package, fullpath, source = self._get_source(fullmodname)
return compile(source, fullpath, 'exec')
def get_source(self, fullmodname):
"""Return source code for a module."""
submodname, is_package, fullpath, source = self._get_source(fullmodname)
return source
class ZipFileCache(UserDict.DictMixin):
"""Helper class to export archive data in _zip_directory_cache.
Just take the info from _zipfile_cache and convert it as required.
"""
def __init__(self, archive):
    _zipfile_cache[archive]  # raises KeyError if the archive is not cached
self._archive = archive
def keys(self):
return _zipfile_cache[self._archive].namelist()
def __getitem__(self, filename):
info = _zipfile_cache[self._archive].getinfo(filename.replace(os.sep, '/'))
dt = info.date_time
dostime = dt[3] << 11 | dt[4] << 5 | (dt[5] // 2)
dosdate = (dt[0] - 1980) << 9 | dt[1] << 5 | dt[2]
return (os.path.join(self._archive, info.filename), info.compress_type,
info.compress_size, info.file_size, info.header_offset, dostime,
dosdate, info.CRC)
class ZipDirectoryCache(UserDict.DictMixin):
"""Helper class to export _zip_directory_cache."""
def keys(self):
return _zipfile_cache.keys()
def __getitem__(self, archive):
return ZipFileCache(archive)
_zip_directory_cache = ZipDirectoryCache()
sys.modules['zipimport'] = sys.modules[__name__]
sys.path_hooks[:] = [zipimporter]
|
apache-2.0
| 6,846,432,237,036,810,000
| 30.958763
| 79
| 0.653548
| false
| 3.881469
| false
| false
| false
|
jseabold/statsmodels
|
statsmodels/sandbox/stats/contrast_tools.py
|
5
|
28790
|
'''functions to work with contrasts for multiple tests
contrast matrices for comparing all pairs, all levels to reference level, ...
extension to 2-way groups in progress
TwoWay: class for bringing two-way analysis together and try out
various helper functions
Idea for second part
- get all transformation matrices to move in between different full rank
parameterizations
- standardize to one parameterization to get all interesting effects.
- multivariate normal distribution
- exploit or expand what we have in LikelihoodResults, cov_params, f_test,
t_test, example: resols_dropf_full.cov_params(C2)
- connect to new multiple comparison for contrast matrices, based on
multivariate normal or t distribution (Hothorn, Bretz, Westfall)
'''
from numpy.testing import assert_equal
import numpy as np
#next 3 functions copied from multicomp.py
def contrast_allpairs(nm):
'''contrast or restriction matrix for all pairs of nm variables
Parameters
----------
nm : int
Returns
-------
contr : ndarray, 2d, (nm*(nm-1)/2, nm)
contrast matrix for all pairwise comparisons
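    Examples
    --------
    Illustrative output for three variables:

    >>> contrast_allpairs(3)
    array([[ 1., -1.,  0.],
           [ 1.,  0., -1.],
           [ 0.,  1., -1.]])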
'''
contr = []
for i in range(nm):
for j in range(i+1, nm):
contr_row = np.zeros(nm)
contr_row[i] = 1
contr_row[j] = -1
contr.append(contr_row)
return np.array(contr)
def contrast_all_one(nm):
'''contrast or restriction matrix for all against first comparison
Parameters
----------
nm : int
Returns
-------
contr : ndarray, 2d, (nm-1, nm)
contrast matrix for all against first comparisons
'''
contr = np.column_stack((np.ones(nm-1), -np.eye(nm-1)))
return contr
def contrast_diff_mean(nm):
'''contrast or restriction matrix for all against mean comparison
Parameters
----------
nm : int
Returns
-------
contr : ndarray, 2d, (nm-1, nm)
contrast matrix for all against mean comparisons
'''
return np.eye(nm) - np.ones((nm,nm))/nm
def signstr(x, noplus=False):
if x in [-1,0,1]:
if not noplus:
return '+' if np.sign(x)>=0 else '-'
else:
return '' if np.sign(x)>=0 else '-'
else:
return str(x)
def contrast_labels(contrasts, names, reverse=False):
if reverse:
sl = slice(None, None, -1)
else:
sl = slice(None)
    labels = [''.join(['%s%s' % (signstr(c, noplus=True), v)
                       for c, v in list(zip(row, names))[sl] if c != 0])
              for row in contrasts]
return labels
def contrast_product(names1, names2, intgroup1=None, intgroup2=None, pairs=False):
'''build contrast matrices for products of two categorical variables
this is an experimental script and should be converted to a class
Parameters
----------
names1, names2 : lists of strings
contains the list of level labels for each categorical variable
    intgroup1, intgroup2 : ndarrays TODO: this part is not tested / finished yet
categorical variable
Notes
-----
This creates a full rank matrix. It does not do all pairwise comparisons,
parameterization is using contrast_all_one to get differences with first
level.
? does contrast_all_pairs work as a plugin to get all pairs ?
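    Examples
    --------
    Illustrative call, showing only the product level labels:

    >>> res = contrast_product(['a0', 'a1'], ['b0', 'b1'])
    >>> res[0]
    ['a0_b0', 'a0_b1', 'a1_b0', 'a1_b1']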
'''
n1 = len(names1)
n2 = len(names2)
names_prod = ['%s_%s' % (i,j) for i in names1 for j in names2]
ee1 = np.zeros((1,n1))
ee1[0,0] = 1
if not pairs:
dd = np.r_[ee1, -contrast_all_one(n1)]
else:
dd = np.r_[ee1, -contrast_allpairs(n1)]
contrast_prod = np.kron(dd[1:], np.eye(n2))
names_contrast_prod0 = contrast_labels(contrast_prod, names_prod, reverse=True)
    names_contrast_prod = [''.join(['%s%s' % (signstr(c, noplus=True), v)
                           for c, v in list(zip(row, names_prod))[::-1] if c != 0])
                           for row in contrast_prod]
ee2 = np.zeros((1,n2))
ee2[0,0] = 1
#dd2 = np.r_[ee2, -contrast_all_one(n2)]
if not pairs:
dd2 = np.r_[ee2, -contrast_all_one(n2)]
else:
dd2 = np.r_[ee2, -contrast_allpairs(n2)]
contrast_prod2 = np.kron(np.eye(n1), dd2[1:])
    names_contrast_prod2 = [''.join(['%s%s' % (signstr(c, noplus=True), v)
                            for c, v in list(zip(row, names_prod))[::-1] if c != 0])
                            for row in contrast_prod2]
    if (intgroup1 is not None) and (intgroup2 is not None):
d1, _ = dummy_1d(intgroup1)
d2, _ = dummy_1d(intgroup2)
dummy = dummy_product(d1, d2)
else:
dummy = None
return (names_prod, contrast_prod, names_contrast_prod,
contrast_prod2, names_contrast_prod2, dummy)
def dummy_1d(x, varname=None):
'''dummy variable for id integer groups
Parameters
----------
x : ndarray, 1d
categorical variable, requires integers if varname is None
varname : str
name of the variable used in labels for category levels
Returns
-------
dummy : ndarray, 2d
array of dummy variables, one column for each level of the
category (full set)
labels : list[str]
labels for the columns, i.e. levels of each category
Notes
-----
    use tools.categorical instead for more options
See Also
--------
statsmodels.tools.categorical
Examples
--------
>>> x = np.array(['F', 'F', 'M', 'M', 'F', 'F', 'M', 'M', 'F', 'F', 'M', 'M'],
dtype='|S1')
>>> dummy_1d(x, varname='gender')
(array([[1, 0],
[1, 0],
[0, 1],
[0, 1],
[1, 0],
[1, 0],
[0, 1],
[0, 1],
[1, 0],
[1, 0],
[0, 1],
[0, 1]]), ['gender_F', 'gender_M'])
'''
if varname is None: #assumes integer
labels = ['level_%d' % i for i in range(x.max() + 1)]
return (x[:,None]==np.arange(x.max()+1)).astype(int), labels
else:
grouplabels = np.unique(x)
labels = [varname + '_%s' % str(i) for i in grouplabels]
return (x[:,None]==grouplabels).astype(int), labels
def dummy_product(d1, d2, method='full'):
'''dummy variable from product of two dummy variables
Parameters
----------
d1, d2 : ndarray
two dummy variables, assumes full set for methods 'drop-last'
and 'drop-first'
method : {'full', 'drop-last', 'drop-first'}
'full' returns the full product, encoding of intersection of
categories.
The drop methods provide a difference dummy encoding:
(constant, main effects, interaction effects). The first or last columns
of the dummy variable (i.e. levels) are dropped to get full rank
dummy matrix.
Returns
-------
dummy : ndarray
dummy variable for product, see method
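    Examples
    --------
    Illustrative 'full' product of two 2-level dummy variables:

    >>> d1 = np.array([[1, 0], [1, 0], [0, 1], [0, 1]])
    >>> d2 = np.array([[1, 0], [0, 1], [1, 0], [0, 1]])
    >>> dummy_product(d1, d2)
    array([[1, 0, 0, 0],
           [0, 1, 0, 0],
           [0, 0, 1, 0],
           [0, 0, 0, 1]])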
'''
if method == 'full':
dd = (d1[:,:,None]*d2[:,None,:]).reshape(d1.shape[0],-1)
elif method == 'drop-last': #same as SAS transreg
d12rl = dummy_product(d1[:,:-1], d2[:,:-1])
dd = np.column_stack((np.ones(d1.shape[0], int), d1[:,:-1], d2[:,:-1],d12rl))
#Note: dtype int should preserve dtype of d1 and d2
elif method == 'drop-first':
d12r = dummy_product(d1[:,1:], d2[:,1:])
dd = np.column_stack((np.ones(d1.shape[0], int), d1[:,1:], d2[:,1:],d12r))
else:
raise ValueError('method not recognized')
return dd
def dummy_limits(d):
'''start and endpoints of groups in a sorted dummy variable array
helper function for nested categories
Examples
--------
>>> d1 = np.array([[1, 0, 0],
[1, 0, 0],
[1, 0, 0],
[1, 0, 0],
[0, 1, 0],
[0, 1, 0],
[0, 1, 0],
[0, 1, 0],
[0, 0, 1],
[0, 0, 1],
[0, 0, 1],
[0, 0, 1]])
>>> dummy_limits(d1)
(array([0, 4, 8]), array([ 4, 8, 12]))
get group slices from an array
>>> [np.arange(d1.shape[0])[b:e] for b,e in zip(*dummy_limits(d1))]
[array([0, 1, 2, 3]), array([4, 5, 6, 7]), array([ 8, 9, 10, 11])]
>>> [np.arange(d1.shape[0])[b:e] for b,e in zip(*dummy_limits(d1))]
[array([0, 1, 2, 3]), array([4, 5, 6, 7]), array([ 8, 9, 10, 11])]
'''
nobs, nvars = d.shape
start1, col1 = np.nonzero(np.diff(d,axis=0)==1)
end1, col1_ = np.nonzero(np.diff(d,axis=0)==-1)
cc = np.arange(nvars)
    #print(cc, np.r_[[0], col1], np.r_[col1_, [nvars-1]])
if ((not (np.r_[[0], col1] == cc).all())
or (not (np.r_[col1_, [nvars-1]] == cc).all())):
raise ValueError('dummy variable is not sorted')
start = np.r_[[0], start1+1]
end = np.r_[end1+1, [nobs]]
return start, end
def dummy_nested(d1, d2, method='full'):
    '''unfinished and incomplete, mainly a copy-paste of dummy_product
dummy variable from product of two dummy variables
Parameters
----------
d1, d2 : ndarray
two dummy variables, d2 is assumed to be nested in d1
Assumes full set for methods 'drop-last' and 'drop-first'.
method : {'full', 'drop-last', 'drop-first'}
'full' returns the full product, which in this case is d2.
The drop methods provide an effects encoding:
(constant, main effects, subgroup effects). The first or last columns
of the dummy variable (i.e. levels) are dropped to get full rank
encoding.
Returns
-------
dummy : ndarray
dummy variable for product, see method
'''
if method == 'full':
return d2
start1, end1 = dummy_limits(d1)
start2, end2 = dummy_limits(d2)
first = np.in1d(start2, start1)
last = np.in1d(end2, end1)
equal = (first == last)
col_dropf = ~first*~equal
col_dropl = ~last*~equal
if method == 'drop-last':
d12rl = dummy_product(d1[:,:-1], d2[:,:-1])
dd = np.column_stack((np.ones(d1.shape[0], int), d1[:,:-1], d2[:,col_dropl]))
#Note: dtype int should preserve dtype of d1 and d2
elif method == 'drop-first':
d12r = dummy_product(d1[:,1:], d2[:,1:])
dd = np.column_stack((np.ones(d1.shape[0], int), d1[:,1:], d2[:,col_dropf]))
else:
raise ValueError('method not recognized')
return dd, col_dropf, col_dropl
class DummyTransform(object):
'''Conversion between full rank dummy encodings
y = X b + u
b = C a
a = C^{-1} b
y = X C a + u
define Z = X C, then
y = Z a + u
contrasts:
R_b b = r
R_a a = R_b C a = r
where R_a = R_b C
Here C is the transform matrix, with dot_left and dot_right as the main
methods, and the same for the inverse transform matrix, C^{-1}
Note:
- The class was mainly written to keep left and right straight.
- No checking is done.
- not sure yet if method names make sense
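    A small usage sketch (dd_full and dd_dropf as constructed in the
    examples at the bottom of this module; parameter names are ad hoc):

        transf = DummyTransform(dd_full, dd_dropf)
        params_dropf = transf.inv_dot_left(params_full)
        params_back = transf.dot_left(params_dropf)   # ~ params_full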
'''
def __init__(self, d1, d2):
'''C such that d1 C = d2, with d1 = X, d2 = Z
should be (x, z) in arguments ?
'''
self.transf_matrix = np.linalg.lstsq(d1, d2, rcond=-1)[0]
self.invtransf_matrix = np.linalg.lstsq(d2, d1, rcond=-1)[0]
def dot_left(self, a):
''' b = C a
'''
return np.dot(self.transf_matrix, a)
def dot_right(self, x):
''' z = x C
'''
return np.dot(x, self.transf_matrix)
def inv_dot_left(self, b):
''' a = C^{-1} b
'''
return np.dot(self.invtransf_matrix, b)
def inv_dot_right(self, z):
''' x = z C^{-1}
'''
return np.dot(z, self.invtransf_matrix)
def groupmean_d(x, d):
'''groupmeans using dummy variables
Parameters
----------
x : array_like, ndim
data array, tested for 1,2 and 3 dimensions
d : ndarray, 1d
dummy variable, needs to have the same length
as x in axis 0.
Returns
-------
groupmeans : ndarray, ndim-1
means for each group along axis 0, the levels
of the groups are the last axis
Notes
-----
This will be memory intensive if there are many levels
in the categorical variable, i.e. many columns in the
dummy variable. In this case it is recommended to use
a more efficient version.
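    Examples
    --------
    Illustrative, two groups of three observations each:

    >>> x = np.arange(6.)
    >>> d = np.array([[1, 0], [1, 0], [1, 0], [0, 1], [0, 1], [0, 1]])
    >>> groupmean_d(x, d)
    array([ 1.,  4.])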
'''
x = np.asarray(x)
## if x.ndim == 1:
## nvars = 1
## else:
nvars = x.ndim + 1
sli = [slice(None)] + [None]*(nvars-2) + [slice(None)]
return (x[...,None] * d[sli]).sum(0)*1./d.sum(0)
class TwoWay(object):
'''a wrapper class for two way anova type of analysis with OLS
currently mainly to bring things together
Notes
-----
    unclear: adding multiple tests might assume block design or orthogonality
This estimates the full dummy version with OLS.
The drop first dummy representation can be recovered through the
transform method.
TODO: add more methods, tests, pairwise, multiple, marginal effects
    try out what can be added for user-friendly access.
missing: ANOVA table
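    Illustrative usage (variable names as in the __main__ example at the
    bottom of this module):

        tw = TwoWay(ydata_dropf, x1, x2)
        print(tw.ftest_interaction())
        print(tw.summary_coeff())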
'''
def __init__(self, endog, factor1, factor2, varnames=None):
self.nobs = factor1.shape[0]
if varnames is None:
vname1 = 'a'
vname2 = 'b'
else:
            vname1, vname2 = varnames
self.d1, self.d1_labels = d1, d1_labels = dummy_1d(factor1, vname1)
self.d2, self.d2_labels = d2, d2_labels = dummy_1d(factor2, vname2)
self.nlevel1 = nlevel1 = d1.shape[1]
self.nlevel2 = nlevel2 = d2.shape[1]
#get product dummies
res = contrast_product(d1_labels, d2_labels)
prodlab, C1, C1lab, C2, C2lab, _ = res
self.prod_label, self.C1, self.C1_label, self.C2, self.C2_label, _ = res
dp_full = dummy_product(d1, d2, method='full')
dp_dropf = dummy_product(d1, d2, method='drop-first')
self.transform = DummyTransform(dp_full, dp_dropf)
#estimate the model
self.nvars = dp_full.shape[1]
self.exog = dp_full
self.resols = sm.OLS(endog, dp_full).fit()
self.params = self.resols.params
#get transformed parameters, (constant, main, interaction effect)
self.params_dropf = self.transform.inv_dot_left(self.params)
self.start_interaction = 1 + (nlevel1 - 1) + (nlevel2 - 1)
self.n_interaction = self.nvars - self.start_interaction
#convert to cached property
def r_nointer(self):
'''contrast/restriction matrix for no interaction
'''
nia = self.n_interaction
R_nointer = np.hstack((np.zeros((nia, self.nvars-nia)), np.eye(nia)))
#inter_direct = resols_full_dropf.tval[-nia:]
R_nointer_transf = self.transform.inv_dot_right(R_nointer)
self.R_nointer_transf = R_nointer_transf
return R_nointer_transf
def ttest_interaction(self):
        '''t tests that the interaction terms are zero
'''
#use self.r_nointer instead
nia = self.n_interaction
R_nointer = np.hstack((np.zeros((nia, self.nvars-nia)), np.eye(nia)))
#inter_direct = resols_full_dropf.tval[-nia:]
R_nointer_transf = self.transform.inv_dot_right(R_nointer)
self.R_nointer_transf = R_nointer_transf
t_res = self.resols.t_test(R_nointer_transf)
return t_res
def ftest_interaction(self):
        '''F test that the interaction terms are zero
        '''
R_nointer_transf = self.r_nointer()
return self.resols.f_test(R_nointer_transf)
def ttest_conditional_effect(self, factorind):
if factorind == 1:
return self.resols.t_test(self.C1), self.C1_label
else:
return self.resols.t_test(self.C2), self.C2_label
def summary_coeff(self):
from statsmodels.iolib import SimpleTable
params_arr = self.params.reshape(self.nlevel1, self.nlevel2)
stubs = self.d1_labels
headers = self.d2_labels
title = 'Estimated Coefficients by factors'
table_fmt = dict(
data_fmts = ["%#10.4g"]*self.nlevel2)
return SimpleTable(params_arr, headers, stubs, title=title,
txt_fmt=table_fmt)
# --------------- tests
# TODO: several tests still missing, several are in the example with print
class TestContrastTools(object):
def __init__(self):
self.v1name = ['a0', 'a1', 'a2']
self.v2name = ['b0', 'b1']
self.d1 = np.array([[1, 0, 0],
[1, 0, 0],
[1, 0, 0],
[1, 0, 0],
[0, 1, 0],
[0, 1, 0],
[0, 1, 0],
[0, 1, 0],
[0, 0, 1],
[0, 0, 1],
[0, 0, 1],
[0, 0, 1]])
def test_dummy_1d(self):
x = np.array(['F', 'F', 'M', 'M', 'F', 'F', 'M', 'M', 'F', 'F', 'M', 'M'],
dtype='|S1')
d, labels = (np.array([[1, 0],
[1, 0],
[0, 1],
[0, 1],
[1, 0],
[1, 0],
[0, 1],
[0, 1],
[1, 0],
[1, 0],
[0, 1],
[0, 1]]), ['gender_F', 'gender_M'])
res_d, res_labels = dummy_1d(x, varname='gender')
assert_equal(res_d, d)
assert_equal(res_labels, labels)
def test_contrast_product(self):
res_cp = contrast_product(self.v1name, self.v2name)
res_t = [0]*6
res_t[0] = ['a0_b0', 'a0_b1', 'a1_b0', 'a1_b1', 'a2_b0', 'a2_b1']
res_t[1] = np.array([[-1., 0., 1., 0., 0., 0.],
[ 0., -1., 0., 1., 0., 0.],
[-1., 0., 0., 0., 1., 0.],
[ 0., -1., 0., 0., 0., 1.]])
res_t[2] = ['a1_b0-a0_b0', 'a1_b1-a0_b1', 'a2_b0-a0_b0', 'a2_b1-a0_b1']
res_t[3] = np.array([[-1., 1., 0., 0., 0., 0.],
[ 0., 0., -1., 1., 0., 0.],
[ 0., 0., 0., 0., -1., 1.]])
res_t[4] = ['a0_b1-a0_b0', 'a1_b1-a1_b0', 'a2_b1-a2_b0']
for ii in range(5):
np.testing.assert_equal(res_cp[ii], res_t[ii], err_msg=str(ii))
def test_dummy_limits(self):
b,e = dummy_limits(self.d1)
assert_equal(b, np.array([0, 4, 8]))
assert_equal(e, np.array([ 4, 8, 12]))
if __name__ == '__main__':
tt = TestContrastTools()
tt.test_contrast_product()
tt.test_dummy_1d()
tt.test_dummy_limits()
import statsmodels.api as sm
examples = ['small', 'large', None][1]
v1name = ['a0', 'a1', 'a2']
v2name = ['b0', 'b1']
res_cp = contrast_product(v1name, v2name)
print(res_cp)
y = np.arange(12)
x1 = np.arange(12)//4
x2 = np.arange(12)//2 % 2
if 'small' in examples:
d1, d1_labels = dummy_1d(x1)
d2, d2_labels = dummy_1d(x2)
if 'large' in examples:
x1 = np.repeat(x1, 5, axis=0)
x2 = np.repeat(x2, 5, axis=0)
nobs = x1.shape[0]
d1, d1_labels = dummy_1d(x1)
d2, d2_labels = dummy_1d(x2)
dd_full = dummy_product(d1, d2, method='full')
dd_dropl = dummy_product(d1, d2, method='drop-last')
dd_dropf = dummy_product(d1, d2, method='drop-first')
#Note: full parameterization of dummies is orthogonal
#np.eye(6)*10 in "large" example
print((np.dot(dd_full.T, dd_full) == np.diag(dd_full.sum(0))).all())
#check that transforms work
#generate 3 data sets with the 3 different parameterizations
effect_size = [1., 0.01][1]
noise_scale = [0.001, 0.1][0]
noise = noise_scale * np.random.randn(nobs)
beta = effect_size * np.arange(1,7)
ydata_full = (dd_full * beta).sum(1) + noise
ydata_dropl = (dd_dropl * beta).sum(1) + noise
ydata_dropf = (dd_dropf * beta).sum(1) + noise
resols_full_full = sm.OLS(ydata_full, dd_full).fit()
resols_full_dropf = sm.OLS(ydata_full, dd_dropf).fit()
params_f_f = resols_full_full.params
params_f_df = resols_full_dropf.params
resols_dropf_full = sm.OLS(ydata_dropf, dd_full).fit()
resols_dropf_dropf = sm.OLS(ydata_dropf, dd_dropf).fit()
params_df_f = resols_dropf_full.params
params_df_df = resols_dropf_dropf.params
tr_of = np.linalg.lstsq(dd_dropf, dd_full, rcond=-1)[0]
tr_fo = np.linalg.lstsq(dd_full, dd_dropf, rcond=-1)[0]
print(np.dot(tr_fo, params_df_df) - params_df_f)
print(np.dot(tr_of, params_f_f) - params_f_df)
transf_f_df = DummyTransform(dd_full, dd_dropf)
print(np.max(np.abs((dd_full - transf_f_df.inv_dot_right(dd_dropf)))))
print(np.max(np.abs((dd_dropf - transf_f_df.dot_right(dd_full)))))
print(np.max(np.abs((params_df_df
- transf_f_df.inv_dot_left(params_df_f)))))
np.max(np.abs((params_f_df
- transf_f_df.inv_dot_left(params_f_f))))
prodlab, C1, C1lab, C2, C2lab,_ = contrast_product(v1name, v2name)
print('\ntvalues for no effect of factor 1')
print('each test is conditional on a level of factor 2')
print(C1lab)
print(resols_dropf_full.t_test(C1).tvalue)
print('\ntvalues for no effect of factor 2')
print('each test is conditional on a level of factor 1')
print(C2lab)
print(resols_dropf_full.t_test(C2).tvalue)
#covariance matrix of restrictions C2, note: orthogonal
resols_dropf_full.cov_params(C2)
#testing for no interaction effect
R_noint = np.hstack((np.zeros((2,4)), np.eye(2)))
inter_direct = resols_full_dropf.tvalues[-2:]
inter_transf = resols_full_full.t_test(transf_f_df.inv_dot_right(R_noint)).tvalue
print(np.max(np.abs((inter_direct - inter_transf))))
#now with class version
tw = TwoWay(ydata_dropf, x1, x2)
print(tw.ttest_interaction().tvalue)
print(tw.ttest_interaction().pvalue)
print(tw.ftest_interaction().fvalue)
print(tw.ftest_interaction().pvalue)
print(tw.ttest_conditional_effect(1)[0].tvalue)
print(tw.ttest_conditional_effect(2)[0].tvalue)
print(tw.summary_coeff())
''' documentation for early examples while developing - some have changed already
>>> y = np.arange(12)
>>> y
array([ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11])
>>> x1 = np.arange(12)//4
>>> x1
array([0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2])
>>> x2 = np.arange(12)//2%2
>>> x2
array([0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1])
>>> d1 = dummy_1d(x1)
>>> d1
array([[1, 0, 0],
[1, 0, 0],
[1, 0, 0],
[1, 0, 0],
[0, 1, 0],
[0, 1, 0],
[0, 1, 0],
[0, 1, 0],
[0, 0, 1],
[0, 0, 1],
[0, 0, 1],
[0, 0, 1]])
>>> d2 = dummy_1d(x2)
>>> d2
array([[1, 0],
[1, 0],
[0, 1],
[0, 1],
[1, 0],
[1, 0],
[0, 1],
[0, 1],
[1, 0],
[1, 0],
[0, 1],
[0, 1]])
>>> d12 = dummy_product(d1, d2)
>>> d12
array([[1, 0, 0, 0, 0, 0],
[1, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0],
[0, 0, 1, 0, 0, 0],
[0, 0, 1, 0, 0, 0],
[0, 0, 0, 1, 0, 0],
[0, 0, 0, 1, 0, 0],
[0, 0, 0, 0, 1, 0],
[0, 0, 0, 0, 1, 0],
[0, 0, 0, 0, 0, 1],
[0, 0, 0, 0, 0, 1]])
>>> d12rl = dummy_product(d1[:,:-1], d2[:,:-1])
>>> np.column_stack((np.ones(d1.shape[0]), d1[:,:-1], d2[:,:-1],d12rl))
array([[ 1., 1., 0., 1., 1., 0.],
[ 1., 1., 0., 1., 1., 0.],
[ 1., 1., 0., 0., 0., 0.],
[ 1., 1., 0., 0., 0., 0.],
[ 1., 0., 1., 1., 0., 1.],
[ 1., 0., 1., 1., 0., 1.],
[ 1., 0., 1., 0., 0., 0.],
[ 1., 0., 1., 0., 0., 0.],
[ 1., 0., 0., 1., 0., 0.],
[ 1., 0., 0., 1., 0., 0.],
[ 1., 0., 0., 0., 0., 0.],
[ 1., 0., 0., 0., 0., 0.]])
'''
#nprod = ['%s_%s' % (i,j) for i in ['a0', 'a1', 'a2'] for j in ['b0', 'b1']]
#>>> [''.join(['%s%s' % (signstr(c),v) for c,v in zip(row, nprod) if c != 0])
# for row in np.kron(dd[1:], np.eye(2))]
'''
>>> nprod = ['%s_%s' % (i,j) for i in ['a0', 'a1', 'a2'] for j in ['b0', 'b1']]
>>> nprod
['a0_b0', 'a0_b1', 'a1_b0', 'a1_b1', 'a2_b0', 'a2_b1']
>>> [''.join(['%s%s' % (signstr(c),v) for c,v in zip(row, nprod) if c != 0]) for row in np.kron(dd[1:], np.eye(2))]
['-a0b0+a1b0', '-a0b1+a1b1', '-a0b0+a2b0', '-a0b1+a2b1']
>>> [''.join(['%s%s' % (signstr(c),v) for c,v in zip(row, nprod)[::-1] if c != 0]) for row in np.kron(dd[1:], np.eye(2))]
['+a1_b0-a0_b0', '+a1_b1-a0_b1', '+a2_b0-a0_b0', '+a2_b1-a0_b1']
>>> np.r_[[[1,0,0,0,0]],contrast_all_one(5)]
array([[ 1., 0., 0., 0., 0.],
[ 1., -1., 0., 0., 0.],
[ 1., 0., -1., 0., 0.],
[ 1., 0., 0., -1., 0.],
[ 1., 0., 0., 0., -1.]])
>>> idxprod = [(i,j) for i in range(3) for j in range(2)]
>>> idxprod
[(0, 0), (0, 1), (1, 0), (1, 1), (2, 0), (2, 1)]
>>> np.array(idxprod).reshape(2,3,2,order='F')[:,:,0]
array([[0, 1, 2],
[0, 1, 2]])
>>> np.array(idxprod).reshape(2,3,2,order='F')[:,:,1]
array([[0, 0, 0],
[1, 1, 1]])
>>> dd3_ = np.r_[[[0,0,0]],contrast_all_one(3)]
pairwise contrasts and reparameterization
>>> dd = np.r_[[[1,0,0,0,0]],-contrast_all_one(5)]
>>> dd
array([[ 1., 0., 0., 0., 0.],
[-1., 1., 0., 0., 0.],
[-1., 0., 1., 0., 0.],
[-1., 0., 0., 1., 0.],
[-1., 0., 0., 0., 1.]])
>>> np.dot(dd.T, np.arange(5))
array([-10., 1., 2., 3., 4.])
>>> np.round(np.linalg.inv(dd.T)).astype(int)
array([[1, 1, 1, 1, 1],
[0, 1, 0, 0, 0],
[0, 0, 1, 0, 0],
[0, 0, 0, 1, 0],
[0, 0, 0, 0, 1]])
>>> np.round(np.linalg.inv(dd)).astype(int)
array([[1, 0, 0, 0, 0],
[1, 1, 0, 0, 0],
[1, 0, 1, 0, 0],
[1, 0, 0, 1, 0],
[1, 0, 0, 0, 1]])
>>> dd
array([[ 1., 0., 0., 0., 0.],
[-1., 1., 0., 0., 0.],
[-1., 0., 1., 0., 0.],
[-1., 0., 0., 1., 0.],
[-1., 0., 0., 0., 1.]])
>>> ddinv=np.round(np.linalg.inv(dd.T)).astype(int)
>>> np.dot(ddinv, np.arange(5))
array([10, 1, 2, 3, 4])
>>> np.dot(dd, np.arange(5))
array([ 0., 1., 2., 3., 4.])
>>> np.dot(dd, 5+np.arange(5))
array([ 5., 1., 2., 3., 4.])
>>> ddinv2 = np.round(np.linalg.inv(dd)).astype(int)
>>> np.dot(ddinv2, np.arange(5))
array([0, 1, 2, 3, 4])
>>> np.dot(ddinv2, 5+np.arange(5))
array([ 5, 11, 12, 13, 14])
>>> np.dot(ddinv2, [5, 0, 0 , 1, 2])
array([5, 5, 5, 6, 7])
>>> np.dot(ddinv2, dd)
array([[ 1., 0., 0., 0., 0.],
[ 0., 1., 0., 0., 0.],
[ 0., 0., 1., 0., 0.],
[ 0., 0., 0., 1., 0.],
[ 0., 0., 0., 0., 1.]])
>>> dd3 = -np.r_[[[1,0,0]],contrast_all_one(3)]
>>> dd2 = -np.r_[[[1,0]],contrast_all_one(2)]
>>> np.kron(np.eye(3), dd2)
array([[-1., 0., 0., 0., 0., 0.],
[-1., 1., 0., 0., 0., 0.],
[ 0., 0., -1., 0., 0., 0.],
[ 0., 0., -1., 1., 0., 0.],
[ 0., 0., 0., 0., -1., 0.],
[ 0., 0., 0., 0., -1., 1.]])
>>> dd2
array([[-1., 0.],
[-1., 1.]])
>>> np.kron(np.eye(3), dd2[1:])
array([[-1., 1., 0., 0., 0., 0.],
[ 0., 0., -1., 1., 0., 0.],
[ 0., 0., 0., 0., -1., 1.]])
>>> np.kron(dd[1:], np.eye(2))
array([[-1., 0., 1., 0., 0., 0.],
[ 0., -1., 0., 1., 0., 0.],
[-1., 0., 0., 0., 1., 0.],
[ 0., -1., 0., 0., 0., 1.]])
>>> d_ = np.r_[[[1,0,0,0,0]],contrast_all_one(5)]
>>> d_
array([[ 1., 0., 0., 0., 0.],
[ 1., -1., 0., 0., 0.],
[ 1., 0., -1., 0., 0.],
[ 1., 0., 0., -1., 0.],
[ 1., 0., 0., 0., -1.]])
>>> np.round(np.linalg.pinv(d_)).astype(int)
array([[ 1, 0, 0, 0, 0],
[ 1, -1, 0, 0, 0],
[ 1, 0, -1, 0, 0],
[ 1, 0, 0, -1, 0],
[ 1, 0, 0, 0, -1]])
>>> np.linalg.inv(d_).astype(int)
array([[ 1, 0, 0, 0, 0],
[ 1, -1, 0, 0, 0],
[ 1, 0, -1, 0, 0],
[ 1, 0, 0, -1, 0],
[ 1, 0, 0, 0, -1]])
group means
>>> sli = [slice(None)] + [None]*(3-2) + [slice(None)]
>>> (np.column_stack((y, x1, x2))[...,None] * d1[sli]).sum(0)*1./d1.sum(0)
array([[ 1.5, 5.5, 9.5],
[ 0. , 1. , 2. ],
[ 0.5, 0.5, 0.5]])
>>> [(z[:,None] * d1).sum(0)*1./d1.sum(0) for z in np.column_stack((y, x1, x2)).T]
[array([ 1.5, 5.5, 9.5]), array([ 0., 1., 2.]), array([ 0.5, 0.5, 0.5])]
>>>
'''
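# Hedged sketch: the doctests above call dummy_1d and dummy_product without
# showing them; minimal versions consistent with the printed outputs could
# look like this (an assumption - the real helpers may differ in signature).
def _dummy_1d_sketch(x):
    import numpy as np
    levels = np.unique(x)
    # one indicator column per level, in sorted level order
    return (np.asarray(x)[:, None] == levels[None, :]).astype(int)
def _dummy_product_sketch(d1, d2):
    # column (i, j) holds the elementwise product d1[:, i] * d2[:, j]
    return (d1[:, :, None] * d2[:, None, :]).reshape(d1.shape[0], -1)
# e.g. _dummy_product_sketch(_dummy_1d_sketch(x1), _dummy_1d_sketch(x2))
# reproduces the d12 matrix printed in the doctests above.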
|
bsd-3-clause
| -6,191,862,223,996,965,000
| 29.020855
| 121
| 0.501841
| false
| 2.838411
| true
| false
| false
|
chrislit/abydos
|
tests/distance/test_distance_goodman_kruskal_lambda.py
|
1
|
4737
|
# Copyright 2019-2020 by Christopher C. Little.
# This file is part of Abydos.
#
# Abydos is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Abydos is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Abydos. If not, see <http://www.gnu.org/licenses/>.
"""abydos.tests.distance.test_distance_goodman_kruskal_lambda.
This module contains unit tests for abydos.distance.GoodmanKruskalLambda.
"""
import unittest
from abydos.distance import GoodmanKruskalLambda
class GoodmanKruskalLambdaTestCases(unittest.TestCase):
"""Test GoodmanKruskalLambda functions.
abydos.distance.GoodmanKruskalLambda
"""
cmp = GoodmanKruskalLambda()
cmp_no_d = GoodmanKruskalLambda(alphabet=0)
def test_goodman_kruskal_lambda_sim(self):
"""Test abydos.distance.GoodmanKruskalLambda.sim."""
# Base cases
self.assertEqual(self.cmp.sim('', ''), 1.0)
self.assertEqual(self.cmp.sim('a', ''), 0.0)
self.assertEqual(self.cmp.sim('', 'a'), 0.0)
self.assertEqual(self.cmp.sim('abc', ''), 0.0)
self.assertEqual(self.cmp.sim('', 'abc'), 0.0)
self.assertEqual(self.cmp.sim('abc', 'abc'), 1.0)
self.assertEqual(self.cmp.sim('abcd', 'efgh'), 0.0)
self.assertAlmostEqual(self.cmp.sim('Nigel', 'Niall'), 0.0)
self.assertAlmostEqual(self.cmp.sim('Niall', 'Nigel'), 0.0)
self.assertAlmostEqual(self.cmp.sim('Colin', 'Coiln'), 0.0)
self.assertAlmostEqual(self.cmp.sim('Coiln', 'Colin'), 0.0)
self.assertAlmostEqual(
self.cmp.sim('ATCAACGAGT', 'AACGATTAG'), 0.3333333333
)
# Tests with alphabet=0 (no d factor)
self.assertEqual(self.cmp_no_d.sim('', ''), 1.0)
self.assertEqual(self.cmp_no_d.sim('a', ''), 0.0)
self.assertEqual(self.cmp_no_d.sim('', 'a'), 0.0)
self.assertEqual(self.cmp_no_d.sim('abc', ''), 0.0)
self.assertEqual(self.cmp_no_d.sim('', 'abc'), 0.0)
self.assertEqual(self.cmp_no_d.sim('abc', 'abc'), 1.0)
self.assertEqual(self.cmp_no_d.sim('abcd', 'efgh'), 1.0)
self.assertAlmostEqual(self.cmp_no_d.sim('Nigel', 'Niall'), 0.0)
self.assertAlmostEqual(self.cmp_no_d.sim('Niall', 'Nigel'), 0.0)
self.assertAlmostEqual(self.cmp_no_d.sim('Colin', 'Coiln'), 0.0)
self.assertAlmostEqual(self.cmp_no_d.sim('Coiln', 'Colin'), 0.0)
self.assertAlmostEqual(
self.cmp_no_d.sim('ATCAACGAGT', 'AACGATTAG'), 0.0
)
def test_goodman_kruskal_lambda_dist(self):
"""Test abydos.distance.GoodmanKruskalLambda.dist."""
# Base cases
self.assertEqual(self.cmp.dist('', ''), 0.0)
self.assertEqual(self.cmp.dist('a', ''), 1.0)
self.assertEqual(self.cmp.dist('', 'a'), 1.0)
self.assertEqual(self.cmp.dist('abc', ''), 1.0)
self.assertEqual(self.cmp.dist('', 'abc'), 1.0)
self.assertEqual(self.cmp.dist('abc', 'abc'), 0.0)
self.assertEqual(self.cmp.dist('abcd', 'efgh'), 1.0)
self.assertAlmostEqual(self.cmp.dist('Nigel', 'Niall'), 1.0)
self.assertAlmostEqual(self.cmp.dist('Niall', 'Nigel'), 1.0)
self.assertAlmostEqual(self.cmp.dist('Colin', 'Coiln'), 1.0)
self.assertAlmostEqual(self.cmp.dist('Coiln', 'Colin'), 1.0)
self.assertAlmostEqual(
self.cmp.dist('ATCAACGAGT', 'AACGATTAG'), 0.6666666667
)
# Tests with alphabet=0 (no d factor)
self.assertEqual(self.cmp_no_d.dist('', ''), 0.0)
self.assertEqual(self.cmp_no_d.dist('a', ''), 1.0)
self.assertEqual(self.cmp_no_d.dist('', 'a'), 1.0)
self.assertEqual(self.cmp_no_d.dist('abc', ''), 1.0)
self.assertEqual(self.cmp_no_d.dist('', 'abc'), 1.0)
self.assertEqual(self.cmp_no_d.dist('abc', 'abc'), 0.0)
self.assertEqual(self.cmp_no_d.dist('abcd', 'efgh'), 0.0)
self.assertAlmostEqual(self.cmp_no_d.dist('Nigel', 'Niall'), 1.0)
self.assertAlmostEqual(self.cmp_no_d.dist('Niall', 'Nigel'), 1.0)
self.assertAlmostEqual(self.cmp_no_d.dist('Colin', 'Coiln'), 1.0)
self.assertAlmostEqual(self.cmp_no_d.dist('Coiln', 'Colin'), 1.0)
self.assertAlmostEqual(
self.cmp_no_d.dist('ATCAACGAGT', 'AACGATTAG'), 1.0
)
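def _demo_direct_use():
    # Hedged sketch (not part of the test suite): the comparator used
    # directly, mirroring the assertions above.
    cmp_ = GoodmanKruskalLambda()
    return (cmp_.sim('ATCAACGAGT', 'AACGATTAG'),   # ~0.3333 per the tests
            cmp_.dist('ATCAACGAGT', 'AACGATTAG'))  # ~0.6667 per the tests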
if __name__ == '__main__':
unittest.main()
|
gpl-3.0
| -650,457,956,675,182,000
| 42.063636
| 73
| 0.628668
| false
| 3.12467
| true
| false
| false
|
jbzdak/data-base-checker
|
grading/tests/test_models.py
|
1
|
7167
|
# coding=utf-8
from django.contrib.auth.models import User, Group
from django.test.testcases import TestCase
from grading.models import *
from grading.autograding import get_autograders
class StudentTest(TestCase):
def test_user_creation_creates_student(self):
u = User.objects.create(username = "test1", email="foo@foo.pl")
u.groups.add(Group.objects.get(name = "students"))
u.save()
qs = Student.objects.filter(user=u)
self.assertEqual(len(qs), 1)
def test_can_update_user(self):
u = User.objects.create(username = "test1", email="foo@foo.pl")
u.groups.add(Group.objects.get(name = "students"))
u.save()
u.email = "bar@bar.pl"
u.save()
def test_student_not_created_for_inactive_users(self):
u = User.objects.create(username = "test1", email="foo@foo.pl", is_active=False)
u.groups.add(Group.objects.get(name = "students"))
u.save()
qs = Student.objects.filter(user=u)
self.assertEqual(len(qs), 0)
def test_student_not_created_for_staff_users(self):
u = User.objects.create(username = "test1", email="foo@foo.pl", is_staff=True)
u.groups.add(Group.objects.get(name = "students"))
u.save()
qs = Student.objects.filter(user=u)
self.assertEqual(len(qs), 0)
class ActivityTest(TestCase):
def test_sort_key_auto_set(self):
a = GradeableActivity.objects.create(name="foo")
self.assertEqual(a.sort_key, "foo")
class TestFixture(TestCase):
def setUp(self):
self.u = User.objects.create(username = "test1", email="foo@foo.pl")
self.u.groups.add(Group.objects.get(name = "students"))
self.u.save()
self.student = Student.objects.filter(user=self.u).get()
self.other_user = User.objects.create(username = "other", email="foo@foo.pl")
self.other_user.groups.add(Group.objects.get(name = "students"))
self.other_user.save()
self.other_student =Student.objects.filter(user=self.other_user).get()
self.group = Course.objects.create(name = "course")
self.other_group = Course.objects.create(name = "other_group")
self.student.course = self.group
self.student.save()
self.other_student.course = self.other_group
self.other_student.save()
self.activity = GradeableActivity(name = "activity")
self.activity.save()
self.activity.courses.add(self.group)
self.activity.save()
self.otheractivity = GradeableActivity(name = "other")
self.otheractivity.save()
self.otheractivity.courses.add(self.other_group)
self.otheractivity.save()
class TestGrades(TestFixture):
def test_sync_grades_when_activity_is_added_to_group(self):
        # After setup it should be so:
self.assertEqual(len(self.student.grades.all()), 1)
#Other student shouldn't change
self.assertEqual(len(self.other_student.grades.all()), 1)
activity = GradeableActivity(name = "activity2")
activity.save()
activity.courses.add(self.group)
activity.save()
#Now we should have two grades
self.assertEqual(len(self.student.grades.all()), 2)
#Other student shouldn't change
self.assertEqual(len(self.other_student.grades.all()), 1)
for g in self.student.grades.all():
self.assertEqual(g.grade, 2.0)
def test_sync_grades_when_student_is_added_to_group(self):
u = User.objects.create(username = "test2", email="foo@foo.pl")
u.groups.add(Group.objects.get(name = "students"))
u.save()
student = Student.objects.filter(user=u).get()
# Before addition there should be no grades
self.assertEqual(len(student.grades.all()), 0)
student.course = self.group
student.save()
self.assertEqual(len(student.grades.all()), 1)
class TestGrading(TestFixture):
def setUp(self):
super(TestGrading, self).setUp()
self.grade_part_1 = GradePart.objects.create(
weight = 1,
required = True,
activity = self.activity,
name = "Zadanie 1"
)
self.grade_part_2 = GradePart.objects.create(
weight = 2,
required = False,
activity = self.activity,
name = "Zadanie 2"
)
self.activity.default_grade = 812.0
self.activity.save()
    def test_default_grade_returned_when_all_activities_unfinished(self):
sg = StudentGrade()
grade_student(self.activity, self.student, sg)
self.assertEqual(sg.grade, 812.0)
self.assertIn('Zadanie 1', sg.short_description)
    def test_default_grade_returned_when_required_activities_unfinished(self):
GradePart.objects.grade(self.grade_part_2, self.student, 5)
sg = StudentGrade()
grade_student(self.activity, self.student, sg)
self.assertEqual(sg.grade, 812.0)
self.assertIn('Zadanie 1', sg.short_description)
def test_grade_calculated_when_all_required_activitees_finished(self):
GradePart.objects.grade(self.grade_part_1, self.student, 5)
sg = StudentGrade()
grade_student(self.activity, self.student, sg)
self.assertEqual(sg.grade, 3)
def test_grade_calculated_when_all_activities_finished(self):
GradePart.objects.grade(self.grade_part_2, self.student, 3)
GradePart.objects.grade(self.grade_part_1, self.student, 3)
sg = StudentGrade()
grade_student(self.activity, self.student, sg)
self.assertEqual(sg.grade, 3)
    def test_default_grade_returned_when_required_activity_has_grade_below_passing(self):
GradePart.objects.grade(self.grade_part_1, self.student, 2)
GradePart.objects.grade(self.grade_part_2, self.student, 3)
sg = StudentGrade()
grade_student(self.activity, self.student, sg)
self.assertEqual(sg.grade, 812.0)
def test_grade_gets_updated(self):
GradePart.objects.grade(self.grade_part_1, self.student, 5.0)
self.assertEqual(StudentGrade.objects.get(student=self.student, activity=self.activity).grade, 3)
def test_grade_gets_updated_if_we_add_new_grade_part(self):
#Updates the database so grade is calculated
self.test_grade_calculated_when_all_activities_finished()
#Sanity check
sg = StudentGrade()
grade_student(self.activity, self.student, sg)
self.assertNotEqual(sg.grade, 812.0)
GradePart.objects.create(
name = "test-xxx",
required = True,
activity = self.activity,
)
sg = StudentGrade()
grade_student(self.activity, self.student, sg)
self.assertEqual(sg.grade, 812.0)
class TestAutogradeableGradePart(TestFixture):
def test_name_is_set(self):
model = AutogradeableGradePart.objects.create(
activity = self.activity,
autograding_controller = get_autograders()['test']
)
self.assertEqual(model.name, model.autograding_controller)
|
gpl-3.0
| 8,593,671,345,695,744,000
| 32.180556
| 104
| 0.637784
| false
| 3.534024
| true
| false
| false
|
mschmittfull/nbodykit
|
nbodykit/core/source/Grid.py
|
1
|
3878
|
from nbodykit.core import Source
from nbodykit.core.source import Painter
from bigfile import BigFileMPI
from pmesh.pm import RealField, ComplexField, ParticleMesh
import numpy
from pmesh import window
class GridSource(Source):
plugin_name = "Source.Grid"
def __init__(self, path, dataset, attrs={}, painter=Painter()):
# cannot do this in the module because the module file is ran before plugin_manager
# is init.
self.cat = BigFileMPI(comm=self.comm, filename=path)[dataset]
self._attrs = {}
self._attrs.update(self.cat.attrs)
self._attrs.update(attrs)
for key in self.attrs.keys():
self.attrs[key] = numpy.asarray(self.attrs[key])
if self.comm.rank == 0:
self.logger.info("attrs = %s" % self.attrs)
self.painter= painter
self.Nmesh = self.attrs['Nmesh'].squeeze()
if 'shotnoise' in self.attrs:
self.shotnoise = self.attrs['shotnoise'].squeeze()
else:
self.shotnoise = 0
if self.cat.dtype.kind == 'c':
self.isfourier = True
else:
self.isfourier = False
@property
def columns(self):
return []
@property
def attrs(self):
return self._attrs
@classmethod
def fill_schema(cls):
s = cls.schema
        s.description = "read a gridded field from a bigfile dataset"
s.add_argument("path", help="the file path to load the data from")
s.add_argument("dataset", help="dataset")
s.add_argument("attrs", type=dict, help="override attributes from the file")
s.add_argument("painter", type=Painter.from_config, help="painter parameters")
# XXX for painting needs some refactoring
s.add_argument("painter.paintbrush", choices=list(window.methods.keys()), help="paintbrush")
s.add_argument("painter.frho", type=str, help="A python expresion for transforming the real space density field. variables: rho. example: 1 + (rho - 1)**2")
s.add_argument("painter.fk", type=str, help="A python expresion for transforming the fourier space density field. variables: k, kx, ky, kz. example: exp(-(k * 0.5)**2). applied before frho ")
s.add_argument("painter.normalize", type=bool, help="Normalize the field to set mean == 1. Applied before fk.")
s.add_argument("painter.setMean", type=float, help="Set the mean. Applied after normalize.")
s.add_argument("painter.interlaced", type=bool, help="interlaced.")
def read(self, columns):
yield [None for key in columns]
def paint(self, pm):
if self.painter is None:
raise ValueError("No painter is provided")
real = RealField(pm)
if any(pm.Nmesh != self.Nmesh):
pmread = ParticleMesh(BoxSize=pm.BoxSize, Nmesh=(self.Nmesh, self.Nmesh, self.Nmesh),
dtype='f4', comm=self.comm)
else:
pmread = real.pm
ds = self.cat
if self.isfourier:
if self.comm.rank == 0:
self.logger.info("reading complex field")
complex2 = ComplexField(pmread)
assert self.comm.allreduce(complex2.size) == ds.size
start = sum(self.comm.allgather(complex2.size)[:self.comm.rank])
end = start + complex2.size
complex2.unsort(ds[start:end])
complex2.resample(real)
else:
if self.comm.rank == 0:
self.logger.info("reading real field")
real2 = RealField(pmread)
start = sum(self.comm.allgather(real2.size)[:self.comm.rank])
end = start + real2.size
real2.unsort(ds[start:end])
real2.resample(real)
real.shotnoise = self.shotnoise
# apply transformations
self.painter.transform(self, real)
return real
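# Hedged sketch (standalone, no MPI needed): the rank-local slice logic used
# twice in paint() above - each rank reads ds[start:end], where start is the
# total size owned by the lower ranks as reported by comm.allgather.
def _local_slice(sizes, rank):
    start = sum(sizes[:rank])
    return start, start + sizes[rank]
# e.g. three ranks owning 4, 4 and 2 elements of a length-10 dataset:
# _local_slice([4, 4, 2], 1) == (4, 8)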
|
gpl-3.0
| -1,629,912,195,537,198,300
| 34.907407
| 199
| 0.610366
| false
| 3.757752
| false
| false
| false
|
penny4860/SVHN-deep-digit-detector
|
digit_detector/extractor.py
|
1
|
3315
|
#-*- coding: utf-8 -*-
import cv2
import numpy as np
import progressbar
import digit_detector.region_proposal as rp
class Extractor:
def __init__(self, region_proposer, annotator, overlap_calculator):
"""
overlap_calculator : OverlapCalculator
instance of OverlapCalculator class
"""
self._positive_samples = []
self._negative_samples = []
self._positive_labels = []
self._negative_labels = []
self._region_proposer = region_proposer
self._annotator = annotator
self._overlap_calculator = overlap_calculator
def extract_patch(self, image_files, patch_size, positive_overlap_thd, negative_overlap_thd):
bar = progressbar.ProgressBar(widgets=[' [', progressbar.Timer(), '] ', progressbar.Bar(), ' (', progressbar.ETA(), ') ',], maxval=len(image_files)).start()
for i, image_file in enumerate(image_files):
image = cv2.imread(image_file)
# 1. detect regions
candidate_regions = self._region_proposer.detect(image)
candidate_patches = candidate_regions.get_patches(dst_size=patch_size)
candidate_boxes = candidate_regions.get_boxes()
# 2. load ground truth
true_boxes, true_labels = self._annotator.get_boxes_and_labels(image_file)
true_patches = rp.Regions(image, true_boxes).get_patches(dst_size=patch_size)
# 3. calc overlap
overlaps = self._overlap_calculator.calc_ious_per_truth(candidate_boxes, true_boxes)
# 4. add patch to the samples
self._select_positive_patch(candidate_patches, true_labels, overlaps, positive_overlap_thd)
self._append_positive_patch(true_patches, true_labels)
self._select_negative_patch(candidate_patches, overlaps, negative_overlap_thd)
bar.update(i)
bar.finish()
return self._merge_sample()
def _append_positive_patch(self, true_patches, true_labels):
self._positive_samples.append(true_patches)
self._positive_labels.append(true_labels)
def _select_positive_patch(self, candidate_patches, true_labels, overlaps, overlap_thd):
for i, label in enumerate(true_labels):
samples = candidate_patches[overlaps[i,:]>overlap_thd]
labels_ = np.zeros((len(samples), )) + label
self._positive_samples.append(samples)
self._positive_labels.append(labels_)
def _select_negative_patch(self, candidate_patches, overlaps, overlap_thd):
overlaps_max = np.max(overlaps, axis=0)
self._negative_samples.append(candidate_patches[overlaps_max<overlap_thd])
def _merge_sample(self):
negative_samples = np.concatenate(self._negative_samples, axis=0)
negative_labels = np.zeros((len(negative_samples), 1))
positive_samples = np.concatenate(self._positive_samples, axis=0)
positive_labels = np.concatenate(self._positive_labels, axis=0).reshape(-1,1)
samples = np.concatenate([negative_samples, positive_samples], axis=0)
labels = np.concatenate([negative_labels, positive_labels], axis=0)
return samples, labels
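# Hedged sketch: wiring the Extractor together. Every collaborator name and
# path below is an illustrative assumption - only Extractor's constructor and
# extract_patch signature are taken from the code above; load_annotator and
# load_overlap_calculator are hypothetical helpers.
if __name__ == "__main__":
    import glob
    proposer = rp.MserRegionProposer()              # assumed proposer class
    annotator = load_annotator("digitStruct.json")  # hypothetical helper
    calculator = load_overlap_calculator()          # hypothetical helper
    extractor = Extractor(proposer, annotator, calculator)
    samples, labels = extractor.extract_patch(glob.glob("train/*.png"),
                                              patch_size=(32, 32),
                                              positive_overlap_thd=0.6,
                                              negative_overlap_thd=0.3)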
|
mit
| -4,793,058,175,960,898,000
| 40.962025
| 164
| 0.626546
| false
| 4.032847
| false
| false
| false
|
akrherz/iem
|
scripts/climodat/qc_last_used_ob.py
|
1
|
2574
|
"""Report any climodat sites without recent observations."""
# stdlib
import datetime
# Third Party
from pandas.io.sql import read_sql
from pyiem.util import get_dbconn, logger
LOG = logger()
FLOOR = datetime.date.today() - datetime.timedelta(days=365)
def remove_track(iemid):
"""Cull the defunct tracks."""
pgconn = get_dbconn("mesosite")
cursor = pgconn.cursor()
cursor.execute(
"DELETE from station_attributes where iemid = %s and "
"attr = 'TRACKS_STATION'",
(iemid,),
)
cursor.close()
pgconn.commit()
def check_last(station, row):
"""Do the work."""
trackstation, tracknetwork = row["tracks"].split("|")
df = read_sql(
"SELECT max(day) from summary s JOIN stations t on "
"(s.iemid = t.iemid) WHERE t.id = %s and t.network = %s and "
"s.day > %s and (s.max_tmpf is not null or "
"s.pday is not null)",
get_dbconn("iem"),
index_col=None,
params=(trackstation, tracknetwork, FLOOR),
)
lastdate = df.iloc[0]["max"]
if lastdate is not None:
return
LOG.info(
"%s %s %.2fE %.2fN tracks non-reporting %s[%s], removing track",
station,
row["name"],
row["lon"],
row["lat"],
trackstation,
tracknetwork,
)
remove_track(row["iemid"])
def set_offline(iemid):
"""Set the station to being offline."""
pgconn = get_dbconn("mesosite")
cursor = pgconn.cursor()
cursor.execute(
"UPDATE stations SET online = 'f', archive_end = 'TODAY' WHERE "
"iemid = %s",
(iemid,),
)
cursor.close()
pgconn.commit()
def main():
"""Go Main Go."""
sdf = read_sql(
"""
with locs as (
select s.iemid, id, network, value from stations s LEFT
JOIN station_attributes a on (s.iemid = a.iemid and
a.attr = 'TRACKS_STATION'))
select s.id, s.iemid, s.network, st_x(geom) as lon, st_y(geom) as lat,
s.name, l.value as tracks from stations S LEFT JOIN locs l on
(s.iemid = l.iemid) WHERE s.network ~* 'CLIMATE' and
substr(s.id, 3, 4) != '0000' and
substr(s.id, 3, 1) != 'C' ORDER by s.id ASC
""",
get_dbconn("mesosite"),
index_col="id",
)
for station, row in sdf.iterrows():
if row["tracks"] is None:
LOG.info("%s tracks no station, setting offline.", station)
set_offline(row["iemid"])
continue
check_last(station, row)
if __name__ == "__main__":
main()
|
mit
| -500,835,713,754,578,200
| 26.978261
| 78
| 0.560606
| false
| 3.342857
| false
| false
| false
|
mskarbek/pysx
|
pysx.py
|
1
|
1558
|
#!/usr/bin/env python
import base64
import hashlib
from datetime import datetime
import hmac
import binascii
import sys
import json
import requests
import pytz
def hextobyte(hex_str):
bytes = []
hex_str = ''.join( hex_str.split(' ') )
for i in range(0, len(hex_str), 2):
bytes.append( chr( int (hex_str[i:i+2], 16 ) ) )
return ''.join( bytes )
request = {
'Date': datetime.now(pytz.timezone('GMT')).strftime('%a, %d %b %Y %H:%M:%S GMT'),
'Path': '?nodeList',
'Type': 'GET',
'Body': ''
}
pysx = {}
pysx['IP'] = sys.argv[1]
pysx['Key'] = base64.b64decode(sys.argv[2])
pysx['I'] = ''.join(['%02X' % ord(x) for x in pysx['Key']]).strip()[0:40].lower()
pysx['K'] = pysx['Key'][20:40]
pysx['P'] = ''.join(['%02X' % ord(x) for x in pysx['Key']]).strip()[80:84].lower()
pysx['request'] = '{}\n{}\n{}\n{}\n'.format(
request['Type'],
request['Path'],
request['Date'],
hashlib.sha1(request['Body']).hexdigest()
)
pysx['H'] = hmac.new(pysx['K'], pysx['request'], hashlib.sha1).hexdigest()
pysx['A'] = base64.b64encode(hextobyte(pysx['I'] + pysx['H'] + pysx['P']))
headers = {
'Content-Type': 'application/json',
'User-Agent': 'pysx 0.0.1',
'Date': request['Date'],
'Authorization': 'SKY {}'.format(pysx['A'])
}
response = requests.get('https://{}/{}'.format(pysx['IP'], request['Path']), verify = False, headers = headers)
print '\n{}\n'.format(response.request.url)
print '{}\n'.format(response.request.headers)
print '{}\n'.format(response.headers)
print '{}\n'.format(response.text)
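# Hedged sketch: the SKY token construction used above, isolated into one
# function. The key layout (bytes 0..20 = key id, bytes 20..40 = HMAC
# secret, bytes 40..42 = trailing bytes) is inferred from the slicing in
# this script; request_string must be bytes under Python 3.
def sky_token(key_b64, request_string):
    raw = base64.b64decode(key_b64)
    digest = hmac.new(raw[20:40], request_string, hashlib.sha1).digest()
    # token = base64(key id + HMAC-SHA1(canonical request) + trailing bytes)
    return base64.b64encode(raw[:20] + digest + raw[40:42])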
|
apache-2.0
| 244,371,133,018,332,960
| 25.40678
| 111
| 0.59371
| false
| 2.78712
| false
| false
| false
|
WemGL/alchemist
|
alchemist/javaparser.py
|
1
|
4703
|
from alchemist import Parser
import re
class JavaParser(Parser):
def __init__(self, kwargs):
self._file_extension = "Java"
self._current_match = None
self._fields = []
self._classname = ""
Parser.__init__(self, kwargs)
def parse(self):
fh = open(self.file)
comments = []
for line in fh:
if self.matched_class_pattern(line):
self.parse_class_match(comments)
elif self.matched_comment_pattern(line):
self.parse_comment_match(comments)
elif self.matched_field_pattern(line):
self.parse_field_match(comments)
elif self.matched_end_pattern(line):
self.write_constructor()
self.write_accessors_and_mutators()
self.parse_end_match()
self.file.close()
fh.close()
def matched_class_pattern(self, line):
self._current_match = re.compile(r'^C\s(([A-Z](?=[a-z])[a-z]+)+)$').search(line)
return self._current_match is not None
def parse_class_match(self, comments):
self._classname = self._current_match.group(1)
filename = "{}.{}".format(self._classname, self._file_extension.lower())
self.file = open(filename, "w")
self.format_and_write_comments(comments)
print("public class {} {{".format(self._classname), file=self.file)
def format_and_write_comments(self, comments):
if len(comments) > 0:
joined_comments = "\n".join(comments)
print(joined_comments, file=self.file)
comments.clear()
def matched_comment_pattern(self, line):
self._current_match = re.compile(r'^[/]{2}\s.*$').search(line)
return self._current_match is not None
def parse_comment_match(self, comments):
comments.append(self._current_match.group(0))
def matched_field_pattern(self, line):
self._current_match = re.compile(r'^F\s(\b(?:[a-z]+)(?=[A-Z]+)(?:[A-Za-z]+)|[a-z]+\b)\s*((?:[A-Z]?[a-z]+(?:[[]])?))$').search(line)
return self._current_match is not None
def parse_field_match(self, comments):
self.format_and_write_comments(comments)
type = self._current_match.group(2)
identifier = self._current_match.group(1)
field = dict()
field[type] = identifier
self._fields.append(field)
print(" {} {};".format(type, identifier), file=self.file)
def matched_end_pattern(self, line):
self._current_match = re.compile(r'^E$').search(line)
return self._current_match is not None
def write_constructor(self):
match_found = len(self._current_match.group(0)) > 0
if not match_found:
return
self.write_newline()
fields = ", ".join(self.format_type_and_identifier())
print(" public {}({}) {{".format(self._classname, fields), file=self.file)
for identifier in self.get_identifiers():
self.write_initialization_for(identifier)
print(" }", file=self.file)
self.write_newline()
def write_newline(self):
print("", file=self.file, end="\n")
def format_type_and_identifier(self):
return ["{} {}".format(list(field.keys())[0], list(field.values())[0]) for field in self._fields]
def get_identifiers(self):
return [list(field.values())[0] for field in self._fields]
def write_initialization_for(self, identifier):
print(" this.{} = {};".format(identifier, identifier), file=self.file)
def write_accessors_and_mutators(self):
for type_with_identifier in self._fields:
type = list(type_with_identifier.keys())[0]
identifier = list(type_with_identifier.values())[0]
self.write_accessor(type, identifier)
self.write_mutator(type, identifier)
def write_accessor(self, type, identifier):
print(" public {} get{}() {{".format(type, self.to_pascal_case(identifier)), file=self.file)
print(" return {};".format(identifier), file=self.file)
print(" }", file=self.file)
self.write_newline()
def to_pascal_case(self, identifier):
return re.sub(r'^[a-z]', lambda letter: letter.group(0).upper(), identifier)
def write_mutator(self, type, identifier):
print(" public void set{}({} {}) {{".format(self.to_pascal_case(identifier), type, identifier), file=self.file)
print(" this.{} = {};".format(identifier, identifier), file=self.file)
print(" }", file=self.file)
self.write_newline()
def parse_end_match(self):
print("}", file=self.file)
|
gpl-3.0
| -5,193,369,130,521,656,000
| 36.927419
| 139
| 0.586647
| false
| 3.711918
| false
| false
| false
|
vstconsulting/polemarch
|
polemarch/main/migrations/0004_auto_20170710_0857.py
|
1
|
9227
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-07-09 22:57
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
import uuid
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('main', '0003_initial'),
]
operations = [
migrations.CreateModel(
name='History',
fields=[
('id', models.AutoField(max_length=20, primary_key=True, serialize=False)),
('playbook', models.CharField(max_length=256)),
('start_time', models.DateTimeField(default=django.utils.timezone.now)),
('stop_time', models.DateTimeField(blank=True, null=True)),
('raw_args', models.TextField(default='')),
('raw_inventory', models.TextField(default='')),
('status', models.CharField(max_length=50)),
],
options={
'default_related_name': 'history',
},
),
migrations.CreateModel(
name='HistoryLines',
fields=[
('id', models.AutoField(max_length=20, primary_key=True, serialize=False)),
('line', models.TextField(default='')),
('line_number', models.IntegerField(default=0)),
('history', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='raw_history_line', related_query_name='raw_history_line', to='main.History')),
],
options={
'default_related_name': 'raw_history_line',
},
),
migrations.CreateModel(
name='Inventory',
fields=[
('id', models.AutoField(max_length=20, primary_key=True, serialize=False)),
('name', models.CharField(default=uuid.uuid1, max_length=512)),
],
options={
'default_related_name': 'inventories',
},
),
migrations.CreateModel(
name='PeriodicTask',
fields=[
('id', models.AutoField(max_length=20, primary_key=True, serialize=False)),
('name', models.CharField(default=uuid.uuid1, max_length=512)),
('playbook', models.CharField(max_length=256)),
('schedule', models.CharField(max_length=4096)),
('type', models.CharField(max_length=10)),
('inventory', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='periodic_tasks', related_query_name='periodic_tasks', to='main.Inventory')),
],
options={
'default_related_name': 'periodic_tasks',
},
),
migrations.CreateModel(
name='Project',
fields=[
('id', models.AutoField(max_length=20, primary_key=True, serialize=False)),
('name', models.CharField(default=uuid.uuid1, max_length=512)),
('repository', models.CharField(max_length=2048)),
('status', models.CharField(default='NEW', max_length=32)),
],
options={
'default_related_name': 'projects',
},
),
migrations.CreateModel(
name='Task',
fields=[
('id', models.AutoField(max_length=20, primary_key=True, serialize=False)),
('name', models.CharField(default=uuid.uuid1, max_length=256)),
('playbook', models.CharField(max_length=256)),
('project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='tasks', related_query_name='tasks', to='main.Project')),
],
options={
'default_related_name': 'tasks',
},
),
migrations.CreateModel(
name='TypesPermissions',
fields=[
('id', models.AutoField(max_length=20, primary_key=True, serialize=False)),
],
options={
'default_related_name': 'related_objects',
},
),
migrations.RemoveField(
model_name='host',
name='environment',
),
migrations.AddField(
model_name='group',
name='children',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='group',
name='parents',
field=models.ManyToManyField(blank=True, null=True, related_name='groups', related_query_name='childrens', to='main.Group'),
),
migrations.AlterField(
model_name='variable',
name='value',
field=models.CharField(max_length=2048, null=True),
),
migrations.AlterIndexTogether(
name='group',
index_together=set([('children', 'id'), ('children',)]),
),
migrations.DeleteModel(
name='Environment',
),
migrations.AddField(
model_name='typespermissions',
name='groups',
field=models.ManyToManyField(blank=True, null=True, related_name='related_objects', related_query_name='related_objects', to='main.Group'),
),
migrations.AddField(
model_name='typespermissions',
name='history',
field=models.ManyToManyField(blank=True, null=True, related_name='related_objects', related_query_name='related_objects', to='main.History'),
),
migrations.AddField(
model_name='typespermissions',
name='hosts',
field=models.ManyToManyField(blank=True, null=True, related_name='related_objects', related_query_name='related_objects', to='main.Host'),
),
migrations.AddField(
model_name='typespermissions',
name='inventories',
field=models.ManyToManyField(blank=True, null=True, related_name='related_objects', related_query_name='related_objects', to='main.Inventory'),
),
migrations.AddField(
model_name='typespermissions',
name='periodic_tasks',
field=models.ManyToManyField(blank=True, null=True, related_name='related_objects', related_query_name='related_objects', to='main.PeriodicTask'),
),
migrations.AddField(
model_name='typespermissions',
name='projects',
field=models.ManyToManyField(blank=True, null=True, related_name='related_objects', related_query_name='related_objects', to='main.Project'),
),
migrations.AddField(
model_name='typespermissions',
name='tasks',
field=models.ManyToManyField(blank=True, null=True, related_name='related_objects', related_query_name='related_objects', to='main.Task'),
),
migrations.AddField(
model_name='typespermissions',
name='user',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='related_objects', related_query_name='related_objects', to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='project',
name='groups',
field=models.ManyToManyField(blank=True, null=True, related_name='projects', to='main.Group'),
),
migrations.AddField(
model_name='project',
name='hosts',
field=models.ManyToManyField(blank=True, null=True, related_name='projects', to='main.Host'),
),
migrations.AddField(
model_name='project',
name='inventories',
field=models.ManyToManyField(blank=True, null=True, related_name='projects', to='main.Inventory'),
),
migrations.AddField(
model_name='periodictask',
name='project',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='periodic_tasks', related_query_name='periodic_tasks', to='main.Project'),
),
migrations.AddField(
model_name='inventory',
name='groups',
field=models.ManyToManyField(related_name='inventories', to='main.Group'),
),
migrations.AddField(
model_name='inventory',
name='hosts',
field=models.ManyToManyField(related_name='inventories', to='main.Host'),
),
migrations.AddField(
model_name='history',
name='project',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='history', related_query_name='history', to='main.Project'),
),
migrations.AlterIndexTogether(
name='historylines',
index_together=set([('line_number',), ('history',), ('history', 'line_number')]),
),
migrations.AlterIndexTogether(
name='history',
index_together=set([('id', 'project', 'playbook', 'status', 'start_time', 'stop_time')]),
),
]
|
agpl-3.0
| -7,657,088,549,418,526,000
| 42.319249
| 183
| 0.563889
| false
| 4.481302
| false
| false
| false
|
av8ramit/tensorflow
|
tensorflow/contrib/py2tf/converters/side_effect_guards.py
|
1
|
6838
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Adds guards against function calls with side effects.
Only standalone calls are guarded.
WARNING: This mechanism is incomplete. Particularly, it only guards the
arguments passed to functions, and does not account for indirectly modified
state.
Example:
y = tf.layers.dense(x) # Creates TF variable 'foo'
loss = loss(y)
opt.minimize(loss) # indirectly affects 'foo'
z = tf.get_variable('foo') # Indirectly affects `loss` and 'foo'
# Here, `loss` can be guarded. But `z` cannot.
# TODO(mdan): We should probably define a safe mode where we guard everything.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from contextlib import contextmanager
import gast
from tensorflow.contrib.py2tf.pyct import anno
from tensorflow.contrib.py2tf.pyct import ast_util
from tensorflow.contrib.py2tf.pyct import qual_names
from tensorflow.contrib.py2tf.pyct import templates
from tensorflow.contrib.py2tf.pyct import transformer
from tensorflow.contrib.py2tf.pyct.static_analysis.annos import NodeAnno
class SymbolNamer(object):
"""Describes the interface for SideEffectGuardTransformer's namer."""
def new_symbol(self, name_root, reserved_locals):
"""Generate a new unique function_name.
Args:
name_root: String, used as stem in the new name.
reserved_locals: Set(string), additional local symbols that are reserved.
Returns:
String.
"""
raise NotImplementedError()
class SideEffectGuardTransformer(transformer.Base):
"""Adds control dependencies to functions with side effects."""
def __init__(self, context):
super(SideEffectGuardTransformer, self).__init__(context)
# pylint:disable=invalid-name
def _visit_and_reindent(self, nodes):
new_nodes = []
current_dest = new_nodes
alias_map = {}
reindent_requested = False
for n in nodes:
n = self.visit(n)
# NOTE: the order in which these statements execute is important; in
# particular, watch out for ending up with cycles in the AST.
if alias_map:
n = ast_util.rename_symbols(n, alias_map)
if isinstance(n, (list, tuple)):
current_dest.extend(n)
else:
current_dest.append(n)
if anno.hasanno(n, anno.Basic.INDENT_BLOCK_REMAINDER):
reindent_requested = True
new_dest, new_alias_map = anno.getanno(
n, anno.Basic.INDENT_BLOCK_REMAINDER)
anno.delanno(n, anno.Basic.INDENT_BLOCK_REMAINDER)
new_alias_map.update(alias_map)
alias_map = new_alias_map
current_dest = new_dest
if reindent_requested and not current_dest:
# TODO(mdan): There may still be something that could be done.
raise ValueError('Unable to insert statement into the computation flow: '
'it is not followed by any computation which '
'the statement could gate.')
return new_nodes
def visit_FunctionDef(self, node):
node.body = self._visit_and_reindent(node.body)
return node
def visit_With(self, node):
node.body = self._visit_and_reindent(node.body)
return node
def visit_If(self, node):
node.body = self._visit_and_reindent(node.body)
node.orelse = self._visit_and_reindent(node.orelse)
return node
def visit_While(self, node):
node.body = self._visit_and_reindent(node.body)
node.orelse = self._visit_and_reindent(node.orelse)
return node
def visit_Expr(self, node):
self.generic_visit(node)
if isinstance(node.value, gast.Call):
# Patterns of single function calls, like:
# opt.minimize(loss)
# or:
# tf.py_func(...)
# First, attempt to gate future evaluation of args. If that's not
# possible, gate all remaining statements (and that may fail too, see
      # _visit_and_reindent).
args_scope = anno.getanno(node.value, NodeAnno.ARGS_SCOPE)
# NOTE: We can't guard object attributes because they may not be writable.
guarded_args = tuple(
s for s in args_scope.used if not s.is_composite())
# TODO(mdan): Include all arguments which depended on guarded_args too.
# For example, the following will still cause a race:
# tf.assign(a, a + 1)
# b = a + 1
# tf.assign(a, a + 1) # Control deps here should include `b`
# c = b + 1
# Or maybe we should just raise an "unsafe assign" error?
if guarded_args:
# The aliases may need new names to avoid incorrectly making them local.
# TODO(mdan): This is brutal. It will even rename modules - any fix?
need_alias = tuple(
s for s in guarded_args if s not in args_scope.parent.modified)
aliased_new_names = tuple(
qual_names.QN(
self.context.namer.new_symbol(
s.ssf(), args_scope.parent.referenced)) for s in need_alias)
alias_map = dict(zip(need_alias, aliased_new_names))
if len(guarded_args) == 1:
s, = guarded_args
aliased_guarded_args = alias_map.get(s, s)
else:
aliased_guarded_args = gast.Tuple(
[alias_map.get(s, s).ast() for s in guarded_args], None)
template = """
with py2tf_utils.control_dependency_on_returns(tf, call):
aliased_guarded_args = py2tf_utils.alias_tensors(tf, guarded_args)
"""
control_deps_guard = templates.replace(
template,
call=node.value,
aliased_guarded_args=aliased_guarded_args,
guarded_args=guarded_args)[-1]
else:
alias_map = {}
template = """
with py2tf_utils.control_dependency_on_returns(tf, call):
pass
"""
control_deps_guard = templates.replace(template, call=node.value)[-1]
control_deps_guard.body = []
node = control_deps_guard
anno.setanno(node, anno.Basic.INDENT_BLOCK_REMAINDER,
(node.body, alias_map))
return node
# pylint:enable=invalid-name
def transform(node, context):
return SideEffectGuardTransformer(context).visit(node)
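# Hedged sketch (standalone, stdlib-only, Python 3.9+ for ast.unparse): the
# same "wrap a bare call statement in a with-block" rewrite that visit_Expr
# performs above, shown with ast instead of gast; guard() is a placeholder.
import ast as _ast
import textwrap as _textwrap
class _GuardBareCalls(_ast.NodeTransformer):
  def visit_Expr(self, node):
    self.generic_visit(node)
    if not isinstance(node.value, _ast.Call):
      return node
    guard = _ast.With(
        items=[_ast.withitem(
            context_expr=_ast.Call(
                func=_ast.Name(id='guard', ctx=_ast.Load()),
                args=[], keywords=[]),
            optional_vars=None)],
        body=[node])
    return _ast.fix_missing_locations(guard)
def _demo_guard_rewrite():
  tree = _ast.parse(_textwrap.dedent("""
      x = f(1)
      g(x)
  """))
  # only the bare call g(x) gets wrapped; the assignment is left untouched
  return _ast.unparse(_GuardBareCalls().visit(tree))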
|
apache-2.0
| 6,557,334,343,230,137,000
| 35.179894
| 80
| 0.652676
| false
| 3.75508
| false
| false
| false
|
wathen/PhD
|
MHD/FEniCS/MHD/Stabilised/SaddlePointForm/Test/SplitMatrix/ParamTests/MHDfluid.py
|
1
|
13757
|
#!/usr/bin/python
# interpolate scalar gradient onto nedelec space
import petsc4py
import sys
petsc4py.init(sys.argv)
from petsc4py import PETSc
from dolfin import *
Print = PETSc.Sys.Print
# from MatrixOperations import *
import numpy as np
#import matplotlib.pylab as plt
import PETScIO as IO
import common
import scipy
import scipy.io
import time
import BiLinear as forms
import IterOperations as Iter
import MatrixOperations as MO
import CheckPetsc4py as CP
import ExactSol
import Solver as S
import MHDmatrixPrecondSetup as PrecondSetup
import NSprecondSetup
import MHDprec as MHDpreconditioner
import memory_profiler
import gc
import MHDmulti
import MHDmatrixSetup as MHDsetup
#@profile
def foo():
m = 10
errL2u =np.zeros((m-1,1))
errH1u =np.zeros((m-1,1))
errL2p =np.zeros((m-1,1))
errL2b =np.zeros((m-1,1))
errCurlb =np.zeros((m-1,1))
errL2r =np.zeros((m-1,1))
errH1r =np.zeros((m-1,1))
l2uorder = np.zeros((m-1,1))
H1uorder =np.zeros((m-1,1))
l2porder = np.zeros((m-1,1))
l2border = np.zeros((m-1,1))
Curlborder =np.zeros((m-1,1))
l2rorder = np.zeros((m-1,1))
H1rorder = np.zeros((m-1,1))
NN = np.zeros((m-1,1))
DoF = np.zeros((m-1,1))
Velocitydim = np.zeros((m-1,1))
Magneticdim = np.zeros((m-1,1))
Pressuredim = np.zeros((m-1,1))
Lagrangedim = np.zeros((m-1,1))
Wdim = np.zeros((m-1,1))
iterations = np.zeros((m-1,1))
SolTime = np.zeros((m-1,1))
udiv = np.zeros((m-1,1))
MU = np.zeros((m-1,1))
level = np.zeros((m-1,1))
NSave = np.zeros((m-1,1))
Mave = np.zeros((m-1,1))
TotalTime = np.zeros((m-1,1))
nn = 2
dim = 2
ShowResultPlots = 'yes'
split = 'Linear'
MU[0]= 1e0
for xx in xrange(1,m):
print xx
level[xx-1] = xx+ 0
nn = 2**(level[xx-1])
# Create mesh and define function space
nn = int(nn)
NN[xx-1] = nn/2
# parameters["form_compiler"]["quadrature_degree"] = 6
# parameters = CP.ParameterSetup()
mesh = UnitSquareMesh(nn,nn)
order = 1
parameters['reorder_dofs_serial'] = False
Velocity = VectorFunctionSpace(mesh, "CG", order)
Pressure = FunctionSpace(mesh, "DG", order-1)
Magnetic = FunctionSpace(mesh, "N1curl", order)
Lagrange = FunctionSpace(mesh, "CG", order)
W = MixedFunctionSpace([Velocity, Pressure, Magnetic,Lagrange])
# W = Velocity*Pressure*Magnetic*Lagrange
Velocitydim[xx-1] = Velocity.dim()
Pressuredim[xx-1] = Pressure.dim()
Magneticdim[xx-1] = Magnetic.dim()
Lagrangedim[xx-1] = Lagrange.dim()
Wdim[xx-1] = W.dim()
print "\n\nW: ",Wdim[xx-1],"Velocity: ",Velocitydim[xx-1],"Pressure: ",Pressuredim[xx-1],"Magnetic: ",Magneticdim[xx-1],"Lagrange: ",Lagrangedim[xx-1],"\n\n"
dim = [Velocity.dim(), Pressure.dim(), Magnetic.dim(), Lagrange.dim()]
def boundary(x, on_boundary):
return on_boundary
u0, p0,b0, r0, Laplacian, Advection, gradPres,CurlCurl, gradR, NS_Couple, M_Couple = ExactSol.MHD2D(4,1)
bcu = DirichletBC(W.sub(0),u0, boundary)
bcb = DirichletBC(W.sub(2),b0, boundary)
bcr = DirichletBC(W.sub(3),r0, boundary)
# bc = [u0,p0,b0,r0]
bcs = [bcu,bcb,bcr]
FSpaces = [Velocity,Pressure,Magnetic,Lagrange]
(u, b, p, r) = TrialFunctions(W)
(v, c, q, s) = TestFunctions(W)
kappa = 1.0
Mu_m =1e1
MU = 1.0/1
IterType = 'CD'
Split = "Yes"
Saddle = "No"
Stokes = "No"
F_NS = -MU*Laplacian+Advection+gradPres-kappa*NS_Couple
if kappa == 0:
F_M = Mu_m*CurlCurl+gradR -kappa*M_Couple
else:
F_M = Mu_m*kappa*CurlCurl+gradR -kappa*M_Couple
params = [kappa,Mu_m,MU]
# MO.PrintStr("Preconditioning MHD setup",5,"+","\n\n","\n\n")
Hiptmairtol = 1e-5
HiptmairMatrices = PrecondSetup.MagneticSetup(Magnetic, Lagrange, b0, r0, Hiptmairtol, params)
MO.PrintStr("Setting up MHD initial guess",5,"+","\n\n","\n\n")
u_k,p_k,b_k,r_k = common.InitialGuess(FSpaces,[u0,p0,b0,r0],[F_NS,F_M],params,HiptmairMatrices,1e-6,Neumann=Expression(("0","0")),options ="New", FS = "DG")
#plot(p_k, interactive = True)
b_t = TrialFunction(Velocity)
c_t = TestFunction(Velocity)
#print assemble(inner(b,c)*dx).array().shape
#print mat
#ShiftedMass = assemble(inner(mat*b,c)*dx)
#as_vector([inner(b,c)[0]*b_k[0],inner(b,c)[1]*(-b_k[1])])
ones = Function(Pressure)
ones.vector()[:]=(0*ones.vector().array()+1)
# pConst = - assemble(p_k*dx)/assemble(ones*dx)
p_k.vector()[:] += - assemble(p_k*dx)/assemble(ones*dx)
x = Iter.u_prev(u_k,p_k,b_k,r_k)
KSPlinearfluids, MatrixLinearFluids = PrecondSetup.FluidLinearSetup(Pressure, MU)
kspFp, Fp = PrecondSetup.FluidNonLinearSetup(Pressure, MU, u_k)
#plot(b_k)
ns,maxwell,CoupleTerm,Lmaxwell,Lns = forms.MHD2D(mesh, FSpaces,F_M,F_NS, u_k,b_k,params,IterType,"DG",Saddle,Stokes)
RHSform = forms.PicardRHS(mesh, FSpaces, u_k, p_k, b_k, r_k, params,"DG",Saddle,Stokes)
bcu = DirichletBC(Velocity,Expression(("0.0","0.0")), boundary)
bcb = DirichletBC(Magnetic,Expression(("0.0","0.0")), boundary)
bcr = DirichletBC(Lagrange,Expression(("0.0")), boundary)
bcs = [bcu,bcb,bcr]
parameters['linear_algebra_backend'] = 'uBLAS'
SetupType = 'Matrix'
BC = MHDsetup.BoundaryIndices(mesh)
eps = 1.0 # error measure ||u-u_k||
tol = 1.0E-4 # tolerance
iter = 0 # iteration counter
maxiter = 40 # max no of iterations allowed
SolutionTime = 0
outer = 0
# parameters['linear_algebra_backend'] = 'uBLAS'
# FSpaces = [Velocity,Magnetic,Pressure,Lagrange]
if IterType == "CD":
MO.PrintStr("Setting up PETSc "+SetupType,2,"=","\n","\n")
Alin = MHDsetup.Assemble(W,ns,maxwell,CoupleTerm,Lns,Lmaxwell,RHSform,bcs+BC, "Linear",IterType)
Fnlin,b = MHDsetup.Assemble(W,ns,maxwell,CoupleTerm,Lns,Lmaxwell,RHSform,bcs+BC, "NonLinear",IterType)
A = Fnlin+Alin
A,b = MHDsetup.SystemAssemble(FSpaces,A,b,SetupType,IterType)
u = b.duplicate()
u_is = PETSc.IS().createGeneral(range(Velocity.dim()))
NS_is = PETSc.IS().createGeneral(range(Velocity.dim()+Pressure.dim()))
M_is = PETSc.IS().createGeneral(range(Velocity.dim()+Pressure.dim(),W.dim()))
OuterTol = 1e-5
InnerTol = 1e-5
NSits =0
Mits =0
TotalStart =time.time()
SolutionTime = 0
while eps > tol and iter < maxiter:
iter += 1
MO.PrintStr("Iter "+str(iter),7,"=","\n\n","\n\n")
AssembleTime = time.time()
if IterType == "CD":
MO.StrTimePrint("MHD CD RHS assemble, time: ", time.time()-AssembleTime)
b = MHDsetup.Assemble(W,ns,maxwell,CoupleTerm,Lns,Lmaxwell,RHSform,bcs+BC, "CD",IterType)
else:
MO.PrintStr("Setting up PETSc "+SetupType,2,"=","\n","\n")
if iter == 1:
Alin = MHDsetup.Assemble(W,ns,maxwell,CoupleTerm,Lns,Lmaxwell,RHSform,bcs+BC, "Linear",IterType)
Fnlin,b = MHDsetup.Assemble(W,ns,maxwell,CoupleTerm,Lns,Lmaxwell,RHSform,bcs+BC, "NonLinear",IterType)
A = Fnlin+Alin
A,b = MHDsetup.SystemAssemble(FSpaces,A,b,SetupType,IterType)
u = b.duplicate()
else:
Fnline,b = MHDsetup.Assemble(W,ns,maxwell,CoupleTerm,Lns,Lmaxwell,RHSform,bcs+BC, "NonLinear",IterType)
A = Fnlin+Alin
A,b = MHDsetup.SystemAssemble(FSpaces,A,b,SetupType,IterType)
# AA, bb = assemble_system(maxwell+ns+CoupleTerm, (Lmaxwell + Lns) - RHSform, bcs)
# A,b = CP.Assemble(AA,bb)
# if iter == 1:
MO.StrTimePrint("MHD total assemble, time: ", time.time()-AssembleTime)
kspFp, Fp = PrecondSetup.FluidNonLinearSetup(Pressure, MU, u_k)
print "Inititial guess norm: ", u.norm()
u = b.duplicate()
#A,Q
if IterType == 'Full':
n = FacetNormal(mesh)
mat = as_matrix([[b_k[1]*b_k[1],-b_k[1]*b_k[0]],[-b_k[1]*b_k[0],b_k[0]*b_k[0]]])
F = CP.Scipy2PETSc(Fnlin[0])
a = params[2]*inner(grad(b_t), grad(c_t))*dx(W.mesh()) + inner((grad(b_t)*u_k),c_t)*dx(W.mesh()) +(1/2)*div(u_k)*inner(c_t,b_t)*dx(W.mesh()) - (1/2)*inner(u_k,n)*inner(c_t,b_t)*ds(W.mesh())+kappa/Mu_m*inner(mat*b_t,c_t)*dx(W.mesh())
ShiftedMass = assemble(a)
bcu.apply(ShiftedMass)
kspF = NSprecondSetup.LSCKSPnonlinear(F)
else:
F = CP.Scipy2PETSc(Fnlin[0])
kspF = NSprecondSetup.LSCKSPnonlinear(F)
stime = time.time()
u, mits,nsits = S.solve(A,b,u,params,W,'Direct',IterType,OuterTol,InnerTol,HiptmairMatrices,Hiptmairtol,KSPlinearfluids, Fp,kspF)
Soltime = time.time()- stime
Mits += mits
NSits += nsits
SolutionTime += Soltime
u1, p1, b1, r1, eps= Iter.PicardToleranceDecouple(u,x,FSpaces,dim,"2",iter)
p1.vector()[:] += - assemble(p1*dx)/assemble(ones*dx)
u_k.assign(u1)
p_k.assign(p1)
b_k.assign(b1)
r_k.assign(r1)
uOld= np.concatenate((u_k.vector().array(),p_k.vector().array(),b_k.vector().array(),r_k.vector().array()), axis=0)
x = IO.arrayToVec(uOld)
XX= np.concatenate((u_k.vector().array(),p_k.vector().array(),b_k.vector().array(),r_k.vector().array()), axis=0)
SolTime[xx-1] = SolutionTime/iter
NSave[xx-1] = (float(NSits)/iter)
Mave[xx-1] = (float(Mits)/iter)
iterations[xx-1] = iter
TotalTime[xx-1] = time.time() - TotalStart
dim = [Velocity.dim(), Pressure.dim(), Magnetic.dim(),Lagrange.dim()]
#
# ExactSolution = [u0,p0,b0,r0]
# errL2u[xx-1], errH1u[xx-1], errL2p[xx-1], errL2b[xx-1], errCurlb[xx-1], errL2r[xx-1], errH1r[xx-1] = Iter.Errors(XX,mesh,FSpaces,ExactSolution,order,dim, "DG")
#
# if xx > 1:
# l2uorder[xx-1] = np.abs(np.log2(errL2u[xx-2]/errL2u[xx-1]))
# H1uorder[xx-1] = np.abs(np.log2(errH1u[xx-2]/errH1u[xx-1]))
#
# l2porder[xx-1] = np.abs(np.log2(errL2p[xx-2]/errL2p[xx-1]))
#
# l2border[xx-1] = np.abs(np.log2(errL2b[xx-2]/errL2b[xx-1]))
# Curlborder[xx-1] = np.abs(np.log2(errCurlb[xx-2]/errCurlb[xx-1]))
#
# l2rorder[xx-1] = np.abs(np.log2(errL2r[xx-2]/errL2r[xx-1]))
# H1rorder[xx-1] = np.abs(np.log2(errH1r[xx-2]/errH1r[xx-1]))
#
#
#
#
# import pandas as pd
#
#
#
# LatexTitles = ["l","DoFu","Dofp","V-L2","L2-order","V-H1","H1-order","P-L2","PL2-order"]
# LatexValues = np.concatenate((level,Velocitydim,Pressuredim,errL2u,l2uorder,errH1u,H1uorder,errL2p,l2porder), axis=1)
# LatexTable = pd.DataFrame(LatexValues, columns = LatexTitles)
# pd.set_option('precision',3)
# LatexTable = MO.PandasFormat(LatexTable,"V-L2","%2.4e")
# LatexTable = MO.PandasFormat(LatexTable,'V-H1',"%2.4e")
# LatexTable = MO.PandasFormat(LatexTable,"H1-order","%1.2f")
# LatexTable = MO.PandasFormat(LatexTable,'L2-order',"%1.2f")
# LatexTable = MO.PandasFormat(LatexTable,"P-L2","%2.4e")
# LatexTable = MO.PandasFormat(LatexTable,'PL2-order',"%1.2f")
# print LatexTable
#
#
# print "\n\n Magnetic convergence"
# MagneticTitles = ["l","B DoF","R DoF","B-L2","L2-order","B-Curl","HCurl-order"]
# MagneticValues = np.concatenate((level,Magneticdim,Lagrangedim,errL2b,l2border,errCurlb,Curlborder),axis=1)
# MagneticTable= pd.DataFrame(MagneticValues, columns = MagneticTitles)
# pd.set_option('precision',3)
# MagneticTable = MO.PandasFormat(MagneticTable,"B-Curl","%2.4e")
# MagneticTable = MO.PandasFormat(MagneticTable,'B-L2',"%2.4e")
# MagneticTable = MO.PandasFormat(MagneticTable,"L2-order","%1.2f")
# MagneticTable = MO.PandasFormat(MagneticTable,'HCurl-order',"%1.2f")
# print MagneticTable
#
import pandas as pd
print "\n\n Iteration table"
if IterType == "Full":
IterTitles = ["l","DoF","AV solve Time","Total picard time","picard iterations","Av Outer its","Av Inner its",]
else:
IterTitles = ["l","DoF","AV solve Time","Total picard time","picard iterations","Av NS iters","Av M iters"]
IterValues = np.concatenate((level,Wdim,SolTime,TotalTime,iterations,Mave,NSave),axis=1)
IterTable= pd.DataFrame(IterValues, columns = IterTitles)
if IterType == "Full":
IterTable = MO.PandasFormat(IterTable,'Av Outer its',"%2.1f")
IterTable = MO.PandasFormat(IterTable,'Av Inner its',"%2.1f")
else:
IterTable = MO.PandasFormat(IterTable,'Av NS iters',"%2.1f")
IterTable = MO.PandasFormat(IterTable,'Av M iters',"%2.1f")
print IterTable.to_latex()
print " \n Outer Tol: ",OuterTol, "Inner Tol: ", InnerTol
# # # if (ShowResultPlots == 'yes'):
# plot(u_k)
# plot(interpolate(u0,Velocity))
#
# plot(p_k)
#
# plot(interpolate(p0,Pressure))
#
# plot(b_k)
# plot(interpolate(b0,Magnetic))
#
# plot(r_k)
# plot(interpolate(r0,Lagrange))
#
# interactive()
interactive()
foo()
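# Hedged sketch (standalone): the Picard fixed-point loop that drives foo(),
# stripped to its control flow. G stands in for one linearised MHD solve and
# abs() for the update norm computed by Iter.PicardToleranceDecouple.
def picard(G, x0, tol=1.0E-4, maxiter=40):
    x, eps, it = x0, 1.0, 0
    while eps > tol and it < maxiter:
        it += 1
        x_new = G(x)
        eps = abs(x_new - x)
        x = x_new
    return x, it
# e.g. picard(math.cos, 1.0) converges to the fixed point of x = cos(x)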
|
mit
| -2,145,468,271,991,474,000
| 36.080863
| 248
| 0.582758
| false
| 2.689015
| false
| false
| false
|
makerdao/maker.py
|
pymaker/vault.py
|
1
|
2652
|
# This file is part of Maker Keeper Framework.
#
# Copyright (C) 2017-2018 reverendus
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from web3 import Web3
from pymaker import Contract, Address, Transact
class DSVault(Contract):
"""A client for the `DSVault` contract.
You can find the source code of the `DSVault` contract here:
<https://github.com/dapphub/ds-vault>.
Attributes:
        web3: An instance of `Web3` from `web3.py`.
address: Ethereum address of the `DSVault` contract.
"""
abi = Contract._load_abi(__name__, 'abi/DSVault.abi')
bin = Contract._load_bin(__name__, 'abi/DSVault.bin')
def __init__(self, web3: Web3, address: Address):
assert(isinstance(web3, Web3))
assert(isinstance(address, Address))
self.web3 = web3
self.address = address
self._contract = self._get_contract(web3, self.abi, address)
@staticmethod
def deploy(web3: Web3):
"""Deploy a new instance of the `DSVault` contract.
Args:
            web3: An instance of `Web3` from `web3.py`.
Returns:
A `DSVault` class instance.
"""
return DSVault(web3=web3, address=Contract._deploy(web3, DSVault.abi, DSVault.bin, []))
def authority(self) -> Address:
"""Return the current `authority` of a `DSAuth`-ed contract.
Returns:
The address of the current `authority`.
"""
return Address(self._contract.call().authority())
def set_authority(self, address: Address) -> Transact:
"""Set the `authority` of a `DSAuth`-ed contract.
Args:
address: The address of the new `authority`.
Returns:
A :py:class:`pymaker.Transact` instance, which can be used to trigger the transaction.
"""
assert(isinstance(address, Address))
return Transact(self, self.web3, self.abi, self.address, self._contract, 'setAuthority', [address.address])
def __repr__(self):
return f"DSVault('{self.address}')"
|
agpl-3.0
| 8,156,267,412,930,917,000
| 33
| 115
| 0.654223
| false
| 3.82684
| false
| false
| false
|
redeyser/IceCash2
|
clientEgais.py
|
1
|
39275
|
#!/usr/bin/python
# -*- coding: utf-8
import httplib, urllib,time
import requests
import xml.etree.ElementTree as etree
import re
from icelog import *
from my import curdate2my
from datetime import datetime
import dbIceCash as db
ns={\
"c":"http://fsrar.ru/WEGAIS/Common",\
"wbr":"http://fsrar.ru/WEGAIS/TTNInformF2Reg",\
"pref":"http://fsrar.ru/WEGAIS/ProductRef_v2",\
"oref":"http://fsrar.ru/WEGAIS/ClientRef_v2",\
"rc":"http://fsrar.ru/WEGAIS/ReplyClient_v2",\
"ns":"http://fsrar.ru/WEGAIS/WB_DOC_SINGLE_01",\
"wb":"http://fsrar.ru/WEGAIS/TTNSingle_v2",\
"xsi":"http://www.w3.org/2001/XMLSchema-instance",\
"wt":"http://fsrar.ru/WEGAIS/ConfirmTicket",
"qp":"http://fsrar.ru/WEGAIS/QueryParameters",\
'tc':"http://fsrar.ru/WEGAIS/Ticket",\
"rst":"http://fsrar.ru/WEGAIS/ReplyRests_v2",\
'wa':"http://fsrar.ru/WEGAIS/ActTTNSingle_v2",\
'ttn':"http://fsrar.ru/WEGAIS/ReplyNoAnswerTTN",\
'qp':"http://fsrar.ru/WEGAIS/InfoVersionTTN"
}
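# Hedged sketch: how the namespace map above is used when parsing EGAIS
# replies with ElementTree; the XML snippet and the path are illustrative.
def _demo_ns_lookup():
    xml = ("<ns:Documents xmlns:ns='http://fsrar.ru/WEGAIS/WB_DOC_SINGLE_01'>"
           "<ns:Owner><ns:FSRAR_ID>12345</ns:FSRAR_ID></ns:Owner>"
           "</ns:Documents>")
    root = etree.fromstring(xml)
    return root.find("ns:Owner/ns:FSRAR_ID", ns).text  # -> '12345'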
XML_VERSION=u"""<?xml version=\"1.0\" encoding=\"UTF-8\"?>
<ns:Documents Version=\"1.0\"
xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"
xmlns:ns=\"http://fsrar.ru/WEGAIS/WB_DOC_SINGLE_01\"
xmlns:qp=\"http://fsrar.ru/WEGAIS/InfoVersionTTN\">
<ns:Owner>
<ns:FSRAR_ID>%fsrar_id%</ns:FSRAR_ID>
</ns:Owner>
<ns:Document>
<ns:InfoVersionTTN>
<qp:ClientId>%fsrar_id%</qp:ClientId>
<qp:WBTypeUsed>%VERSION%</qp:WBTypeUsed>
</ns:InfoVersionTTN>
</ns:Document>
</ns:Documents>
"""
XML_GET_CLIENTS=u"""<?xml version=\"1.0\" encoding=\"UTF-8\"?>
<ns:Documents Version=\"1.0\"
xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"
xmlns:ns=\"http://fsrar.ru/WEGAIS/WB_DOC_SINGLE_01\"
xmlns:oref=\"http://fsrar.ru/WEGAIS/ClientRef_v2\"
xmlns:qp=\"http://fsrar.ru/WEGAIS/QueryParameters\">
<ns:Owner>
<ns:FSRAR_ID>%fsrar_id%</ns:FSRAR_ID>
</ns:Owner>
<ns:Document>
<ns:QueryClients_v2>
<qp:Parameters>
<qp:Parameter>
<qp:Name>ИНН</qp:Name>
<qp:Value>%INN%</qp:Value>
</qp:Parameter>
</qp:Parameters>
</ns:QueryClients_v2>
</ns:Document>
</ns:Documents>
"""
XML_SEND_WAYBILL_HEAD="""<?xml version=\"1.0\" encoding=\"UTF-8\"?>
<ns:Documents Version="1.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:ns= "http://fsrar.ru/WEGAIS/WB_DOC_SINGLE_01"
xmlns:c="http://fsrar.ru/WEGAIS/Common"
xmlns:oref="http://fsrar.ru/WEGAIS/ClientRef_v2"
xmlns:pref="http://fsrar.ru/WEGAIS/ProductRef_v2"
xmlns:wb="http://fsrar.ru/WEGAIS/TTNSingle_v2">
<ns:Owner>
<ns:FSRAR_ID>%fsrar_id%</ns:FSRAR_ID>
</ns:Owner>
<ns:Document>
<ns:WayBill_v2>
<wb:Identity>%identity%</wb:Identity>
<wb:Header>
<wb:NUMBER>%number%</wb:NUMBER>
<wb:Date>%dt%</wb:Date>
<wb:ShippingDate>%dt%</wb:ShippingDate>
<wb:Type>%type%</wb:Type>
<wb:Shipper>
<oref:UL>
<oref:INN>%inn%</oref:INN><oref:KPP>%kpp%</oref:KPP><oref:ClientRegId>%regid%</oref:ClientRegId>
<oref:ShortName>%name%</oref:ShortName><oref:FullName>%name%</oref:FullName>
<oref:address>
<oref:Country>643</oref:Country><oref:RegionCode>42</oref:RegionCode>
<oref:description></oref:description>
</oref:address>
</oref:UL>
</wb:Shipper>
<wb:Consignee>
<oref:UL>
<oref:INN>%send_inn%</oref:INN><oref:KPP>%send_kpp%</oref:KPP><oref:ClientRegId>%send_regid%</oref:ClientRegId>
<oref:ShortName>%send_name%</oref:ShortName><oref:FullName>%send_name%</oref:FullName>
<oref:address>
<oref:Country>643</oref:Country><oref:RegionCode>42</oref:RegionCode>
<oref:description></oref:description>
</oref:address>
</oref:UL>
</wb:Consignee>
<wb:Transport>
<wb:TRAN_TYPE></wb:TRAN_TYPE>
<wb:TRAN_COMPANY></wb:TRAN_COMPANY>
<wb:TRAN_TRAILER></wb:TRAN_TRAILER>
<wb:TRAN_CAR></wb:TRAN_CAR>
<wb:TRAN_CUSTOMER></wb:TRAN_CUSTOMER>
<wb:TRAN_DRIVER></wb:TRAN_DRIVER>
<wb:TRAN_LOADPOINT></wb:TRAN_LOADPOINT>
<wb:TRAN_UNLOADPOINT></wb:TRAN_UNLOADPOINT>
<wb:TRAN_FORWARDER></wb:TRAN_FORWARDER>
<wb:TRAN_REDIRECT></wb:TRAN_REDIRECT>
</wb:Transport>
<wb:Base>waybill doc</wb:Base>
<wb:Note>NOTE</wb:Note>
</wb:Header>
<wb:Content>
%content%
</wb:Content>
</ns:WayBill_v2>
</ns:Document>
</ns:Documents>
"""
XML_SEND_WAYBILL_CONTENT="""
<wb:Position>
<wb:Quantity>%quantity%</wb:Quantity><wb:Price>%price%</wb:Price><wb:Identity>%identity%</wb:Identity>
<wb:InformF1><pref:RegId>%inform_a%</pref:RegId></wb:InformF1>
<wb:InformF2><pref:InformF2Item><pref:F2RegId>%inform_b%</pref:F2RegId></pref:InformF2Item></wb:InformF2>
<wb:Product>
<pref:Type>%pref_type%</pref:Type><pref:FullName>%shortname%</pref:FullName>
<pref:ShortName>%shortname%</pref:ShortName>
<pref:AlcCode>%alccode%</pref:AlcCode>
<pref:Capacity>%capacity%</pref:Capacity>
<pref:AlcVolume>%alcvolume%</pref:AlcVolume>
<pref:ProductVCode>%productvcode%</pref:ProductVCode>
<pref:UnitType>%packet%</pref:UnitType>
<pref:Producer>
<oref:UL>
<oref:INN>%inn%</oref:INN><oref:KPP>%kpp%</oref:KPP>
<oref:ClientRegId>%regid%</oref:ClientRegId><oref:ShortName>%oref_shortname%</oref:ShortName>
<oref:FullName>%oref_shortname%</oref:FullName>
<oref:address>
<oref:Country>643</oref:Country><oref:RegionCode>42</oref:RegionCode><oref:description></oref:description>
</oref:address>
</oref:UL>
</pref:Producer>
</wb:Product>
</wb:Position>
"""
XML_SEND_ACT="""<?xml version=\"1.0\" encoding=\"UTF-8\"?>
<ns:Documents Version=\"1.0\"
xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"
xmlns:ns= \"http://fsrar.ru/WEGAIS/WB_DOC_SINGLE_01\"
xmlns:oref=\"http://fsrar.ru/WEGAIS/ClientRef_v2\"
xmlns:pref=\"http://fsrar.ru/WEGAIS/ProductRef_v2\"
xmlns:wa= \"http://fsrar.ru/WEGAIS/ActTTNSingle_v2\">
<ns:Owner>
<ns:FSRAR_ID>%fsrar_id%</ns:FSRAR_ID>
</ns:Owner>
<ns:Document>
<ns:WayBillAct_v2>
<wa:Header>
<wa:IsAccept>%accept%</wa:IsAccept>
<wa:ACTNUMBER>%iddoc%</wa:ACTNUMBER>
<wa:ActDate>%date%</wa:ActDate>
<wa:WBRegId>%wb_RegId%</wa:WBRegId>
<wa:Note></wa:Note>
</wa:Header>
<wa:Content>
%content%
</wa:Content>
</ns:WayBillAct_v2>
</ns:Document>
</ns:Documents>
"""
XML_ACT_CONTENT="""
<wa:Position>
\t<wa:Identity>%identity%</wa:Identity>
\t<wa:InformF2RegId>%wb_RegId%</wa:InformF2RegId>
\t<wa:RealQuantity>%real%</wa:RealQuantity>
</wa:Position>
"""
XML_CHECK="""<?xml version="1.0" encoding="UTF-8"?>
<Cheque
inn="%inn%"
datetime="%datetime%"
kpp="%kpp%"
kassa="%kassa%"
address="%address%"
name="%name%"
number="%ncheck%"
shift="1"
>
%bottles%
</Cheque>
"""
XML_BOTTLE="""
\t<Bottle barcode="%barcode%"
\tean="%ean%" price="%price%" %litrag%/>
"""
XML_GET_OSTAT="""<?xml version="1.0" encoding="UTF-8"?>
<ns:Documents Version="1.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:ns="http://fsrar.ru/WEGAIS/WB_DOC_SINGLE_01"
xmlns:qp="http://fsrar.ru/WEGAIS/QueryParameters">
<ns:Owner>
<ns:FSRAR_ID>%fsrar_id%</ns:FSRAR_ID>
</ns:Owner>
<ns:Document>
<ns:QueryRests_v2></ns:QueryRests_v2>
</ns:Document>
</ns:Documents>
"""
XML_GET_REPLY="""<?xml version="1.0" encoding="UTF-8"?>
<ns:Documents Version="1.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:ns="http://fsrar.ru/WEGAIS/WB_DOC_SINGLE_01"
xmlns:qp="http://fsrar.ru/WEGAIS/QueryParameters"
>
<ns:Owner>
<ns:FSRAR_ID>%fsrar_id%</ns:FSRAR_ID>
</ns:Owner>
<ns:Document>
<ns:QueryResendDoc>
<qp:Parameters>
<qp:Parameter>
<qp:Name>WBREGID</qp:Name>
<qp:Value>%ttn%</qp:Value>
</qp:Parameter>
</qp:Parameters>
</ns:QueryResendDoc>
</ns:Document>
</ns:Documents>
"""
XML_GET_NATTN="""<?xml version="1.0" encoding="UTF-8"?>
<ns:Documents Version="1.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:ns="http://fsrar.ru/WEGAIS/WB_DOC_SINGLE_01"
xmlns:qp="http://fsrar.ru/WEGAIS/QueryParameters">
<ns:Owner>
<ns:FSRAR_ID>%fsrar_id%</ns:FSRAR_ID>
</ns:Owner>
<ns:Document>
<ns:QueryNATTN>
<qp:Parameters>
<qp:Parameter>
<qp:Name>КОД</qp:Name>
<qp:Value>%fsrar_id%</qp:Value>
</qp:Parameter>
</qp:Parameters>
</ns:QueryNATTN>
</ns:Document>
</ns:Documents>
"""
def findUL(node):
result = node.find("oref:UL",ns)
if result == None:
result = node.find("oref:FO",ns)
return result
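# findUL() picks whichever organisation node is present: "oref:UL" (a legal
# entity) or, failing that, "oref:FO" (assumed here to denote an individual
# entrepreneur / natural person in the EGAIS schema).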
class EgaisClient:
def __init__(self,server_ip,server_port,db):
self.server_ip=server_ip
self.server_port=server_port
self.db=db
def assm(self,page):
return "http://%s:%d%s" % (self.server_ip,self.server_port,page)
def _delete(self,page):
print "delete %s" % page
requests.delete(page)
return True
def _get(self,page):
self.data=""
try:
r = requests.get(page)
if r.status_code!=200:
print "error_status"
return False
self.data=r.text.encode("utf8")
except:
return False
return True
def _post(self,page,params):
self.data=""
r=requests.post(page, data=params)
self.data=r.content
if r.status_code!=200:
print "error_status"
return False
return True
def _sendfile(self,url,pname,fname):
files = {pname : open(fname, 'rb')}
r = requests.post(url, files=files)
if r.status_code!=200:
print "error_status"
self.data=r.content
return False
self.data=r.content
return True
def _connect(self):
if self._get(self.assm("/")):
r=re.search("FSRAR-RSA-(\d+)",self.data)
if not r:
return False
self.fsrar_id=r.group(1)
return True
else:
self.fsrar_id=""
return False
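    # _connect() scrapes the UTM home page for the FSRAR id; an illustrative
    # banner fragment it would match (the value is made up):
    #   ... FSRAR-RSA-030000123456 ...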
def _sendxml(self,fname,page,xml):
f=open(fname,"w")
f.write(xml)
f.close()
return self._sendfile(self.assm(page),'xml_file',fname)
def _send_places(self):
if not self._connect():
return False
xml=XML_GET_CLIENTS.replace("%INN%",self.db.sets['inn'])
xml=xml.replace("%fsrar_id%",self.fsrar_id).encode("utf8")
r=self._sendxml("client.xml","/opt/in/QueryClients_v2",xml)
return r
def _send_ostat(self):
if not self._connect():
return False
        # XML_GET_OSTAT has no %INN% placeholder, so only %fsrar_id% needs substituting
        xml=XML_GET_OSTAT.replace("%fsrar_id%",self.fsrar_id).encode("utf8")
r=self._sendxml("rest.xml","/opt/in/QueryRests_v2",xml)
return r
def _send_reply(self,ttn):
if not self._connect():
return False
xml=XML_GET_REPLY.replace("%ttn%",ttn)
xml=xml.replace("%fsrar_id%",self.fsrar_id).encode("utf8")
r=self._sendxml("reply.xml","/opt/in/QueryResendDoc",xml)
return r
def _send_nattn(self):
if not self._connect():
return False
#self.db._truncate(db.TB_EGAIS_DOCS_NEED)
xml=XML_GET_NATTN.replace("%fsrar_id%",self.fsrar_id)
#.encode("utf8")
r=self._sendxml("nattn.xml","/opt/in/QueryNATTN",xml)
return r
def _send_version(self,version):
if not self._connect():
return False
if version==1:
ver="WayBill"
else:
ver="WayBill_v2"
xml=XML_VERSION.replace("%VERSION%",ver)
xml=xml.replace("%fsrar_id%",self.fsrar_id).encode("utf8")
r=self._sendxml("version.xml","/opt/in/InfoVersionTTN",xml)
return r
def _get_ticket(self):
self.sign=""
#print self.data
tree=etree.fromstring(self.data)
url = tree.find("url")
sign = tree.find("sign")
if url==None:
return ""
if sign!=None:
self.sign=sign.text
return url.text
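    # _get_ticket() expects a reply whose root element carries <url> and,
    # optionally, <sign> children; an illustrative (made-up) reply:
    #   <A><url>/opt/out/ReplyNATTN/1234</url><sign>MIIB...</sign></A>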
def _send_act(self,id):
if not self._connect():
return False
xml=self._make_act(id)
if xml=="":
return False
r=self._sendxml("client.xml","/opt/in/WayBillAct_v2",xml)
reply_id=self._get_ticket()
if reply_id!="":
self.db.egais_docs_hd_upd(id,{'status':3,'reply_id':reply_id})
return r
def _send_return(self,id):
if not self._connect():
return False
xml=self._make_return(id)
if xml=="":
return False
r=self._sendxml("return.xml","/opt/in/WayBill_v2",xml)
#print r
reply_id=self._get_ticket()
if reply_id!="":
self.db.egais_docs_hd_upd(id,{'status':3,'reply_id':reply_id})
return r
def _send_check(self,_type,ncheck,pos):
if not self._connect():
return False
xml=self._make_check(_type,ncheck,pos)
if xml=="":
return False
print "-"*80
print xml
print "-"*80
#return False
r=self._sendxml("cheque.xml","/xml",xml)
self.url=self._get_ticket()
if self.url=="" or self.sign=="":
return False
return r
def _send_move(self,id):
if not self._connect():
return False
xml=self._make_move(id)
if xml=="":
return False
r=self._sendxml("move.xml","/opt/in/WayBill_v2",xml)
reply_id=self._get_ticket()
if reply_id!="":
self.db.egais_docs_hd_upd(id,{'status':3,'reply_id':reply_id})
return r
def _create_return(self,id,idd):
if self.db.egais_get_mydoc(id):
struct={\
"type":1,\
"status":1,\
"ns_FSRAR_ID" :self.db.egais_doc_hd['recv_RegId'],\
"wb_Identity" :"0",\
"ns_typedoc" :"WayBill_v2",\
"wb_Date" :curdate2my(),\
"wb_ShippingDate" :curdate2my(),\
"wb_Type" :"WBReturnFromMe",\
"wb_UnitType" :self.db.egais_doc_hd['wb_UnitType'],\
"send_INN" :self.db.egais_doc_hd['recv_INN'],\
"send_KPP" :self.db.egais_doc_hd['recv_KPP'],\
"send_ShortName" :self.db.egais_doc_hd['recv_ShortName'],\
"send_RegId" :self.db.egais_doc_hd['recv_RegId'],\
"recv_INN" :self.db.egais_doc_hd['send_INN'],\
"recv_KPP" :self.db.egais_doc_hd['send_KPP'],\
"recv_ShortName" :self.db.egais_doc_hd['send_ShortName'],\
"recv_RegId" :self.db.egais_doc_hd['send_RegId'],\
}
id=self.db.egais_docs_hd_add(struct)
if id==0:
return False
self.db.egais_docs_hd_upd(id,{"wb_Identity":str(id),"wb_NUMBER":u"В"+self.db.sets['idplace'].rjust(3,"0")+str(id).rjust(4,"0")} )
for rec in self.db.egais_doc_ct:
if int(rec['id'])==idd:
struct=rec
struct["iddoc"]=id
struct["wb_Identity"]="1"
struct["pref_Type"]=u"АП"
del struct['id']
self.db.egais_docs_ct_add(struct)
return True
else:
return False
def _delete_in(self,id):
for d in self.data_url_in:
if id==d['idd']:
self._delete(d['url'])
def _get_docs_in(self):
self.data_url_in=[]
if self._get(self.assm("/opt/in")):
try:
d=etree.fromstring(self.data)
except:
return False
for t in d:
if t.tag!='url':
continue
if t.attrib.has_key('replyId'):
id=t.attrib['replyId']
else:
id=""
url=t.text
self.data_url_in.append({'idd':id,'url':url})
return True
else:
return False
def _get_docs_out(self):
self.data_url=[]
if self._get(self.assm("/opt/out")):
try:
d=etree.fromstring(self.data)
except:
return False
for t in d:
if t.tag!='url':
continue
if t.attrib.has_key('replyId'):
id=t.attrib['replyId']
else:
id=""
url=t.text
self.data_url.append({'idd':id,'url':url})
return True
else:
return False
def _dodoc(self):
res={}
for d in self.data_url:
id=d['idd']
url=d['url']
if not self._get(url):
continue
addLog('/var/log/egaisLog.xml',self.data)
tree=etree.fromstring(self.data)
doc = tree.find("ns:Document",ns)
if doc==None:
continue
typedoc=doc[0].tag
#print typedoc
if typedoc=="{%s}ConfirmTicket" % ns["ns"]:
if self._addConfirmTicket(url,id,tree):
if res.has_key("ConfirmTicket"):
res['ConfirmTicket']+=1
else:
res['ConfirmTicket']=1
print "ConfirmTicket"
self._delete_in(id)
self._delete(url)
pass
if typedoc=="{%s}Ticket" % ns["ns"]:
if self._addTicket(url,id,tree):
if res.has_key("Ticket"):
res['Ticket']+=1
else:
res['Ticket']=1
print "Ticket"
self._delete_in(id)
pass
self._delete(url)
if typedoc=="{%s}ReplyClient_v2" % ns["ns"]:
if res.has_key("ReplyClient"):
res['ReplyClient']+=1
else:
res['ReplyClient']=1
print "ReplyClient"
self._addplaces(doc[0])
self._delete_in(id)
self._delete(url)
if typedoc=="{%s}ReplyRests_v2" % ns["ns"]:
res['ReplyRests.Products']=self._reload_ostat(doc[0])
self._delete_in(id)
self._delete(url)
if typedoc=="{%s}WayBill_v2" % ns["ns"]:
if self._addWayBill(url,id,tree):
if res.has_key("WayBill"):
res['WayBill']+=1
else:
res['WayBill']=1
self._delete(url)
pass
if typedoc=="{%s}WayBillAct" % ns["ns"] or typedoc=="{%s}WayBillAct_v2" % ns["ns"]:
if self._addWayBillAct(url,id,tree):
if res.has_key("WayBillAct"):
res['WayBillAct']+=1
else:
res['WayBillAct']=1
self._delete(url)
pass
if typedoc=="{%s}TTNInformF2Reg" % ns["ns"]:
if self._addInformBReg(url,id,tree):
if res.has_key("TTNInformBReg"):
res['TTNInformBReg']+=1
else:
res['TTNInformBReg']=1
self._delete(url)
pass
if typedoc=="{%s}ReplyNoAnswerTTN" % ns["ns"]:
res['ReplyNoAnswerTTN']=self._read_nattn(doc[0])
self._delete_in(id)
self._delete(url)
return res
def _recalc(self):
docs=self.db.egais_get_mydocs(0,None,None,None,None)
for d in docs:
iddoc=int(d['id'])
tree=etree.fromstring(d['xml_inform'].encode('utf8'))
if tree=="":
continue
if not self.db.egais_get_mydoc(iddoc):
continue
content=self._readhead_InformBReg(tree)
for pos in content.findall("wbr:Position",ns):
self.struct={}
id=self._readcontent_InformBReg(pos)
self.db.egais_docs_ct_updId(iddoc,id,self.struct)
return True
def _addplaces(self,tree):
clients=tree.find("rc:Clients",ns)
if clients==None:
print "no clients"
return
struct={}
self.db.egais_places_clear()
for t in clients.findall("rc:Client",ns):
t=t.find("oref:OrgInfoV2",ns)
t = findUL(t)
a=t.find("oref:address",ns)
for f in self.db.tb_egais_places.record_add:
r=t.find("oref:"+f,ns)
if r!=None:
struct[f]=r.text
else:
r=a.find("oref:"+f,ns)
if r!=None:
struct[f]=r.text
self.db.egais_places_add(struct)
def _setstruct(self,base,tag,field=None):
t=base.find(tag,ns)
if field==None:
field=tag.replace(":","_")
try:
self.struct[field]=t.text
return True
except:
print "error:%s" % tag
return False
def _readhead_WayBill(self,tree):
owner=tree.find("ns:Owner",ns)
doc=tree.find("ns:Document",ns)
doc=doc[0]
header=doc.find("wb:Header",ns)
node=header.find("wb:Shipper",ns)
shipper=findUL(node)
node=header.find("wb:Consignee",ns)
consignee=findUL(node)
self._setstruct(owner,"ns:FSRAR_ID")
self._setstruct(doc,"wb:Identity")
self._setstruct(header,"wb:NUMBER")
self._setstruct(header,"wb:Date")
self._setstruct(header,"wb:ShippingDate")
self._setstruct(header,"wb:Type")
self._setstruct(header,"wb:UnitType")
self._setstruct(shipper,"oref:INN","send_INN")
self._setstruct(shipper,"oref:KPP","send_KPP")
self._setstruct(shipper,"oref:ShortName","send_ShortName")
self._setstruct(shipper,"oref:ClientRegId","send_RegId")
self._setstruct(consignee,"oref:INN","recv_INN")
self._setstruct(consignee,"oref:KPP","recv_KPP")
self._setstruct(consignee,"oref:ShortName","recv_ShortName")
self._setstruct(consignee,"oref:ClientRegId","recv_RegId")
content=doc.find("wb:Content",ns)
return content
def _readhead_InformBReg(self,tree):
owner=tree.find("ns:Owner",ns)
doc=tree.find("ns:Document",ns)
doc=doc[0]
header=doc.find("wbr:Header",ns)
shipper=header.find("wbr:Shipper",ns)
shipper=findUL(shipper)
consignee=header.find("wbr:Consignee",ns)
consignee=findUL(consignee)
self._setstruct(shipper,"oref:ClientRegId","send_RegId")
self._setstruct(consignee,"oref:ClientRegId","recv_RegId")
self._setstruct(header,"wbr:WBNUMBER")
self._setstruct(header,"wbr:WBRegId","tc_RegId")
self._setstruct(header,"wbr:Identity")
content=doc.find("wbr:Content",ns)
return content
def _readhead_Ticket(self,tree):
doc=tree.find("ns:Document",ns)
doc=doc[0]
self._setstruct(doc,"tc:RegID")
oper=doc.find("tc:OperationResult",ns)
if oper!=None:
self._setstruct(oper,"tc:OperationResult")
self._setstruct(oper,"tc:OperationName")
regid=self.struct['tc_RegID']
del self.struct['tc_RegID']
return regid
def _readhead_ConfirmTicket(self,tree):
doc=tree.find("ns:Document",ns)
doc=doc[0]
header=doc.find("wt:Header",ns)
self._setstruct(header,"wt:WBRegId")
self._setstruct(header,"wt:IsConfirm")
regid=self.struct['wt_WBRegId']
del self.struct['wt_WBRegId']
return regid
def _readhead_WayBillAct(self,tree):
doc=tree.find("ns:Document",ns)
doc=doc[0]
header=doc.find("wa:Header",ns)
self._setstruct(header,"wa:WBRegId")
self._setstruct(header,"wa:IsAccept")
regid=self.struct['wa_WBRegId']
del self.struct['wa_WBRegId']
return regid
def _readcontent_WayBill(self,pos):
informA=pos.find("wb:InformF1",ns)
informB=pos.find("wb:InformF2",ns)
informB=informB.find("pref:InformF2Item",ns)
product=pos.find("wb:Product",ns)
node=product.find("pref:Producer",ns)
producer=findUL(node)
self._setstruct(pos,"wb:Identity")
self._setstruct(pos,"wb:Quantity")
self._setstruct(pos,"wb:Price")
self._setstruct(pos,"wb:Pack_ID")
self._setstruct(pos,"wb:Party")
self._setstruct(informA,"pref:RegId")
self._setstruct(informB,"pref:F2RegId","pref_BRegId")
self._setstruct(product,"pref:Type")
if not self._setstruct(product,"pref:ShortName"):
self._setstruct(product,"pref:FullName","pref_ShortName")
self._setstruct(product,"pref:AlcCode")
self._setstruct(product,"pref:Capacity")
self._setstruct(product,"pref:AlcVolume")
self._setstruct(product,"pref:ProductVCode")
self._setstruct(producer,"oref:ClientRegId")
self._setstruct(producer,"oref:INN")
self._setstruct(producer,"oref:KPP")
self._setstruct(producer,"oref:ShortName")
def _readcontent_InformBReg(self,pos):
self._setstruct(pos,"wbr:Identity")
self._setstruct(pos,"wbr:InformF2RegId","wbr_InformBRegId")
id=self.struct['wbr_Identity']
del self.struct['wbr_Identity']
return id
def _read_nattn(self,doc):
content=doc.find("ttn:ttnlist",ns)
self.db._truncate(db.TB_EGAIS_DOCS_NEED)
findtag=("ttn:WbRegID","ttn:ttnNumber","ttn:ttnDate","ttn:Shipper")
res=0
for t in content.findall("ttn:NoAnswer",ns):
struct={}
for tag in findtag:
val=t.find(tag,ns)
if val!=None:
struct[tag.replace(":","_")] = val.text
res+=1
self.db._insert(db.TB_EGAIS_DOCS_NEED,struct)
return res
def _reload_ostat(self,tree):
replacing = {
'rst_InformARegId':'rst_InformF1RegId',
'rst_InformBRegId':'rst_InformF2RegId',
}
products=tree.find("rst:Products",ns)
if products==None:
print "no products"
return
res=0
self.db.egais_ostat_clear()
for t in products.findall("rst:StockPosition",ns):
n=t.find("rst:Product",ns)
p=n.find("pref:Producer",ns)
# UL FO ...
ul=findUL(p)
a=ul.find("oref:address",ns)
struct={}
for f in self.db.tb_egais_ostat.record_add:
if f in replacing:
rf=replacing[f]
else:
rf=f
xf=rf.replace("_",":")
for x in (t,n,p,a):
r=x.find(xf,ns)
if r!=None:
break
if r!=None:
struct[f]=r.text
res+=1
#print struct
self.db.egais_ostat_add(struct)
return res
def _addTicket(self,url,reply_id,tree):
self.struct={}
id=self._readhead_Ticket(tree)
if not self.db.egais_find_replyId(reply_id):
return False
if self.db.egais_doc[3] == 5:
return True
if self.struct.has_key("tc_OperationResult"):
if self.struct['tc_OperationResult'] == 'Accepted':
self.struct['status'] = 5
else:
self.struct['status'] = 6
else:
self.struct['status'] = 4
self.struct['xml_ticket']= self.data
self.struct['reply_id'] = reply_id
self.struct['ns_typedoc']= "Ticket"
id=self.db.egais_doc[0]
return self.db.egais_docs_hd_upd(id,self.struct)
def _addConfirmTicket(self,url,reply_id,tree):
self.struct={}
regid=self._readhead_ConfirmTicket(tree)
if not self.db.egais_find_ttn(regid):
return False
if self.struct.has_key("wt_IsConfirm"):
if self.struct['wt_IsConfirm'] == 'Accepted':
self.struct['status'] = 5
else:
self.struct['status'] = 6
self.struct['xml_ticket']= self.data
self.struct['ns_typedoc']= "ConfirmTicket"
id=self.db.egais_doc[0]
return self.db.egais_docs_hd_upd(id,self.struct)
def _addWayBillAct(self,url,reply_id,tree):
self.struct={}
regid=self._readhead_WayBillAct(tree)
if not self.db.egais_find_ttn(regid):
return False
if self.struct.has_key("wa_IsAccept"):
if self.struct['wa_IsAccept'] == 'Accepted':
self.struct['status'] = 5
else:
self.struct['status'] = 6
self.struct['xml_ticket']= self.data
self.struct['ns_typedoc']= "WayBillAct_v2"
self.struct['wt_IsConfirm']=self.struct['wa_IsAccept']
del self.struct['wa_IsAccept']
id=self.db.egais_doc[0]
return self.db.egais_docs_hd_upd(id,self.struct)
def _addWayBill(self,url,id,tree):
self.struct={}
self.struct['type'] = 0
self.struct['status'] = 0
self.struct['xml_doc'] = self.data
self.struct['reply_id'] = id
self.struct['url'] = url
self.struct['ns_typedoc']= "WayBill_v2"
content=self._readhead_WayBill(tree)
if self.db.egais_docs_find(0,self.struct["recv_RegId"],self.struct["send_RegId"],self.struct["wb_NUMBER"]):
            #This may be worth enabling: if a document arrives with the same number, it is overwritten
            #!!! Needs verification!
self.db.egais_docs_hd_del(self.db.egais_doc[0])
if self.db.egais_get_mydoc(self.db.egais_doc[0]):
return False
id=self.db.egais_docs_hd_add(self.struct)
if id==0:
return False
for pos in content.findall("wb:Position",ns):
self.struct={'iddoc':id}
self._readcontent_WayBill(pos)
self.struct['real_Quantity']=self.struct['wb_Quantity']
self.db.egais_docs_ct_add(self.struct)
return True
def _addInformBReg(self,url,id,tree):
self.struct={}
content=self._readhead_InformBReg(tree)
if not self.db.egais_find_replyId(id) or id=="":
print "error:replyid %s" % id
if not self.db.egais_docs_find(None,self.struct["recv_RegId"],self.struct["send_RegId"],self.struct["wbr_WBNUMBER"]):
print "not found doc"
return False
if self.db.egais_doc[3] not in (0,3,5,6) :
print "error:doc status=%d" % self.db.egais_doc[3]
#return False
iddoc=self.db.egais_doc[0]
tc_regId=self.struct['tc_RegId']
self.struct={}
if self.db.egais_doc[3]==0:
self.struct['status']=1
if self.db.egais_doc[3]==3:
self.struct['status']=4
self.struct['xml_inform']=self.data
self.struct['url']=url
#self.struct['reply_id'] = id
self.struct['ns_typedoc']= "InformF2Reg"
self.struct['tc_RegId']=tc_regId
#print self.struct;
self.db.egais_docs_hd_upd(iddoc,self.struct)
for pos in content.findall("wbr:Position",ns):
self.struct={}
id=self._readcontent_InformBReg(pos)
self.db.egais_docs_ct_updId(iddoc,id,self.struct)
return True
def _addReplyNoAnswerTTN(self,url,id,tree):
self.struct={}
content=self._readhead_InformBReg(tree)
def _make_act(self,id):
if not self.db.egais_get_mydoc(id):
return ""
xml=XML_SEND_ACT.replace("%fsrar_id%",self.fsrar_id)
xml=xml.replace("%accept%",self.db.egais_doc_hd['answer'])
xml=xml.replace("%iddoc%",str(self.db.sets['idplace'])+"_"+self.db.egais_doc_hd['id'])
xml=xml.replace("%date%",curdate2my())
xml=xml.replace("%wb_RegId%",self.db.egais_doc_hd['tc_RegId'])
XML=xml
XML_CONTENT=""
use_content=False
for ct in self.db.egais_doc_ct:
if ct['real_Quantity']!=ct['wb_Quantity']:
use_content=True
xml=XML_ACT_CONTENT.replace("%identity%",ct['wb_Identity'])
xml=xml.replace("%real%",ct['real_Quantity'])
xml=xml.replace("%wb_RegId%",str(ct['wbr_InformBRegId']))
XML_CONTENT+=xml
if not use_content:
XML_CONTENT=""
XML=XML.replace("%content%",XML_CONTENT)
return XML.encode("utf8")
def _make_return(self,id):
if not self.db.egais_get_mydoc(id):
return ""
        # NOTE: this mapping is defined but never applied in this method;
        # compare _make_move(), which reads wbr_InformF2RegId directly.
        replacing = {
            'wbr_InformBRegId':'wbr_InformF2RegId',
        }
xml=XML_SEND_WAYBILL_HEAD.replace("%fsrar_id%",self.fsrar_id)
rlist={ "%identity%" :"wb_Identity",\
"%number%" :"wb_NUMBER",\
"%dt%" :"wb_Date",\
"%inn%" :"send_INN",\
"%kpp%" :"send_KPP",\
"%regid%" :"send_RegId",\
"%name%" :"send_ShortName",\
"%send_inn%" :"recv_INN",\
"%send_kpp%" :"recv_KPP",\
"%send_regid%" :"recv_RegId",\
"%send_name%" :"recv_ShortName",\
}
for k,v in rlist.items():
if v.find('ShortName')!=-1:
self.db.egais_doc_hd[v]=self.db.egais_doc_hd[v][:64]
xml=xml.replace(k,self.db.egais_doc_hd[v])
xml=xml.replace( "%type%","WBReturnFromMe")
rlist={ "%identity%" :"wb_Identity",\
"%quantity%" :"real_Quantity",\
"%price%" :"wb_Price",\
"%inform_a%" :"pref_RegId",\
"%inform_b%" :"wbr_InformBRegId",\
"%shortname%" :"pref_ShortName",\
"%alccode%" :"pref_AlcCode",\
"%capacity%" :"pref_Capacity",\
"%alcvolume%" :"pref_AlcVolume",\
"%productvcode%":"pref_ProductVCode",\
"%regid%" :"oref_ClientRegId",\
"%inn%" :"oref_INN",\
"%kpp%" :"oref_KPP",\
"%oref_shortname%" :"oref_ShortName",\
}
XML_CONTENT=""
for ct in self.db.egais_doc_ct:
xml2=XML_SEND_WAYBILL_CONTENT
for k,v in rlist.items():
if ct[v]!=None and ct[v]!='None':
if v=='pref_ShortName':
ct[v]=ct[v][:64]
xml2=xml2.replace(k,ct[v])
else:
xml2=xml2.replace(k,"None")
t=v.replace("_",":")
t1="<%s>" % t
t2="</%s>" % t
xml2=xml2.replace(t1+"None"+t2,"")
xml2=xml2.replace("%pref_type%",u"АП")
xml2=xml2.replace("%packet%",self.db.egais_doc_hd["wb_UnitType"])
XML_CONTENT+="\n"+xml2
XML=xml.replace("%content%",XML_CONTENT)
return XML.encode("utf8")
def _make_check(self,_type,ncheck,pos):
dttm=datetime.now().strftime(format="%d%m%y%H%M")
xml=XML_CHECK.replace("%inn%",self.db.sets['inn'])
xml=xml.replace("%kpp%",self.db.sets['kpp'])
xml=xml.replace("%name%",self.db.sets['orgname'])
xml=xml.replace("%address%",self.db.sets['placename'])
xml=xml.replace("%kassa%",self.db.sets['nkassa'])
xml=xml.replace("%datetime%",dttm)
xml=xml.replace("%ncheck%",str(ncheck))
XML=xml
XML_CONTENT=""
for i in range(len(pos)):
p=pos[i]
if not (p['storno']==0 and p['p_alco']==1):
continue
xml=XML_BOTTLE.replace("%barcode%",p['barcode'])
xml=xml.replace("%ean%",p['p_shk'])
if p['paramf1']>0 and _type==1:
price=-p['paramf1']
else:
price=p['paramf1']
xml=xml.replace("%price%",price.__format__(".2f"))
if p['p_litrag']!=0:
xml=xml.replace("%litrag%","volume=\"%s\"" % p['p_litrag'].__format__(".4f"))
else:
xml=xml.replace("%litrag%","")
XML_CONTENT+=xml
XML=XML.replace("%bottles%",XML_CONTENT)
return XML.encode("utf8")
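    # Illustrative shape of one element of `pos` as consumed above (field
    # names are taken from the code; the values are made up):
    #   {'storno': 0, 'p_alco': 1, 'barcode': '22N00001TEST...',
    #    'p_shk': '4600000000000', 'paramf1': 499.0, 'p_litrag': 0.5}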
def _make_move(self,id):
if not self.db.egais_get_mydoc(id):
return ""
xml=XML_SEND_WAYBILL_HEAD.replace("%fsrar_id%",self.fsrar_id)
rlist={ "%identity%" :"wb_Identity",\
"%number%" :"wb_NUMBER",\
"%dt%" :"wb_Date",\
"%packet%" :"wb_UnitType",\
"%inn%" :"send_INN",\
"%kpp%" :"send_KPP",\
"%regid%" :"send_RegId",\
"%name%" :"send_ShortName",\
"%send_inn%" :"recv_INN",\
"%send_kpp%" :"recv_KPP",\
"%send_regid%" :"recv_RegId",\
"%send_name%" :"recv_ShortName",\
}
for k,v in rlist.items():
if v.find('ShortName')!=-1:
self.db.egais_doc_hd[v]=self.db.egais_doc_hd[v][:64]
xml=xml.replace(k,self.db.egais_doc_hd[v])
xml=xml.replace( "%type%","WBReturnFromMe")
rlist={ "%identity%" :"wb_Identity",\
"%quantity%" :"real_Quantity",\
"%price%" :"wb_Price",\
"%inform_a%" :"pref_RegId",\
"%inform_b%" :"wbr_InformF2RegId",\
"%shortname%" :"pref_ShortName",\
"%alccode%" :"pref_AlcCode",\
"%capacity%" :"pref_Capacity",\
"%alcvolume%" :"pref_AlcVolume",\
"%productvcode%":"pref_ProductVCode",\
"%regid%" :"oref_ClientRegId",\
"%inn%" :"oref_INN",\
"%kpp%" :"oref_KPP",\
"%oref_shortname%" :"oref_ShortName",\
}
XML_CONTENT=""
for ct in self.db.egais_doc_ct:
xml2=XML_SEND_WAYBILL_CONTENT
for k,v in rlist.items():
if ct[v]!=None and ct[v]!='None':
if v=='pref_ShortName':
ct[v]=ct[v][:64]
xml2=xml2.replace(k,ct[v])
else:
xml2=xml2.replace(k,"None")
t=v.replace("_",":")
t1="<%s>" % t
t2="</%s>" % t
xml2=xml2.replace(t1+"None"+t2,"")
xml2=xml2.replace("%pref_type%",u"АП")
XML_CONTENT+="\n"+xml2
XML=xml.replace("%content%",XML_CONTENT)
return XML.encode("utf8")
|
gpl-3.0
| -6,480,574,202,339,699,000
| 32.91342
| 141
| 0.52724
| false
| 3.126097
| false
| false
| false
|
MichSchli/Mindblocks
|
model/module/module_repository.py
|
1
|
4319
|
import json
import os
from model.component.component_specification import ComponentSpecification
from model.component.subgraph_component import SubgraphComponentModel
from model.module.graph_prototype.graph_prototype_specifications import GraphPrototypeSpecifications
from model.module.module_model import ModuleModel
from model.module.toolbox_item.toolbox_item_model import ToolboxItemModel
from model.module.toolbox_item.toolbox_item_specifications import ToolboxItemSpecifications
from observables.observable_dictionary import ObservableDict
from packages.graph.subgraph_component import SubgraphComponent
class ModuleRepository:
modules = None
component_dir = '/home/michael/Projects/Mindblocks/packages'
prototype_repository = None
graph_prototype_repository = None
def __init__(self, prototype_repository, graph_prototype_repository):
self.prototype_repository = prototype_repository
self.graph_prototype_repository = graph_prototype_repository
self.modules = ObservableDict()
def load_basic_modules(self):
for package_name in self.get_all_package_names():
module = self.load_basic_module_by_package_name(package_name)
self.modules.append(module)
def load_basic_module_by_package_name(self, package_name):
manifest = self.load_package_manifest(package_name)
module = ModuleModel(manifest['name'])
prototypes = self.prototype_repository.load_prototypes(manifest)
module.extend_prototypes(prototypes)
return module
def get_prototype_by_id(self, id):
for module in self.get_basic_modules(None):
for prototype in module.components:
print(prototype.get_unique_identifier())
print(id)
if prototype.get_unique_identifier() == id:
print(prototype)
return prototype
print("NOT FOUND")
for module in self.get_canvas_modules(None):
for prototype in module.components:
if prototype.get_unique_identifier() == id:
return prototype
return None
def get_prototype(self, specifications):
basic_prototype = self.prototype_repository.get(specifications)
if basic_prototype is not None:
return basic_prototype
graph_prototype_specifications = GraphPrototypeSpecifications()
graph_prototype_specifications.graph_identifier = specifications.name
graph_prototype_specifications.canvas_identifier = specifications.canvas
graph_prototype = self.graph_prototype_repository.get(graph_prototype_specifications)[0]
return graph_prototype
def get_basic_modules(self, specifications):
return list(self.modules.elements.values())
def get_canvas_modules(self, specifications):
prototypes = self.graph_prototype_repository.get_all()
modules = {}
for prototype in prototypes:
if prototype.canvas_identifier not in modules:
modules[prototype.canvas_identifier] = ModuleModel(prototype.canvas_identifier)
modules[prototype.canvas_identifier].components.append(prototype)
return list(modules.values())
'''
Logic for loading modules:
'''
def get_all_package_names(self):
all_subitems = os.listdir(self.component_dir)
filtered_subitems = [item for item in all_subitems if self.filter_name(item)]
absolute_subitems = [(d,os.path.join(self.component_dir, d)) for d in filtered_subitems]
subfolders = [d[0] for d in absolute_subitems if os.path.isdir(d[1])]
return subfolders
def filter_name(self, name):
if name.startswith('.') or name.startswith('_'):
return False
return True
def load_manifest(self, path):
manifest_path = os.path.join(path, 'manifest.json')
with open(manifest_path) as data_file:
manifest = json.load(data_file)
manifest['path'] = path
return manifest
def load_package_manifest(self, package_name):
manifest_path = os.path.join(self.component_dir, package_name)
manifest = self.load_manifest(manifest_path)
manifest['package'] = package_name
return manifest
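# Illustrative manifest.json as consumed above (only the 'name' key is read
# directly here; 'path' and 'package' are filled in by the loader itself):
#   {"name": "My package"}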
|
gpl-3.0
| -4,040,655,532,269,729,000
| 36.241379
| 100
| 0.683492
| false
| 4.371457
| false
| false
| false
|
batermj/algorithm-challenger
|
code-analysis/programming_anguage/python/source_codes/Python3.8.0/Python-3.8.0/Lib/test/test_pathlib.py
|
1
|
97139
|
import collections.abc
import io
import os
import sys
import errno
import pathlib
import pickle
import socket
import stat
import tempfile
import unittest
from unittest import mock
from test import support
from test.support import TESTFN, FakePath
try:
import grp, pwd
except ImportError:
grp = pwd = None
class _BaseFlavourTest(object):
def _check_parse_parts(self, arg, expected):
f = self.flavour.parse_parts
sep = self.flavour.sep
altsep = self.flavour.altsep
actual = f([x.replace('/', sep) for x in arg])
self.assertEqual(actual, expected)
if altsep:
actual = f([x.replace('/', altsep) for x in arg])
self.assertEqual(actual, expected)
def test_parse_parts_common(self):
check = self._check_parse_parts
sep = self.flavour.sep
# Unanchored parts.
check([], ('', '', []))
check(['a'], ('', '', ['a']))
check(['a/'], ('', '', ['a']))
check(['a', 'b'], ('', '', ['a', 'b']))
# Expansion.
check(['a/b'], ('', '', ['a', 'b']))
check(['a/b/'], ('', '', ['a', 'b']))
check(['a', 'b/c', 'd'], ('', '', ['a', 'b', 'c', 'd']))
# Collapsing and stripping excess slashes.
check(['a', 'b//c', 'd'], ('', '', ['a', 'b', 'c', 'd']))
check(['a', 'b/c/', 'd'], ('', '', ['a', 'b', 'c', 'd']))
# Eliminating standalone dots.
check(['.'], ('', '', []))
check(['.', '.', 'b'], ('', '', ['b']))
check(['a', '.', 'b'], ('', '', ['a', 'b']))
check(['a', '.', '.'], ('', '', ['a']))
# The first part is anchored.
check(['/a/b'], ('', sep, [sep, 'a', 'b']))
check(['/a', 'b'], ('', sep, [sep, 'a', 'b']))
check(['/a/', 'b'], ('', sep, [sep, 'a', 'b']))
# Ignoring parts before an anchored part.
check(['a', '/b', 'c'], ('', sep, [sep, 'b', 'c']))
check(['a', '/b', '/c'], ('', sep, [sep, 'c']))
class PosixFlavourTest(_BaseFlavourTest, unittest.TestCase):
flavour = pathlib._posix_flavour
def test_parse_parts(self):
check = self._check_parse_parts
# Collapsing of excess leading slashes, except for the double-slash
# special case.
check(['//a', 'b'], ('', '//', ['//', 'a', 'b']))
check(['///a', 'b'], ('', '/', ['/', 'a', 'b']))
check(['////a', 'b'], ('', '/', ['/', 'a', 'b']))
# Paths which look like NT paths aren't treated specially.
check(['c:a'], ('', '', ['c:a']))
check(['c:\\a'], ('', '', ['c:\\a']))
check(['\\a'], ('', '', ['\\a']))
def test_splitroot(self):
f = self.flavour.splitroot
self.assertEqual(f(''), ('', '', ''))
self.assertEqual(f('a'), ('', '', 'a'))
self.assertEqual(f('a/b'), ('', '', 'a/b'))
self.assertEqual(f('a/b/'), ('', '', 'a/b/'))
self.assertEqual(f('/a'), ('', '/', 'a'))
self.assertEqual(f('/a/b'), ('', '/', 'a/b'))
self.assertEqual(f('/a/b/'), ('', '/', 'a/b/'))
# The root is collapsed when there are redundant slashes
# except when there are exactly two leading slashes, which
# is a special case in POSIX.
self.assertEqual(f('//a'), ('', '//', 'a'))
self.assertEqual(f('///a'), ('', '/', 'a'))
self.assertEqual(f('///a/b'), ('', '/', 'a/b'))
# Paths which look like NT paths aren't treated specially.
self.assertEqual(f('c:/a/b'), ('', '', 'c:/a/b'))
self.assertEqual(f('\\/a/b'), ('', '', '\\/a/b'))
self.assertEqual(f('\\a\\b'), ('', '', '\\a\\b'))
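# Background note: POSIX allows a pathname beginning with exactly two slashes
# to be interpreted in an implementation-defined manner, which is why the
# tests above preserve '//' but collapse three or more leading slashes to '/'.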
class NTFlavourTest(_BaseFlavourTest, unittest.TestCase):
flavour = pathlib._windows_flavour
def test_parse_parts(self):
check = self._check_parse_parts
# First part is anchored.
check(['c:'], ('c:', '', ['c:']))
check(['c:/'], ('c:', '\\', ['c:\\']))
check(['/'], ('', '\\', ['\\']))
check(['c:a'], ('c:', '', ['c:', 'a']))
check(['c:/a'], ('c:', '\\', ['c:\\', 'a']))
check(['/a'], ('', '\\', ['\\', 'a']))
# UNC paths.
check(['//a/b'], ('\\\\a\\b', '\\', ['\\\\a\\b\\']))
check(['//a/b/'], ('\\\\a\\b', '\\', ['\\\\a\\b\\']))
check(['//a/b/c'], ('\\\\a\\b', '\\', ['\\\\a\\b\\', 'c']))
# Second part is anchored, so that the first part is ignored.
check(['a', 'Z:b', 'c'], ('Z:', '', ['Z:', 'b', 'c']))
check(['a', 'Z:/b', 'c'], ('Z:', '\\', ['Z:\\', 'b', 'c']))
# UNC paths.
check(['a', '//b/c', 'd'], ('\\\\b\\c', '\\', ['\\\\b\\c\\', 'd']))
# Collapsing and stripping excess slashes.
check(['a', 'Z://b//c/', 'd/'], ('Z:', '\\', ['Z:\\', 'b', 'c', 'd']))
# UNC paths.
check(['a', '//b/c//', 'd'], ('\\\\b\\c', '\\', ['\\\\b\\c\\', 'd']))
# Extended paths.
check(['//?/c:/'], ('\\\\?\\c:', '\\', ['\\\\?\\c:\\']))
check(['//?/c:/a'], ('\\\\?\\c:', '\\', ['\\\\?\\c:\\', 'a']))
check(['//?/c:/a', '/b'], ('\\\\?\\c:', '\\', ['\\\\?\\c:\\', 'b']))
# Extended UNC paths (format is "\\?\UNC\server\share").
check(['//?/UNC/b/c'], ('\\\\?\\UNC\\b\\c', '\\', ['\\\\?\\UNC\\b\\c\\']))
check(['//?/UNC/b/c/d'], ('\\\\?\\UNC\\b\\c', '\\', ['\\\\?\\UNC\\b\\c\\', 'd']))
# Second part has a root but not drive.
check(['a', '/b', 'c'], ('', '\\', ['\\', 'b', 'c']))
check(['Z:/a', '/b', 'c'], ('Z:', '\\', ['Z:\\', 'b', 'c']))
check(['//?/Z:/a', '/b', 'c'], ('\\\\?\\Z:', '\\', ['\\\\?\\Z:\\', 'b', 'c']))
def test_splitroot(self):
f = self.flavour.splitroot
self.assertEqual(f(''), ('', '', ''))
self.assertEqual(f('a'), ('', '', 'a'))
self.assertEqual(f('a\\b'), ('', '', 'a\\b'))
self.assertEqual(f('\\a'), ('', '\\', 'a'))
self.assertEqual(f('\\a\\b'), ('', '\\', 'a\\b'))
self.assertEqual(f('c:a\\b'), ('c:', '', 'a\\b'))
self.assertEqual(f('c:\\a\\b'), ('c:', '\\', 'a\\b'))
# Redundant slashes in the root are collapsed.
self.assertEqual(f('\\\\a'), ('', '\\', 'a'))
self.assertEqual(f('\\\\\\a/b'), ('', '\\', 'a/b'))
self.assertEqual(f('c:\\\\a'), ('c:', '\\', 'a'))
self.assertEqual(f('c:\\\\\\a/b'), ('c:', '\\', 'a/b'))
# Valid UNC paths.
self.assertEqual(f('\\\\a\\b'), ('\\\\a\\b', '\\', ''))
self.assertEqual(f('\\\\a\\b\\'), ('\\\\a\\b', '\\', ''))
self.assertEqual(f('\\\\a\\b\\c\\d'), ('\\\\a\\b', '\\', 'c\\d'))
# These are non-UNC paths (according to ntpath.py and test_ntpath).
# However, command.com says such paths are invalid, so it's
# difficult to know what the right semantics are.
self.assertEqual(f('\\\\\\a\\b'), ('', '\\', 'a\\b'))
self.assertEqual(f('\\\\a'), ('', '\\', 'a'))
#
# Tests for the pure classes.
#
class _BasePurePathTest(object):
# Keys are canonical paths, values are list of tuples of arguments
# supposed to produce equal paths.
equivalences = {
'a/b': [
('a', 'b'), ('a/', 'b'), ('a', 'b/'), ('a/', 'b/'),
('a/b/',), ('a//b',), ('a//b//',),
# Empty components get removed.
('', 'a', 'b'), ('a', '', 'b'), ('a', 'b', ''),
],
'/b/c/d': [
('a', '/b/c', 'd'), ('a', '///b//c', 'd/'),
('/a', '/b/c', 'd'),
# Empty components get removed.
('/', 'b', '', 'c/d'), ('/', '', 'b/c/d'), ('', '/b/c/d'),
],
}
def setUp(self):
p = self.cls('a')
self.flavour = p._flavour
self.sep = self.flavour.sep
self.altsep = self.flavour.altsep
def test_constructor_common(self):
P = self.cls
p = P('a')
self.assertIsInstance(p, P)
P('a', 'b', 'c')
P('/a', 'b', 'c')
P('a/b/c')
P('/a/b/c')
P(FakePath("a/b/c"))
self.assertEqual(P(P('a')), P('a'))
self.assertEqual(P(P('a'), 'b'), P('a/b'))
self.assertEqual(P(P('a'), P('b')), P('a/b'))
self.assertEqual(P(P('a'), P('b'), P('c')), P(FakePath("a/b/c")))
def _check_str_subclass(self, *args):
# Issue #21127: it should be possible to construct a PurePath object
# from a str subclass instance, and it then gets converted to
# a pure str object.
class StrSubclass(str):
pass
P = self.cls
p = P(*(StrSubclass(x) for x in args))
self.assertEqual(p, P(*args))
for part in p.parts:
self.assertIs(type(part), str)
def test_str_subclass_common(self):
self._check_str_subclass('')
self._check_str_subclass('.')
self._check_str_subclass('a')
self._check_str_subclass('a/b.txt')
self._check_str_subclass('/a/b.txt')
def test_join_common(self):
P = self.cls
p = P('a/b')
pp = p.joinpath('c')
self.assertEqual(pp, P('a/b/c'))
self.assertIs(type(pp), type(p))
pp = p.joinpath('c', 'd')
self.assertEqual(pp, P('a/b/c/d'))
pp = p.joinpath(P('c'))
self.assertEqual(pp, P('a/b/c'))
pp = p.joinpath('/c')
self.assertEqual(pp, P('/c'))
def test_div_common(self):
# Basically the same as joinpath().
P = self.cls
p = P('a/b')
pp = p / 'c'
self.assertEqual(pp, P('a/b/c'))
self.assertIs(type(pp), type(p))
pp = p / 'c/d'
self.assertEqual(pp, P('a/b/c/d'))
pp = p / 'c' / 'd'
self.assertEqual(pp, P('a/b/c/d'))
pp = 'c' / p / 'd'
self.assertEqual(pp, P('c/a/b/d'))
pp = p / P('c')
self.assertEqual(pp, P('a/b/c'))
pp = p/ '/c'
self.assertEqual(pp, P('/c'))
def _check_str(self, expected, args):
p = self.cls(*args)
self.assertEqual(str(p), expected.replace('/', self.sep))
def test_str_common(self):
# Canonicalized paths roundtrip.
for pathstr in ('a', 'a/b', 'a/b/c', '/', '/a/b', '/a/b/c'):
self._check_str(pathstr, (pathstr,))
# Special case for the empty path.
self._check_str('.', ('',))
# Other tests for str() are in test_equivalences().
def test_as_posix_common(self):
P = self.cls
for pathstr in ('a', 'a/b', 'a/b/c', '/', '/a/b', '/a/b/c'):
self.assertEqual(P(pathstr).as_posix(), pathstr)
# Other tests for as_posix() are in test_equivalences().
def test_as_bytes_common(self):
sep = os.fsencode(self.sep)
P = self.cls
self.assertEqual(bytes(P('a/b')), b'a' + sep + b'b')
def test_as_uri_common(self):
P = self.cls
with self.assertRaises(ValueError):
P('a').as_uri()
with self.assertRaises(ValueError):
P().as_uri()
def test_repr_common(self):
for pathstr in ('a', 'a/b', 'a/b/c', '/', '/a/b', '/a/b/c'):
p = self.cls(pathstr)
clsname = p.__class__.__name__
r = repr(p)
# The repr() is in the form ClassName("forward-slashes path").
self.assertTrue(r.startswith(clsname + '('), r)
self.assertTrue(r.endswith(')'), r)
inner = r[len(clsname) + 1 : -1]
self.assertEqual(eval(inner), p.as_posix())
# The repr() roundtrips.
q = eval(r, pathlib.__dict__)
self.assertIs(q.__class__, p.__class__)
self.assertEqual(q, p)
self.assertEqual(repr(q), r)
def test_eq_common(self):
P = self.cls
self.assertEqual(P('a/b'), P('a/b'))
self.assertEqual(P('a/b'), P('a', 'b'))
self.assertNotEqual(P('a/b'), P('a'))
self.assertNotEqual(P('a/b'), P('/a/b'))
self.assertNotEqual(P('a/b'), P())
self.assertNotEqual(P('/a/b'), P('/'))
self.assertNotEqual(P(), P('/'))
self.assertNotEqual(P(), "")
self.assertNotEqual(P(), {})
self.assertNotEqual(P(), int)
def test_match_common(self):
P = self.cls
self.assertRaises(ValueError, P('a').match, '')
self.assertRaises(ValueError, P('a').match, '.')
# Simple relative pattern.
self.assertTrue(P('b.py').match('b.py'))
self.assertTrue(P('a/b.py').match('b.py'))
self.assertTrue(P('/a/b.py').match('b.py'))
self.assertFalse(P('a.py').match('b.py'))
self.assertFalse(P('b/py').match('b.py'))
self.assertFalse(P('/a.py').match('b.py'))
self.assertFalse(P('b.py/c').match('b.py'))
        # Wildcard relative pattern.
self.assertTrue(P('b.py').match('*.py'))
self.assertTrue(P('a/b.py').match('*.py'))
self.assertTrue(P('/a/b.py').match('*.py'))
self.assertFalse(P('b.pyc').match('*.py'))
self.assertFalse(P('b./py').match('*.py'))
self.assertFalse(P('b.py/c').match('*.py'))
# Multi-part relative pattern.
self.assertTrue(P('ab/c.py').match('a*/*.py'))
self.assertTrue(P('/d/ab/c.py').match('a*/*.py'))
self.assertFalse(P('a.py').match('a*/*.py'))
self.assertFalse(P('/dab/c.py').match('a*/*.py'))
self.assertFalse(P('ab/c.py/d').match('a*/*.py'))
# Absolute pattern.
self.assertTrue(P('/b.py').match('/*.py'))
self.assertFalse(P('b.py').match('/*.py'))
self.assertFalse(P('a/b.py').match('/*.py'))
self.assertFalse(P('/a/b.py').match('/*.py'))
# Multi-part absolute pattern.
self.assertTrue(P('/a/b.py').match('/a/*.py'))
self.assertFalse(P('/ab.py').match('/a/*.py'))
self.assertFalse(P('/a/b/c.py').match('/a/*.py'))
# Multi-part glob-style pattern.
self.assertFalse(P('/a/b/c.py').match('/**/*.py'))
self.assertTrue(P('/a/b/c.py').match('/a/**/*.py'))
def test_ordering_common(self):
# Ordering is tuple-alike.
def assertLess(a, b):
self.assertLess(a, b)
self.assertGreater(b, a)
P = self.cls
a = P('a')
b = P('a/b')
c = P('abc')
d = P('b')
assertLess(a, b)
assertLess(a, c)
assertLess(a, d)
assertLess(b, c)
assertLess(c, d)
P = self.cls
a = P('/a')
b = P('/a/b')
c = P('/abc')
d = P('/b')
assertLess(a, b)
assertLess(a, c)
assertLess(a, d)
assertLess(b, c)
assertLess(c, d)
with self.assertRaises(TypeError):
P() < {}
def test_parts_common(self):
# `parts` returns a tuple.
sep = self.sep
P = self.cls
p = P('a/b')
parts = p.parts
self.assertEqual(parts, ('a', 'b'))
# The object gets reused.
self.assertIs(parts, p.parts)
# When the path is absolute, the anchor is a separate part.
p = P('/a/b')
parts = p.parts
self.assertEqual(parts, (sep, 'a', 'b'))
def test_fspath_common(self):
P = self.cls
p = P('a/b')
self._check_str(p.__fspath__(), ('a/b',))
self._check_str(os.fspath(p), ('a/b',))
def test_equivalences(self):
for k, tuples in self.equivalences.items():
canon = k.replace('/', self.sep)
posix = k.replace(self.sep, '/')
if canon != posix:
tuples = tuples + [
tuple(part.replace('/', self.sep) for part in t)
for t in tuples
]
tuples.append((posix, ))
pcanon = self.cls(canon)
for t in tuples:
p = self.cls(*t)
self.assertEqual(p, pcanon, "failed with args {}".format(t))
self.assertEqual(hash(p), hash(pcanon))
self.assertEqual(str(p), canon)
self.assertEqual(p.as_posix(), posix)
def test_parent_common(self):
# Relative
P = self.cls
p = P('a/b/c')
self.assertEqual(p.parent, P('a/b'))
self.assertEqual(p.parent.parent, P('a'))
self.assertEqual(p.parent.parent.parent, P())
self.assertEqual(p.parent.parent.parent.parent, P())
# Anchored
p = P('/a/b/c')
self.assertEqual(p.parent, P('/a/b'))
self.assertEqual(p.parent.parent, P('/a'))
self.assertEqual(p.parent.parent.parent, P('/'))
self.assertEqual(p.parent.parent.parent.parent, P('/'))
def test_parents_common(self):
# Relative
P = self.cls
p = P('a/b/c')
par = p.parents
self.assertEqual(len(par), 3)
self.assertEqual(par[0], P('a/b'))
self.assertEqual(par[1], P('a'))
self.assertEqual(par[2], P('.'))
self.assertEqual(list(par), [P('a/b'), P('a'), P('.')])
with self.assertRaises(IndexError):
par[-1]
with self.assertRaises(IndexError):
par[3]
with self.assertRaises(TypeError):
par[0] = p
# Anchored
p = P('/a/b/c')
par = p.parents
self.assertEqual(len(par), 3)
self.assertEqual(par[0], P('/a/b'))
self.assertEqual(par[1], P('/a'))
self.assertEqual(par[2], P('/'))
self.assertEqual(list(par), [P('/a/b'), P('/a'), P('/')])
with self.assertRaises(IndexError):
par[3]
def test_drive_common(self):
P = self.cls
self.assertEqual(P('a/b').drive, '')
self.assertEqual(P('/a/b').drive, '')
self.assertEqual(P('').drive, '')
def test_root_common(self):
P = self.cls
sep = self.sep
self.assertEqual(P('').root, '')
self.assertEqual(P('a/b').root, '')
self.assertEqual(P('/').root, sep)
self.assertEqual(P('/a/b').root, sep)
def test_anchor_common(self):
P = self.cls
sep = self.sep
self.assertEqual(P('').anchor, '')
self.assertEqual(P('a/b').anchor, '')
self.assertEqual(P('/').anchor, sep)
self.assertEqual(P('/a/b').anchor, sep)
def test_name_common(self):
P = self.cls
self.assertEqual(P('').name, '')
self.assertEqual(P('.').name, '')
self.assertEqual(P('/').name, '')
self.assertEqual(P('a/b').name, 'b')
self.assertEqual(P('/a/b').name, 'b')
self.assertEqual(P('/a/b/.').name, 'b')
self.assertEqual(P('a/b.py').name, 'b.py')
self.assertEqual(P('/a/b.py').name, 'b.py')
def test_suffix_common(self):
P = self.cls
self.assertEqual(P('').suffix, '')
self.assertEqual(P('.').suffix, '')
self.assertEqual(P('..').suffix, '')
self.assertEqual(P('/').suffix, '')
self.assertEqual(P('a/b').suffix, '')
self.assertEqual(P('/a/b').suffix, '')
self.assertEqual(P('/a/b/.').suffix, '')
self.assertEqual(P('a/b.py').suffix, '.py')
self.assertEqual(P('/a/b.py').suffix, '.py')
self.assertEqual(P('a/.hgrc').suffix, '')
self.assertEqual(P('/a/.hgrc').suffix, '')
self.assertEqual(P('a/.hg.rc').suffix, '.rc')
self.assertEqual(P('/a/.hg.rc').suffix, '.rc')
self.assertEqual(P('a/b.tar.gz').suffix, '.gz')
self.assertEqual(P('/a/b.tar.gz').suffix, '.gz')
self.assertEqual(P('a/Some name. Ending with a dot.').suffix, '')
self.assertEqual(P('/a/Some name. Ending with a dot.').suffix, '')
def test_suffixes_common(self):
P = self.cls
self.assertEqual(P('').suffixes, [])
self.assertEqual(P('.').suffixes, [])
self.assertEqual(P('/').suffixes, [])
self.assertEqual(P('a/b').suffixes, [])
self.assertEqual(P('/a/b').suffixes, [])
self.assertEqual(P('/a/b/.').suffixes, [])
self.assertEqual(P('a/b.py').suffixes, ['.py'])
self.assertEqual(P('/a/b.py').suffixes, ['.py'])
self.assertEqual(P('a/.hgrc').suffixes, [])
self.assertEqual(P('/a/.hgrc').suffixes, [])
self.assertEqual(P('a/.hg.rc').suffixes, ['.rc'])
self.assertEqual(P('/a/.hg.rc').suffixes, ['.rc'])
self.assertEqual(P('a/b.tar.gz').suffixes, ['.tar', '.gz'])
self.assertEqual(P('/a/b.tar.gz').suffixes, ['.tar', '.gz'])
self.assertEqual(P('a/Some name. Ending with a dot.').suffixes, [])
self.assertEqual(P('/a/Some name. Ending with a dot.').suffixes, [])
def test_stem_common(self):
P = self.cls
self.assertEqual(P('').stem, '')
self.assertEqual(P('.').stem, '')
self.assertEqual(P('..').stem, '..')
self.assertEqual(P('/').stem, '')
self.assertEqual(P('a/b').stem, 'b')
self.assertEqual(P('a/b.py').stem, 'b')
self.assertEqual(P('a/.hgrc').stem, '.hgrc')
self.assertEqual(P('a/.hg.rc').stem, '.hg')
self.assertEqual(P('a/b.tar.gz').stem, 'b.tar')
self.assertEqual(P('a/Some name. Ending with a dot.').stem,
'Some name. Ending with a dot.')
def test_with_name_common(self):
P = self.cls
self.assertEqual(P('a/b').with_name('d.xml'), P('a/d.xml'))
self.assertEqual(P('/a/b').with_name('d.xml'), P('/a/d.xml'))
self.assertEqual(P('a/b.py').with_name('d.xml'), P('a/d.xml'))
self.assertEqual(P('/a/b.py').with_name('d.xml'), P('/a/d.xml'))
self.assertEqual(P('a/Dot ending.').with_name('d.xml'), P('a/d.xml'))
self.assertEqual(P('/a/Dot ending.').with_name('d.xml'), P('/a/d.xml'))
self.assertRaises(ValueError, P('').with_name, 'd.xml')
self.assertRaises(ValueError, P('.').with_name, 'd.xml')
self.assertRaises(ValueError, P('/').with_name, 'd.xml')
self.assertRaises(ValueError, P('a/b').with_name, '')
self.assertRaises(ValueError, P('a/b').with_name, '/c')
self.assertRaises(ValueError, P('a/b').with_name, 'c/')
self.assertRaises(ValueError, P('a/b').with_name, 'c/d')
def test_with_suffix_common(self):
P = self.cls
self.assertEqual(P('a/b').with_suffix('.gz'), P('a/b.gz'))
self.assertEqual(P('/a/b').with_suffix('.gz'), P('/a/b.gz'))
self.assertEqual(P('a/b.py').with_suffix('.gz'), P('a/b.gz'))
self.assertEqual(P('/a/b.py').with_suffix('.gz'), P('/a/b.gz'))
# Stripping suffix.
self.assertEqual(P('a/b.py').with_suffix(''), P('a/b'))
self.assertEqual(P('/a/b').with_suffix(''), P('/a/b'))
# Path doesn't have a "filename" component.
self.assertRaises(ValueError, P('').with_suffix, '.gz')
self.assertRaises(ValueError, P('.').with_suffix, '.gz')
self.assertRaises(ValueError, P('/').with_suffix, '.gz')
# Invalid suffix.
self.assertRaises(ValueError, P('a/b').with_suffix, 'gz')
self.assertRaises(ValueError, P('a/b').with_suffix, '/')
self.assertRaises(ValueError, P('a/b').with_suffix, '.')
self.assertRaises(ValueError, P('a/b').with_suffix, '/.gz')
self.assertRaises(ValueError, P('a/b').with_suffix, 'c/d')
self.assertRaises(ValueError, P('a/b').with_suffix, '.c/.d')
self.assertRaises(ValueError, P('a/b').with_suffix, './.d')
self.assertRaises(ValueError, P('a/b').with_suffix, '.d/.')
self.assertRaises(ValueError, P('a/b').with_suffix,
(self.flavour.sep, 'd'))
def test_relative_to_common(self):
P = self.cls
p = P('a/b')
self.assertRaises(TypeError, p.relative_to)
self.assertRaises(TypeError, p.relative_to, b'a')
self.assertEqual(p.relative_to(P()), P('a/b'))
self.assertEqual(p.relative_to(''), P('a/b'))
self.assertEqual(p.relative_to(P('a')), P('b'))
self.assertEqual(p.relative_to('a'), P('b'))
self.assertEqual(p.relative_to('a/'), P('b'))
self.assertEqual(p.relative_to(P('a/b')), P())
self.assertEqual(p.relative_to('a/b'), P())
# With several args.
self.assertEqual(p.relative_to('a', 'b'), P())
# Unrelated paths.
self.assertRaises(ValueError, p.relative_to, P('c'))
self.assertRaises(ValueError, p.relative_to, P('a/b/c'))
self.assertRaises(ValueError, p.relative_to, P('a/c'))
self.assertRaises(ValueError, p.relative_to, P('/a'))
p = P('/a/b')
self.assertEqual(p.relative_to(P('/')), P('a/b'))
self.assertEqual(p.relative_to('/'), P('a/b'))
self.assertEqual(p.relative_to(P('/a')), P('b'))
self.assertEqual(p.relative_to('/a'), P('b'))
self.assertEqual(p.relative_to('/a/'), P('b'))
self.assertEqual(p.relative_to(P('/a/b')), P())
self.assertEqual(p.relative_to('/a/b'), P())
# Unrelated paths.
self.assertRaises(ValueError, p.relative_to, P('/c'))
self.assertRaises(ValueError, p.relative_to, P('/a/b/c'))
self.assertRaises(ValueError, p.relative_to, P('/a/c'))
self.assertRaises(ValueError, p.relative_to, P())
self.assertRaises(ValueError, p.relative_to, '')
self.assertRaises(ValueError, p.relative_to, P('a'))
def test_pickling_common(self):
P = self.cls
p = P('/a/b')
for proto in range(0, pickle.HIGHEST_PROTOCOL + 1):
dumped = pickle.dumps(p, proto)
pp = pickle.loads(dumped)
self.assertIs(pp.__class__, p.__class__)
self.assertEqual(pp, p)
self.assertEqual(hash(pp), hash(p))
self.assertEqual(str(pp), str(p))
class PurePosixPathTest(_BasePurePathTest, unittest.TestCase):
cls = pathlib.PurePosixPath
def test_root(self):
P = self.cls
self.assertEqual(P('/a/b').root, '/')
self.assertEqual(P('///a/b').root, '/')
# POSIX special case for two leading slashes.
self.assertEqual(P('//a/b').root, '//')
def test_eq(self):
P = self.cls
self.assertNotEqual(P('a/b'), P('A/b'))
self.assertEqual(P('/a'), P('///a'))
self.assertNotEqual(P('/a'), P('//a'))
def test_as_uri(self):
P = self.cls
self.assertEqual(P('/').as_uri(), 'file:///')
self.assertEqual(P('/a/b.c').as_uri(), 'file:///a/b.c')
self.assertEqual(P('/a/b%#c').as_uri(), 'file:///a/b%25%23c')
def test_as_uri_non_ascii(self):
from urllib.parse import quote_from_bytes
P = self.cls
try:
os.fsencode('\xe9')
except UnicodeEncodeError:
self.skipTest("\\xe9 cannot be encoded to the filesystem encoding")
self.assertEqual(P('/a/b\xe9').as_uri(),
'file:///a/b' + quote_from_bytes(os.fsencode('\xe9')))
def test_match(self):
P = self.cls
self.assertFalse(P('A.py').match('a.PY'))
def test_is_absolute(self):
P = self.cls
self.assertFalse(P().is_absolute())
self.assertFalse(P('a').is_absolute())
self.assertFalse(P('a/b/').is_absolute())
self.assertTrue(P('/').is_absolute())
self.assertTrue(P('/a').is_absolute())
self.assertTrue(P('/a/b/').is_absolute())
self.assertTrue(P('//a').is_absolute())
self.assertTrue(P('//a/b').is_absolute())
def test_is_reserved(self):
P = self.cls
self.assertIs(False, P('').is_reserved())
self.assertIs(False, P('/').is_reserved())
self.assertIs(False, P('/foo/bar').is_reserved())
self.assertIs(False, P('/dev/con/PRN/NUL').is_reserved())
def test_join(self):
P = self.cls
p = P('//a')
pp = p.joinpath('b')
self.assertEqual(pp, P('//a/b'))
pp = P('/a').joinpath('//c')
self.assertEqual(pp, P('//c'))
pp = P('//a').joinpath('/c')
self.assertEqual(pp, P('/c'))
def test_div(self):
# Basically the same as joinpath().
P = self.cls
p = P('//a')
pp = p / 'b'
self.assertEqual(pp, P('//a/b'))
pp = P('/a') / '//c'
self.assertEqual(pp, P('//c'))
pp = P('//a') / '/c'
self.assertEqual(pp, P('/c'))
class PureWindowsPathTest(_BasePurePathTest, unittest.TestCase):
cls = pathlib.PureWindowsPath
equivalences = _BasePurePathTest.equivalences.copy()
equivalences.update({
'c:a': [ ('c:', 'a'), ('c:', 'a/'), ('/', 'c:', 'a') ],
'c:/a': [
('c:/', 'a'), ('c:', '/', 'a'), ('c:', '/a'),
('/z', 'c:/', 'a'), ('//x/y', 'c:/', 'a'),
],
'//a/b/': [ ('//a/b',) ],
'//a/b/c': [
('//a/b', 'c'), ('//a/b/', 'c'),
],
})
def test_str(self):
p = self.cls('a/b/c')
self.assertEqual(str(p), 'a\\b\\c')
p = self.cls('c:/a/b/c')
self.assertEqual(str(p), 'c:\\a\\b\\c')
p = self.cls('//a/b')
self.assertEqual(str(p), '\\\\a\\b\\')
p = self.cls('//a/b/c')
self.assertEqual(str(p), '\\\\a\\b\\c')
p = self.cls('//a/b/c/d')
self.assertEqual(str(p), '\\\\a\\b\\c\\d')
def test_str_subclass(self):
self._check_str_subclass('c:')
self._check_str_subclass('c:a')
self._check_str_subclass('c:a\\b.txt')
self._check_str_subclass('c:\\')
self._check_str_subclass('c:\\a')
self._check_str_subclass('c:\\a\\b.txt')
self._check_str_subclass('\\\\some\\share')
self._check_str_subclass('\\\\some\\share\\a')
self._check_str_subclass('\\\\some\\share\\a\\b.txt')
def test_eq(self):
P = self.cls
self.assertEqual(P('c:a/b'), P('c:a/b'))
self.assertEqual(P('c:a/b'), P('c:', 'a', 'b'))
self.assertNotEqual(P('c:a/b'), P('d:a/b'))
self.assertNotEqual(P('c:a/b'), P('c:/a/b'))
self.assertNotEqual(P('/a/b'), P('c:/a/b'))
# Case-insensitivity.
self.assertEqual(P('a/B'), P('A/b'))
self.assertEqual(P('C:a/B'), P('c:A/b'))
self.assertEqual(P('//Some/SHARE/a/B'), P('//somE/share/A/b'))
def test_as_uri(self):
P = self.cls
with self.assertRaises(ValueError):
P('/a/b').as_uri()
with self.assertRaises(ValueError):
P('c:a/b').as_uri()
self.assertEqual(P('c:/').as_uri(), 'file:///c:/')
self.assertEqual(P('c:/a/b.c').as_uri(), 'file:///c:/a/b.c')
self.assertEqual(P('c:/a/b%#c').as_uri(), 'file:///c:/a/b%25%23c')
self.assertEqual(P('c:/a/b\xe9').as_uri(), 'file:///c:/a/b%C3%A9')
self.assertEqual(P('//some/share/').as_uri(), 'file://some/share/')
self.assertEqual(P('//some/share/a/b.c').as_uri(),
'file://some/share/a/b.c')
self.assertEqual(P('//some/share/a/b%#c\xe9').as_uri(),
'file://some/share/a/b%25%23c%C3%A9')
def test_match_common(self):
P = self.cls
# Absolute patterns.
self.assertTrue(P('c:/b.py').match('/*.py'))
self.assertTrue(P('c:/b.py').match('c:*.py'))
self.assertTrue(P('c:/b.py').match('c:/*.py'))
self.assertFalse(P('d:/b.py').match('c:/*.py')) # wrong drive
self.assertFalse(P('b.py').match('/*.py'))
self.assertFalse(P('b.py').match('c:*.py'))
self.assertFalse(P('b.py').match('c:/*.py'))
self.assertFalse(P('c:b.py').match('/*.py'))
self.assertFalse(P('c:b.py').match('c:/*.py'))
self.assertFalse(P('/b.py').match('c:*.py'))
self.assertFalse(P('/b.py').match('c:/*.py'))
# UNC patterns.
self.assertTrue(P('//some/share/a.py').match('/*.py'))
self.assertTrue(P('//some/share/a.py').match('//some/share/*.py'))
self.assertFalse(P('//other/share/a.py').match('//some/share/*.py'))
self.assertFalse(P('//some/share/a/b.py').match('//some/share/*.py'))
# Case-insensitivity.
self.assertTrue(P('B.py').match('b.PY'))
self.assertTrue(P('c:/a/B.Py').match('C:/A/*.pY'))
self.assertTrue(P('//Some/Share/B.Py').match('//somE/sharE/*.pY'))
def test_ordering_common(self):
# Case-insensitivity.
def assertOrderedEqual(a, b):
self.assertLessEqual(a, b)
self.assertGreaterEqual(b, a)
P = self.cls
p = P('c:A/b')
q = P('C:a/B')
assertOrderedEqual(p, q)
self.assertFalse(p < q)
self.assertFalse(p > q)
p = P('//some/Share/A/b')
q = P('//Some/SHARE/a/B')
assertOrderedEqual(p, q)
self.assertFalse(p < q)
self.assertFalse(p > q)
def test_parts(self):
P = self.cls
p = P('c:a/b')
parts = p.parts
self.assertEqual(parts, ('c:', 'a', 'b'))
p = P('c:/a/b')
parts = p.parts
self.assertEqual(parts, ('c:\\', 'a', 'b'))
p = P('//a/b/c/d')
parts = p.parts
self.assertEqual(parts, ('\\\\a\\b\\', 'c', 'd'))
def test_parent(self):
# Anchored
P = self.cls
p = P('z:a/b/c')
self.assertEqual(p.parent, P('z:a/b'))
self.assertEqual(p.parent.parent, P('z:a'))
self.assertEqual(p.parent.parent.parent, P('z:'))
self.assertEqual(p.parent.parent.parent.parent, P('z:'))
p = P('z:/a/b/c')
self.assertEqual(p.parent, P('z:/a/b'))
self.assertEqual(p.parent.parent, P('z:/a'))
self.assertEqual(p.parent.parent.parent, P('z:/'))
self.assertEqual(p.parent.parent.parent.parent, P('z:/'))
p = P('//a/b/c/d')
self.assertEqual(p.parent, P('//a/b/c'))
self.assertEqual(p.parent.parent, P('//a/b'))
self.assertEqual(p.parent.parent.parent, P('//a/b'))
def test_parents(self):
# Anchored
P = self.cls
p = P('z:a/b/')
par = p.parents
self.assertEqual(len(par), 2)
self.assertEqual(par[0], P('z:a'))
self.assertEqual(par[1], P('z:'))
self.assertEqual(list(par), [P('z:a'), P('z:')])
with self.assertRaises(IndexError):
par[2]
p = P('z:/a/b/')
par = p.parents
self.assertEqual(len(par), 2)
self.assertEqual(par[0], P('z:/a'))
self.assertEqual(par[1], P('z:/'))
self.assertEqual(list(par), [P('z:/a'), P('z:/')])
with self.assertRaises(IndexError):
par[2]
p = P('//a/b/c/d')
par = p.parents
self.assertEqual(len(par), 2)
self.assertEqual(par[0], P('//a/b/c'))
self.assertEqual(par[1], P('//a/b'))
self.assertEqual(list(par), [P('//a/b/c'), P('//a/b')])
with self.assertRaises(IndexError):
par[2]
def test_drive(self):
P = self.cls
self.assertEqual(P('c:').drive, 'c:')
self.assertEqual(P('c:a/b').drive, 'c:')
self.assertEqual(P('c:/').drive, 'c:')
self.assertEqual(P('c:/a/b/').drive, 'c:')
self.assertEqual(P('//a/b').drive, '\\\\a\\b')
self.assertEqual(P('//a/b/').drive, '\\\\a\\b')
self.assertEqual(P('//a/b/c/d').drive, '\\\\a\\b')
def test_root(self):
P = self.cls
self.assertEqual(P('c:').root, '')
self.assertEqual(P('c:a/b').root, '')
self.assertEqual(P('c:/').root, '\\')
self.assertEqual(P('c:/a/b/').root, '\\')
self.assertEqual(P('//a/b').root, '\\')
self.assertEqual(P('//a/b/').root, '\\')
self.assertEqual(P('//a/b/c/d').root, '\\')
def test_anchor(self):
P = self.cls
self.assertEqual(P('c:').anchor, 'c:')
self.assertEqual(P('c:a/b').anchor, 'c:')
self.assertEqual(P('c:/').anchor, 'c:\\')
self.assertEqual(P('c:/a/b/').anchor, 'c:\\')
self.assertEqual(P('//a/b').anchor, '\\\\a\\b\\')
self.assertEqual(P('//a/b/').anchor, '\\\\a\\b\\')
self.assertEqual(P('//a/b/c/d').anchor, '\\\\a\\b\\')
def test_name(self):
P = self.cls
self.assertEqual(P('c:').name, '')
self.assertEqual(P('c:/').name, '')
self.assertEqual(P('c:a/b').name, 'b')
self.assertEqual(P('c:/a/b').name, 'b')
self.assertEqual(P('c:a/b.py').name, 'b.py')
self.assertEqual(P('c:/a/b.py').name, 'b.py')
self.assertEqual(P('//My.py/Share.php').name, '')
self.assertEqual(P('//My.py/Share.php/a/b').name, 'b')
def test_suffix(self):
P = self.cls
self.assertEqual(P('c:').suffix, '')
self.assertEqual(P('c:/').suffix, '')
self.assertEqual(P('c:a/b').suffix, '')
self.assertEqual(P('c:/a/b').suffix, '')
self.assertEqual(P('c:a/b.py').suffix, '.py')
self.assertEqual(P('c:/a/b.py').suffix, '.py')
self.assertEqual(P('c:a/.hgrc').suffix, '')
self.assertEqual(P('c:/a/.hgrc').suffix, '')
self.assertEqual(P('c:a/.hg.rc').suffix, '.rc')
self.assertEqual(P('c:/a/.hg.rc').suffix, '.rc')
self.assertEqual(P('c:a/b.tar.gz').suffix, '.gz')
self.assertEqual(P('c:/a/b.tar.gz').suffix, '.gz')
self.assertEqual(P('c:a/Some name. Ending with a dot.').suffix, '')
self.assertEqual(P('c:/a/Some name. Ending with a dot.').suffix, '')
self.assertEqual(P('//My.py/Share.php').suffix, '')
self.assertEqual(P('//My.py/Share.php/a/b').suffix, '')
def test_suffixes(self):
P = self.cls
self.assertEqual(P('c:').suffixes, [])
self.assertEqual(P('c:/').suffixes, [])
self.assertEqual(P('c:a/b').suffixes, [])
self.assertEqual(P('c:/a/b').suffixes, [])
self.assertEqual(P('c:a/b.py').suffixes, ['.py'])
self.assertEqual(P('c:/a/b.py').suffixes, ['.py'])
self.assertEqual(P('c:a/.hgrc').suffixes, [])
self.assertEqual(P('c:/a/.hgrc').suffixes, [])
self.assertEqual(P('c:a/.hg.rc').suffixes, ['.rc'])
self.assertEqual(P('c:/a/.hg.rc').suffixes, ['.rc'])
self.assertEqual(P('c:a/b.tar.gz').suffixes, ['.tar', '.gz'])
self.assertEqual(P('c:/a/b.tar.gz').suffixes, ['.tar', '.gz'])
self.assertEqual(P('//My.py/Share.php').suffixes, [])
self.assertEqual(P('//My.py/Share.php/a/b').suffixes, [])
self.assertEqual(P('c:a/Some name. Ending with a dot.').suffixes, [])
self.assertEqual(P('c:/a/Some name. Ending with a dot.').suffixes, [])
def test_stem(self):
P = self.cls
self.assertEqual(P('c:').stem, '')
self.assertEqual(P('c:.').stem, '')
self.assertEqual(P('c:..').stem, '..')
self.assertEqual(P('c:/').stem, '')
self.assertEqual(P('c:a/b').stem, 'b')
self.assertEqual(P('c:a/b.py').stem, 'b')
self.assertEqual(P('c:a/.hgrc').stem, '.hgrc')
self.assertEqual(P('c:a/.hg.rc').stem, '.hg')
self.assertEqual(P('c:a/b.tar.gz').stem, 'b.tar')
self.assertEqual(P('c:a/Some name. Ending with a dot.').stem,
'Some name. Ending with a dot.')
def test_with_name(self):
P = self.cls
self.assertEqual(P('c:a/b').with_name('d.xml'), P('c:a/d.xml'))
self.assertEqual(P('c:/a/b').with_name('d.xml'), P('c:/a/d.xml'))
self.assertEqual(P('c:a/Dot ending.').with_name('d.xml'), P('c:a/d.xml'))
self.assertEqual(P('c:/a/Dot ending.').with_name('d.xml'), P('c:/a/d.xml'))
self.assertRaises(ValueError, P('c:').with_name, 'd.xml')
self.assertRaises(ValueError, P('c:/').with_name, 'd.xml')
self.assertRaises(ValueError, P('//My/Share').with_name, 'd.xml')
self.assertRaises(ValueError, P('c:a/b').with_name, 'd:')
self.assertRaises(ValueError, P('c:a/b').with_name, 'd:e')
self.assertRaises(ValueError, P('c:a/b').with_name, 'd:/e')
self.assertRaises(ValueError, P('c:a/b').with_name, '//My/Share')
def test_with_suffix(self):
P = self.cls
self.assertEqual(P('c:a/b').with_suffix('.gz'), P('c:a/b.gz'))
self.assertEqual(P('c:/a/b').with_suffix('.gz'), P('c:/a/b.gz'))
self.assertEqual(P('c:a/b.py').with_suffix('.gz'), P('c:a/b.gz'))
self.assertEqual(P('c:/a/b.py').with_suffix('.gz'), P('c:/a/b.gz'))
# Path doesn't have a "filename" component.
self.assertRaises(ValueError, P('').with_suffix, '.gz')
self.assertRaises(ValueError, P('.').with_suffix, '.gz')
self.assertRaises(ValueError, P('/').with_suffix, '.gz')
self.assertRaises(ValueError, P('//My/Share').with_suffix, '.gz')
# Invalid suffix.
self.assertRaises(ValueError, P('c:a/b').with_suffix, 'gz')
self.assertRaises(ValueError, P('c:a/b').with_suffix, '/')
self.assertRaises(ValueError, P('c:a/b').with_suffix, '\\')
self.assertRaises(ValueError, P('c:a/b').with_suffix, 'c:')
self.assertRaises(ValueError, P('c:a/b').with_suffix, '/.gz')
self.assertRaises(ValueError, P('c:a/b').with_suffix, '\\.gz')
self.assertRaises(ValueError, P('c:a/b').with_suffix, 'c:.gz')
self.assertRaises(ValueError, P('c:a/b').with_suffix, 'c/d')
self.assertRaises(ValueError, P('c:a/b').with_suffix, 'c\\d')
self.assertRaises(ValueError, P('c:a/b').with_suffix, '.c/d')
self.assertRaises(ValueError, P('c:a/b').with_suffix, '.c\\d')
def test_relative_to(self):
P = self.cls
p = P('C:Foo/Bar')
self.assertEqual(p.relative_to(P('c:')), P('Foo/Bar'))
self.assertEqual(p.relative_to('c:'), P('Foo/Bar'))
self.assertEqual(p.relative_to(P('c:foO')), P('Bar'))
self.assertEqual(p.relative_to('c:foO'), P('Bar'))
self.assertEqual(p.relative_to('c:foO/'), P('Bar'))
self.assertEqual(p.relative_to(P('c:foO/baR')), P())
self.assertEqual(p.relative_to('c:foO/baR'), P())
# Unrelated paths.
self.assertRaises(ValueError, p.relative_to, P())
self.assertRaises(ValueError, p.relative_to, '')
self.assertRaises(ValueError, p.relative_to, P('d:'))
self.assertRaises(ValueError, p.relative_to, P('/'))
self.assertRaises(ValueError, p.relative_to, P('Foo'))
self.assertRaises(ValueError, p.relative_to, P('/Foo'))
self.assertRaises(ValueError, p.relative_to, P('C:/Foo'))
self.assertRaises(ValueError, p.relative_to, P('C:Foo/Bar/Baz'))
self.assertRaises(ValueError, p.relative_to, P('C:Foo/Baz'))
p = P('C:/Foo/Bar')
self.assertEqual(p.relative_to(P('c:')), P('/Foo/Bar'))
self.assertEqual(p.relative_to('c:'), P('/Foo/Bar'))
self.assertEqual(str(p.relative_to(P('c:'))), '\\Foo\\Bar')
self.assertEqual(str(p.relative_to('c:')), '\\Foo\\Bar')
self.assertEqual(p.relative_to(P('c:/')), P('Foo/Bar'))
self.assertEqual(p.relative_to('c:/'), P('Foo/Bar'))
self.assertEqual(p.relative_to(P('c:/foO')), P('Bar'))
self.assertEqual(p.relative_to('c:/foO'), P('Bar'))
self.assertEqual(p.relative_to('c:/foO/'), P('Bar'))
self.assertEqual(p.relative_to(P('c:/foO/baR')), P())
self.assertEqual(p.relative_to('c:/foO/baR'), P())
# Unrelated paths.
self.assertRaises(ValueError, p.relative_to, P('C:/Baz'))
self.assertRaises(ValueError, p.relative_to, P('C:/Foo/Bar/Baz'))
self.assertRaises(ValueError, p.relative_to, P('C:/Foo/Baz'))
self.assertRaises(ValueError, p.relative_to, P('C:Foo'))
self.assertRaises(ValueError, p.relative_to, P('d:'))
self.assertRaises(ValueError, p.relative_to, P('d:/'))
self.assertRaises(ValueError, p.relative_to, P('/'))
self.assertRaises(ValueError, p.relative_to, P('/Foo'))
self.assertRaises(ValueError, p.relative_to, P('//C/Foo'))
# UNC paths.
p = P('//Server/Share/Foo/Bar')
self.assertEqual(p.relative_to(P('//sErver/sHare')), P('Foo/Bar'))
self.assertEqual(p.relative_to('//sErver/sHare'), P('Foo/Bar'))
self.assertEqual(p.relative_to('//sErver/sHare/'), P('Foo/Bar'))
self.assertEqual(p.relative_to(P('//sErver/sHare/Foo')), P('Bar'))
self.assertEqual(p.relative_to('//sErver/sHare/Foo'), P('Bar'))
self.assertEqual(p.relative_to('//sErver/sHare/Foo/'), P('Bar'))
self.assertEqual(p.relative_to(P('//sErver/sHare/Foo/Bar')), P())
self.assertEqual(p.relative_to('//sErver/sHare/Foo/Bar'), P())
# Unrelated paths.
self.assertRaises(ValueError, p.relative_to, P('/Server/Share/Foo'))
self.assertRaises(ValueError, p.relative_to, P('c:/Server/Share/Foo'))
self.assertRaises(ValueError, p.relative_to, P('//z/Share/Foo'))
self.assertRaises(ValueError, p.relative_to, P('//Server/z/Foo'))
def test_is_absolute(self):
P = self.cls
# Under NT, only paths with both a drive and a root are absolute.
self.assertFalse(P().is_absolute())
self.assertFalse(P('a').is_absolute())
self.assertFalse(P('a/b/').is_absolute())
self.assertFalse(P('/').is_absolute())
self.assertFalse(P('/a').is_absolute())
self.assertFalse(P('/a/b/').is_absolute())
self.assertFalse(P('c:').is_absolute())
self.assertFalse(P('c:a').is_absolute())
self.assertFalse(P('c:a/b/').is_absolute())
self.assertTrue(P('c:/').is_absolute())
self.assertTrue(P('c:/a').is_absolute())
self.assertTrue(P('c:/a/b/').is_absolute())
# UNC paths are absolute by definition.
self.assertTrue(P('//a/b').is_absolute())
self.assertTrue(P('//a/b/').is_absolute())
self.assertTrue(P('//a/b/c').is_absolute())
self.assertTrue(P('//a/b/c/d').is_absolute())
def test_join(self):
P = self.cls
p = P('C:/a/b')
pp = p.joinpath('x/y')
self.assertEqual(pp, P('C:/a/b/x/y'))
pp = p.joinpath('/x/y')
self.assertEqual(pp, P('C:/x/y'))
# Joining with a different drive => the first path is ignored, even
# if the second path is relative.
pp = p.joinpath('D:x/y')
self.assertEqual(pp, P('D:x/y'))
pp = p.joinpath('D:/x/y')
self.assertEqual(pp, P('D:/x/y'))
pp = p.joinpath('//host/share/x/y')
self.assertEqual(pp, P('//host/share/x/y'))
# Joining with the same drive => the first path is appended to if
# the second path is relative.
pp = p.joinpath('c:x/y')
self.assertEqual(pp, P('C:/a/b/x/y'))
pp = p.joinpath('c:/x/y')
self.assertEqual(pp, P('C:/x/y'))
def test_div(self):
# Basically the same as joinpath().
P = self.cls
p = P('C:/a/b')
self.assertEqual(p / 'x/y', P('C:/a/b/x/y'))
self.assertEqual(p / 'x' / 'y', P('C:/a/b/x/y'))
self.assertEqual(p / '/x/y', P('C:/x/y'))
self.assertEqual(p / '/x' / 'y', P('C:/x/y'))
# Joining with a different drive => the first path is ignored, even
# if the second path is relative.
self.assertEqual(p / 'D:x/y', P('D:x/y'))
self.assertEqual(p / 'D:' / 'x/y', P('D:x/y'))
self.assertEqual(p / 'D:/x/y', P('D:/x/y'))
self.assertEqual(p / 'D:' / '/x/y', P('D:/x/y'))
self.assertEqual(p / '//host/share/x/y', P('//host/share/x/y'))
# Joining with the same drive => the first path is appended to if
# the second path is relative.
self.assertEqual(p / 'c:x/y', P('C:/a/b/x/y'))
self.assertEqual(p / 'c:/x/y', P('C:/x/y'))
def test_is_reserved(self):
P = self.cls
self.assertIs(False, P('').is_reserved())
self.assertIs(False, P('/').is_reserved())
self.assertIs(False, P('/foo/bar').is_reserved())
self.assertIs(True, P('con').is_reserved())
self.assertIs(True, P('NUL').is_reserved())
self.assertIs(True, P('NUL.txt').is_reserved())
self.assertIs(True, P('com1').is_reserved())
self.assertIs(True, P('com9.bar').is_reserved())
self.assertIs(False, P('bar.com9').is_reserved())
self.assertIs(True, P('lpt1').is_reserved())
self.assertIs(True, P('lpt9.bar').is_reserved())
self.assertIs(False, P('bar.lpt9').is_reserved())
# Only the last component matters.
self.assertIs(False, P('c:/NUL/con/baz').is_reserved())
# UNC paths are never reserved.
self.assertIs(False, P('//my/share/nul/con/aux').is_reserved())
class PurePathTest(_BasePurePathTest, unittest.TestCase):
cls = pathlib.PurePath
def test_concrete_class(self):
p = self.cls('a')
self.assertIs(type(p),
pathlib.PureWindowsPath if os.name == 'nt' else pathlib.PurePosixPath)
def test_different_flavours_unequal(self):
p = pathlib.PurePosixPath('a')
q = pathlib.PureWindowsPath('a')
self.assertNotEqual(p, q)
def test_different_flavours_unordered(self):
p = pathlib.PurePosixPath('a')
q = pathlib.PureWindowsPath('a')
with self.assertRaises(TypeError):
p < q
with self.assertRaises(TypeError):
p <= q
with self.assertRaises(TypeError):
p > q
with self.assertRaises(TypeError):
p >= q
#
# Tests for the concrete classes.
#
# Make sure any symbolic links in the base test path are resolved.
BASE = os.path.realpath(TESTFN)
join = lambda *x: os.path.join(BASE, *x)
rel_join = lambda *x: os.path.join(TESTFN, *x)
only_nt = unittest.skipIf(os.name != 'nt',
'test requires a Windows-compatible system')
only_posix = unittest.skipIf(os.name == 'nt',
'test requires a POSIX-compatible system')
@only_posix
class PosixPathAsPureTest(PurePosixPathTest):
cls = pathlib.PosixPath
@only_nt
class WindowsPathAsPureTest(PureWindowsPathTest):
cls = pathlib.WindowsPath
def test_owner(self):
P = self.cls
with self.assertRaises(NotImplementedError):
P('c:/').owner()
def test_group(self):
P = self.cls
with self.assertRaises(NotImplementedError):
P('c:/').group()
class _BasePathTest(object):
"""Tests for the FS-accessing functionalities of the Path classes."""
# (BASE)
# |
# |-- brokenLink -> non-existing
# |-- dirA
# | `-- linkC -> ../dirB
# |-- dirB
# | |-- fileB
# | `-- linkD -> ../dirB
# |-- dirC
# | |-- dirD
# | | `-- fileD
# | `-- fileC
# |-- dirE # No permissions
# |-- fileA
# |-- linkA -> fileA
# |-- linkB -> dirB
# `-- brokenLinkLoop -> brokenLinkLoop
#
def setUp(self):
def cleanup():
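            # dirE is made unreadable below (chmod 0); restore its
            # permissions so that rmtree() can remove it.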
os.chmod(join('dirE'), 0o777)
support.rmtree(BASE)
self.addCleanup(cleanup)
os.mkdir(BASE)
os.mkdir(join('dirA'))
os.mkdir(join('dirB'))
os.mkdir(join('dirC'))
os.mkdir(join('dirC', 'dirD'))
os.mkdir(join('dirE'))
with open(join('fileA'), 'wb') as f:
f.write(b"this is file A\n")
with open(join('dirB', 'fileB'), 'wb') as f:
f.write(b"this is file B\n")
with open(join('dirC', 'fileC'), 'wb') as f:
f.write(b"this is file C\n")
with open(join('dirC', 'dirD', 'fileD'), 'wb') as f:
f.write(b"this is file D\n")
os.chmod(join('dirE'), 0)
if support.can_symlink():
# Relative symlinks.
os.symlink('fileA', join('linkA'))
os.symlink('non-existing', join('brokenLink'))
self.dirlink('dirB', join('linkB'))
self.dirlink(os.path.join('..', 'dirB'), join('dirA', 'linkC'))
# This one goes upwards, creating a loop.
self.dirlink(os.path.join('..', 'dirB'), join('dirB', 'linkD'))
# Broken symlink (pointing to itself).
os.symlink('brokenLinkLoop', join('brokenLinkLoop'))
if os.name == 'nt':
# Workaround for http://bugs.python.org/issue13772.
def dirlink(self, src, dest):
os.symlink(src, dest, target_is_directory=True)
else:
def dirlink(self, src, dest):
os.symlink(src, dest)
def assertSame(self, path_a, path_b):
self.assertTrue(os.path.samefile(str(path_a), str(path_b)),
"%r and %r don't point to the same file" %
(path_a, path_b))
def assertFileNotFound(self, func, *args, **kwargs):
with self.assertRaises(FileNotFoundError) as cm:
func(*args, **kwargs)
self.assertEqual(cm.exception.errno, errno.ENOENT)
def assertEqualNormCase(self, path_a, path_b):
self.assertEqual(os.path.normcase(path_a), os.path.normcase(path_b))
def _test_cwd(self, p):
q = self.cls(os.getcwd())
self.assertEqual(p, q)
self.assertEqualNormCase(str(p), str(q))
self.assertIs(type(p), type(q))
self.assertTrue(p.is_absolute())
def test_cwd(self):
p = self.cls.cwd()
self._test_cwd(p)
def _test_home(self, p):
q = self.cls(os.path.expanduser('~'))
self.assertEqual(p, q)
self.assertEqualNormCase(str(p), str(q))
self.assertIs(type(p), type(q))
self.assertTrue(p.is_absolute())
def test_home(self):
p = self.cls.home()
self._test_home(p)
def test_samefile(self):
fileA_path = os.path.join(BASE, 'fileA')
fileB_path = os.path.join(BASE, 'dirB', 'fileB')
p = self.cls(fileA_path)
pp = self.cls(fileA_path)
q = self.cls(fileB_path)
self.assertTrue(p.samefile(fileA_path))
self.assertTrue(p.samefile(pp))
self.assertFalse(p.samefile(fileB_path))
self.assertFalse(p.samefile(q))
# Test the non-existent file case
non_existent = os.path.join(BASE, 'foo')
r = self.cls(non_existent)
self.assertRaises(FileNotFoundError, p.samefile, r)
self.assertRaises(FileNotFoundError, p.samefile, non_existent)
self.assertRaises(FileNotFoundError, r.samefile, p)
self.assertRaises(FileNotFoundError, r.samefile, non_existent)
self.assertRaises(FileNotFoundError, r.samefile, r)
self.assertRaises(FileNotFoundError, r.samefile, non_existent)
def test_empty_path(self):
# The empty path points to '.'
p = self.cls('')
self.assertEqual(p.stat(), os.stat('.'))
def test_expanduser_common(self):
P = self.cls
p = P('~')
self.assertEqual(p.expanduser(), P(os.path.expanduser('~')))
p = P('foo')
self.assertEqual(p.expanduser(), p)
p = P('/~')
self.assertEqual(p.expanduser(), p)
p = P('../~')
self.assertEqual(p.expanduser(), p)
p = P(P('').absolute().anchor) / '~'
self.assertEqual(p.expanduser(), p)
def test_exists(self):
P = self.cls
p = P(BASE)
self.assertIs(True, p.exists())
self.assertIs(True, (p / 'dirA').exists())
self.assertIs(True, (p / 'fileA').exists())
self.assertIs(False, (p / 'fileA' / 'bah').exists())
if support.can_symlink():
self.assertIs(True, (p / 'linkA').exists())
self.assertIs(True, (p / 'linkB').exists())
self.assertIs(True, (p / 'linkB' / 'fileB').exists())
self.assertIs(False, (p / 'linkA' / 'bah').exists())
self.assertIs(False, (p / 'foo').exists())
self.assertIs(False, P('/xyzzy').exists())
self.assertIs(False, P(BASE + '\udfff').exists())
self.assertIs(False, P(BASE + '\x00').exists())
def test_open_common(self):
p = self.cls(BASE)
with (p / 'fileA').open('r') as f:
self.assertIsInstance(f, io.TextIOBase)
self.assertEqual(f.read(), "this is file A\n")
with (p / 'fileA').open('rb') as f:
self.assertIsInstance(f, io.BufferedIOBase)
self.assertEqual(f.read().strip(), b"this is file A")
with (p / 'fileA').open('rb', buffering=0) as f:
self.assertIsInstance(f, io.RawIOBase)
self.assertEqual(f.read().strip(), b"this is file A")
def test_read_write_bytes(self):
p = self.cls(BASE)
(p / 'fileA').write_bytes(b'abcdefg')
self.assertEqual((p / 'fileA').read_bytes(), b'abcdefg')
# Check that trying to write str does not truncate the file.
self.assertRaises(TypeError, (p / 'fileA').write_bytes, 'somestr')
self.assertEqual((p / 'fileA').read_bytes(), b'abcdefg')
def test_read_write_text(self):
p = self.cls(BASE)
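        # 'ä' encodes to the single byte 0xE4 in latin-1, which does not form
        # a valid UTF-8 sequence here, so decoding with errors='ignore' drops
        # just that character and leaves 'bcdefg'.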
(p / 'fileA').write_text('äbcdefg', encoding='latin-1')
self.assertEqual((p / 'fileA').read_text(
encoding='utf-8', errors='ignore'), 'bcdefg')
# Check that trying to write bytes does not truncate the file.
self.assertRaises(TypeError, (p / 'fileA').write_text, b'somebytes')
self.assertEqual((p / 'fileA').read_text(encoding='latin-1'), 'äbcdefg')
def test_iterdir(self):
P = self.cls
p = P(BASE)
it = p.iterdir()
paths = set(it)
expected = ['dirA', 'dirB', 'dirC', 'dirE', 'fileA']
if support.can_symlink():
expected += ['linkA', 'linkB', 'brokenLink', 'brokenLinkLoop']
self.assertEqual(paths, { P(BASE, q) for q in expected })
@support.skip_unless_symlink
def test_iterdir_symlink(self):
# __iter__ on a symlink to a directory.
P = self.cls
p = P(BASE, 'linkB')
paths = set(p.iterdir())
expected = { P(BASE, 'linkB', q) for q in ['fileB', 'linkD'] }
self.assertEqual(paths, expected)
def test_iterdir_nodir(self):
# __iter__ on something that is not a directory.
p = self.cls(BASE, 'fileA')
with self.assertRaises(OSError) as cm:
next(p.iterdir())
# ENOENT or EINVAL under Windows, ENOTDIR otherwise
# (see issue #12802).
self.assertIn(cm.exception.errno, (errno.ENOTDIR,
errno.ENOENT, errno.EINVAL))
def test_glob_common(self):
def _check(glob, expected):
self.assertEqual(set(glob), { P(BASE, q) for q in expected })
P = self.cls
p = P(BASE)
it = p.glob("fileA")
self.assertIsInstance(it, collections.abc.Iterator)
_check(it, ["fileA"])
_check(p.glob("fileB"), [])
_check(p.glob("dir*/file*"), ["dirB/fileB", "dirC/fileC"])
if not support.can_symlink():
_check(p.glob("*A"), ['dirA', 'fileA'])
else:
_check(p.glob("*A"), ['dirA', 'fileA', 'linkA'])
if not support.can_symlink():
_check(p.glob("*B/*"), ['dirB/fileB'])
else:
_check(p.glob("*B/*"), ['dirB/fileB', 'dirB/linkD',
'linkB/fileB', 'linkB/linkD'])
if not support.can_symlink():
_check(p.glob("*/fileB"), ['dirB/fileB'])
else:
_check(p.glob("*/fileB"), ['dirB/fileB', 'linkB/fileB'])
def test_rglob_common(self):
def _check(glob, expected):
self.assertEqual(set(glob), { P(BASE, q) for q in expected })
P = self.cls
p = P(BASE)
it = p.rglob("fileA")
self.assertIsInstance(it, collections.abc.Iterator)
_check(it, ["fileA"])
_check(p.rglob("fileB"), ["dirB/fileB"])
_check(p.rglob("*/fileA"), [])
if not support.can_symlink():
_check(p.rglob("*/fileB"), ["dirB/fileB"])
else:
_check(p.rglob("*/fileB"), ["dirB/fileB", "dirB/linkD/fileB",
"linkB/fileB", "dirA/linkC/fileB"])
_check(p.rglob("file*"), ["fileA", "dirB/fileB",
"dirC/fileC", "dirC/dirD/fileD"])
p = P(BASE, "dirC")
_check(p.rglob("file*"), ["dirC/fileC", "dirC/dirD/fileD"])
_check(p.rglob("*/*"), ["dirC/dirD/fileD"])
@support.skip_unless_symlink
def test_rglob_symlink_loop(self):
# Don't get fooled by symlink loops (Issue #26012).
P = self.cls
p = P(BASE)
given = set(p.rglob('*'))
expect = {'brokenLink',
'dirA', 'dirA/linkC',
'dirB', 'dirB/fileB', 'dirB/linkD',
'dirC', 'dirC/dirD', 'dirC/dirD/fileD', 'dirC/fileC',
'dirE',
'fileA',
'linkA',
'linkB',
'brokenLinkLoop',
}
self.assertEqual(given, {p / x for x in expect})
def test_glob_many_open_files(self):
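        # Running many glob()/rglob() iterators at once must not exhaust
        # file descriptors: each iterator should keep only a bounded number
        # of directory handles open at a time.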
depth = 30
P = self.cls
base = P(BASE) / 'deep'
p = P(base, *(['d']*depth))
p.mkdir(parents=True)
pattern = '/'.join(['*'] * depth)
iters = [base.glob(pattern) for j in range(100)]
for it in iters:
self.assertEqual(next(it), p)
iters = [base.rglob('d') for j in range(100)]
p = base
for i in range(depth):
p = p / 'd'
for it in iters:
self.assertEqual(next(it), p)
def test_glob_dotdot(self):
# ".." is not special in globs.
P = self.cls
p = P(BASE)
self.assertEqual(set(p.glob("..")), { P(BASE, "..") })
self.assertEqual(set(p.glob("dirA/../file*")), { P(BASE, "dirA/../fileA") })
self.assertEqual(set(p.glob("../xyzzy")), set())
def _check_resolve(self, p, expected, strict=True):
q = p.resolve(strict)
self.assertEqual(q, expected)
# This can be used to check both relative and absolute resolutions.
_check_resolve_relative = _check_resolve_absolute = _check_resolve
@support.skip_unless_symlink
def test_resolve_common(self):
P = self.cls
p = P(BASE, 'foo')
with self.assertRaises(OSError) as cm:
p.resolve(strict=True)
self.assertEqual(cm.exception.errno, errno.ENOENT)
# Non-strict
self.assertEqualNormCase(str(p.resolve(strict=False)),
os.path.join(BASE, 'foo'))
p = P(BASE, 'foo', 'in', 'spam')
self.assertEqualNormCase(str(p.resolve(strict=False)),
os.path.join(BASE, 'foo', 'in', 'spam'))
p = P(BASE, '..', 'foo', 'in', 'spam')
self.assertEqualNormCase(str(p.resolve(strict=False)),
os.path.abspath(os.path.join('foo', 'in', 'spam')))
# These are all relative symlinks.
p = P(BASE, 'dirB', 'fileB')
self._check_resolve_relative(p, p)
p = P(BASE, 'linkA')
self._check_resolve_relative(p, P(BASE, 'fileA'))
p = P(BASE, 'dirA', 'linkC', 'fileB')
self._check_resolve_relative(p, P(BASE, 'dirB', 'fileB'))
p = P(BASE, 'dirB', 'linkD', 'fileB')
self._check_resolve_relative(p, P(BASE, 'dirB', 'fileB'))
# Non-strict
p = P(BASE, 'dirA', 'linkC', 'fileB', 'foo', 'in', 'spam')
self._check_resolve_relative(p, P(BASE, 'dirB', 'fileB', 'foo', 'in',
'spam'), False)
p = P(BASE, 'dirA', 'linkC', '..', 'foo', 'in', 'spam')
if os.name == 'nt':
# In Windows, if linkY points to dirB, 'dirA\linkY\..'
# resolves to 'dirA' without resolving linkY first.
self._check_resolve_relative(p, P(BASE, 'dirA', 'foo', 'in',
'spam'), False)
else:
# In Posix, if linkY points to dirB, 'dirA/linkY/..'
            # resolves to 'dirB/..' before resolving to the parent of dirB.
self._check_resolve_relative(p, P(BASE, 'foo', 'in', 'spam'), False)
# Now create absolute symlinks.
d = support._longpath(tempfile.mkdtemp(suffix='-dirD', dir=os.getcwd()))
self.addCleanup(support.rmtree, d)
os.symlink(os.path.join(d), join('dirA', 'linkX'))
os.symlink(join('dirB'), os.path.join(d, 'linkY'))
p = P(BASE, 'dirA', 'linkX', 'linkY', 'fileB')
self._check_resolve_absolute(p, P(BASE, 'dirB', 'fileB'))
# Non-strict
p = P(BASE, 'dirA', 'linkX', 'linkY', 'foo', 'in', 'spam')
self._check_resolve_relative(p, P(BASE, 'dirB', 'foo', 'in', 'spam'),
False)
p = P(BASE, 'dirA', 'linkX', 'linkY', '..', 'foo', 'in', 'spam')
if os.name == 'nt':
# In Windows, if linkY points to dirB, 'dirA\linkY\..'
# resolves to 'dirA' without resolving linkY first.
self._check_resolve_relative(p, P(d, 'foo', 'in', 'spam'), False)
else:
# In Posix, if linkY points to dirB, 'dirA/linkY/..'
            # resolves to 'dirB/..' before resolving to the parent of dirB.
self._check_resolve_relative(p, P(BASE, 'foo', 'in', 'spam'), False)
@support.skip_unless_symlink
def test_resolve_dot(self):
# See https://bitbucket.org/pitrou/pathlib/issue/9/pathresolve-fails-on-complex-symlinks
p = self.cls(BASE)
self.dirlink('.', join('0'))
self.dirlink(os.path.join('0', '0'), join('1'))
self.dirlink(os.path.join('1', '1'), join('2'))
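        # Each link expands to another pair of links that ultimately resolve
        # to '.', so the whole chain must collapse back to the base directory.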
q = p / '2'
self.assertEqual(q.resolve(strict=True), p)
r = q / '3' / '4'
self.assertRaises(FileNotFoundError, r.resolve, strict=True)
# Non-strict
self.assertEqual(r.resolve(strict=False), p / '3' / '4')
def test_with(self):
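        # Path supported the context-manager protocol at the time; once the
        # path has been "closed", any further I/O on it raises ValueError.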
p = self.cls(BASE)
it = p.iterdir()
it2 = p.iterdir()
next(it2)
with p:
pass
# I/O operation on closed path.
self.assertRaises(ValueError, next, it)
self.assertRaises(ValueError, next, it2)
self.assertRaises(ValueError, p.open)
self.assertRaises(ValueError, p.resolve)
self.assertRaises(ValueError, p.absolute)
self.assertRaises(ValueError, p.__enter__)
def test_chmod(self):
p = self.cls(BASE) / 'fileA'
mode = p.stat().st_mode
# Clear writable bit.
new_mode = mode & ~0o222
p.chmod(new_mode)
self.assertEqual(p.stat().st_mode, new_mode)
# Set writable bit.
new_mode = mode | 0o222
p.chmod(new_mode)
self.assertEqual(p.stat().st_mode, new_mode)
# XXX also need a test for lchmod.
def test_stat(self):
p = self.cls(BASE) / 'fileA'
st = p.stat()
self.assertEqual(p.stat(), st)
# Change file mode by flipping write bit.
p.chmod(st.st_mode ^ 0o222)
self.addCleanup(p.chmod, st.st_mode)
self.assertNotEqual(p.stat(), st)
@support.skip_unless_symlink
def test_lstat(self):
        p = self.cls(BASE) / 'linkA'
st = p.stat()
self.assertNotEqual(st, p.lstat())
def test_lstat_nosymlink(self):
p = self.cls(BASE) / 'fileA'
st = p.stat()
self.assertEqual(st, p.lstat())
@unittest.skipUnless(pwd, "the pwd module is needed for this test")
def test_owner(self):
p = self.cls(BASE) / 'fileA'
uid = p.stat().st_uid
try:
name = pwd.getpwuid(uid).pw_name
except KeyError:
self.skipTest(
"user %d doesn't have an entry in the system database" % uid)
self.assertEqual(name, p.owner())
@unittest.skipUnless(grp, "the grp module is needed for this test")
def test_group(self):
p = self.cls(BASE) / 'fileA'
gid = p.stat().st_gid
try:
name = grp.getgrgid(gid).gr_name
except KeyError:
self.skipTest(
"group %d doesn't have an entry in the system database" % gid)
self.assertEqual(name, p.group())
def test_unlink(self):
p = self.cls(BASE) / 'fileA'
p.unlink()
self.assertFileNotFound(p.stat)
self.assertFileNotFound(p.unlink)
def test_unlink_missing_ok(self):
p = self.cls(BASE) / 'fileAAA'
self.assertFileNotFound(p.unlink)
p.unlink(missing_ok=True)
def test_rmdir(self):
p = self.cls(BASE) / 'dirA'
for q in p.iterdir():
q.unlink()
p.rmdir()
self.assertFileNotFound(p.stat)
self.assertFileNotFound(p.unlink)
def test_link_to(self):
P = self.cls(BASE)
p = P / 'fileA'
size = p.stat().st_size
# linking to another path.
q = P / 'dirA' / 'fileAA'
try:
p.link_to(q)
except PermissionError as e:
self.skipTest('os.link(): %s' % e)
self.assertEqual(q.stat().st_size, size)
self.assertEqual(os.path.samefile(p, q), True)
        self.assertTrue(p.stat())
# Linking to a str of a relative path.
r = rel_join('fileAAA')
q.link_to(r)
self.assertEqual(os.stat(r).st_size, size)
        self.assertTrue(q.stat())
def test_rename(self):
P = self.cls(BASE)
p = P / 'fileA'
size = p.stat().st_size
# Renaming to another path.
q = P / 'dirA' / 'fileAA'
renamed_p = p.rename(q)
self.assertEqual(renamed_p, q)
self.assertEqual(q.stat().st_size, size)
self.assertFileNotFound(p.stat)
# Renaming to a str of a relative path.
r = rel_join('fileAAA')
renamed_q = q.rename(r)
self.assertEqual(renamed_q, self.cls(r))
self.assertEqual(os.stat(r).st_size, size)
self.assertFileNotFound(q.stat)
def test_replace(self):
P = self.cls(BASE)
p = P / 'fileA'
size = p.stat().st_size
# Replacing a non-existing path.
q = P / 'dirA' / 'fileAA'
replaced_p = p.replace(q)
self.assertEqual(replaced_p, q)
self.assertEqual(q.stat().st_size, size)
self.assertFileNotFound(p.stat)
# Replacing another (existing) path.
r = rel_join('dirB', 'fileB')
replaced_q = q.replace(r)
self.assertEqual(replaced_q, self.cls(r))
self.assertEqual(os.stat(r).st_size, size)
self.assertFileNotFound(q.stat)
def test_touch_common(self):
P = self.cls(BASE)
p = P / 'newfileA'
self.assertFalse(p.exists())
p.touch()
self.assertTrue(p.exists())
st = p.stat()
old_mtime = st.st_mtime
old_mtime_ns = st.st_mtime_ns
# Rewind the mtime sufficiently far in the past to work around
# filesystem-specific timestamp granularity.
os.utime(str(p), (old_mtime - 10, old_mtime - 10))
# The file mtime should be refreshed by calling touch() again.
p.touch()
st = p.stat()
self.assertGreaterEqual(st.st_mtime_ns, old_mtime_ns)
self.assertGreaterEqual(st.st_mtime, old_mtime)
# Now with exist_ok=False.
p = P / 'newfileB'
self.assertFalse(p.exists())
p.touch(mode=0o700, exist_ok=False)
self.assertTrue(p.exists())
self.assertRaises(OSError, p.touch, exist_ok=False)
def test_touch_nochange(self):
P = self.cls(BASE)
p = P / 'fileA'
p.touch()
with p.open('rb') as f:
self.assertEqual(f.read().strip(), b"this is file A")
def test_mkdir(self):
P = self.cls(BASE)
p = P / 'newdirA'
self.assertFalse(p.exists())
p.mkdir()
self.assertTrue(p.exists())
self.assertTrue(p.is_dir())
with self.assertRaises(OSError) as cm:
p.mkdir()
self.assertEqual(cm.exception.errno, errno.EEXIST)
def test_mkdir_parents(self):
# Creating a chain of directories.
p = self.cls(BASE, 'newdirB', 'newdirC')
self.assertFalse(p.exists())
with self.assertRaises(OSError) as cm:
p.mkdir()
self.assertEqual(cm.exception.errno, errno.ENOENT)
p.mkdir(parents=True)
self.assertTrue(p.exists())
self.assertTrue(p.is_dir())
with self.assertRaises(OSError) as cm:
p.mkdir(parents=True)
self.assertEqual(cm.exception.errno, errno.EEXIST)
# Test `mode` arg.
mode = stat.S_IMODE(p.stat().st_mode) # Default mode.
p = self.cls(BASE, 'newdirD', 'newdirE')
p.mkdir(0o555, parents=True)
self.assertTrue(p.exists())
self.assertTrue(p.is_dir())
if os.name != 'nt':
# The directory's permissions follow the mode argument.
self.assertEqual(stat.S_IMODE(p.stat().st_mode), 0o7555 & mode)
# The parent's permissions follow the default process settings.
self.assertEqual(stat.S_IMODE(p.parent.stat().st_mode), mode)
def test_mkdir_exist_ok(self):
p = self.cls(BASE, 'dirB')
st_ctime_first = p.stat().st_ctime
self.assertTrue(p.exists())
self.assertTrue(p.is_dir())
with self.assertRaises(FileExistsError) as cm:
p.mkdir()
self.assertEqual(cm.exception.errno, errno.EEXIST)
p.mkdir(exist_ok=True)
self.assertTrue(p.exists())
self.assertEqual(p.stat().st_ctime, st_ctime_first)
def test_mkdir_exist_ok_with_parent(self):
p = self.cls(BASE, 'dirC')
self.assertTrue(p.exists())
with self.assertRaises(FileExistsError) as cm:
p.mkdir()
self.assertEqual(cm.exception.errno, errno.EEXIST)
p = p / 'newdirC'
p.mkdir(parents=True)
st_ctime_first = p.stat().st_ctime
self.assertTrue(p.exists())
with self.assertRaises(FileExistsError) as cm:
p.mkdir(parents=True)
self.assertEqual(cm.exception.errno, errno.EEXIST)
p.mkdir(parents=True, exist_ok=True)
self.assertTrue(p.exists())
self.assertEqual(p.stat().st_ctime, st_ctime_first)
def test_mkdir_exist_ok_root(self):
# Issue #25803: A drive root could raise PermissionError on Windows.
self.cls('/').resolve().mkdir(exist_ok=True)
self.cls('/').resolve().mkdir(parents=True, exist_ok=True)
@only_nt # XXX: not sure how to test this on POSIX.
def test_mkdir_with_unknown_drive(self):
for d in 'ZYXWVUTSRQPONMLKJIHGFEDCBA':
p = self.cls(d + ':\\')
if not p.is_dir():
break
else:
self.skipTest("cannot find a drive that doesn't exist")
with self.assertRaises(OSError):
(p / 'child' / 'path').mkdir(parents=True)
def test_mkdir_with_child_file(self):
p = self.cls(BASE, 'dirB', 'fileB')
self.assertTrue(p.exists())
# An exception is raised when the last path component is an existing
# regular file, regardless of whether exist_ok is true or not.
with self.assertRaises(FileExistsError) as cm:
p.mkdir(parents=True)
self.assertEqual(cm.exception.errno, errno.EEXIST)
with self.assertRaises(FileExistsError) as cm:
p.mkdir(parents=True, exist_ok=True)
self.assertEqual(cm.exception.errno, errno.EEXIST)
def test_mkdir_no_parents_file(self):
p = self.cls(BASE, 'fileA')
self.assertTrue(p.exists())
# An exception is raised when the last path component is an existing
# regular file, regardless of whether exist_ok is true or not.
with self.assertRaises(FileExistsError) as cm:
p.mkdir()
self.assertEqual(cm.exception.errno, errno.EEXIST)
with self.assertRaises(FileExistsError) as cm:
p.mkdir(exist_ok=True)
self.assertEqual(cm.exception.errno, errno.EEXIST)
def test_mkdir_concurrent_parent_creation(self):
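        # Race mkdir(parents=True) against a simulated concurrent process:
        # any subset of the parent directories may already exist by the time
        # we try to create them ourselves.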
for pattern_num in range(32):
p = self.cls(BASE, 'dirCPC%d' % pattern_num)
self.assertFalse(p.exists())
def my_mkdir(path, mode=0o777):
path = str(path)
# Emulate another process that would create the directory
# just before we try to create it ourselves. We do it
# in all possible pattern combinations, assuming that this
# function is called at most 5 times (dirCPC/dir1/dir2,
# dirCPC/dir1, dirCPC, dirCPC/dir1, dirCPC/dir1/dir2).
if pattern.pop():
os.mkdir(path, mode) # From another process.
concurrently_created.add(path)
os.mkdir(path, mode) # Our real call.
pattern = [bool(pattern_num & (1 << n)) for n in range(5)]
concurrently_created = set()
p12 = p / 'dir1' / 'dir2'
try:
with mock.patch("pathlib._normal_accessor.mkdir", my_mkdir):
p12.mkdir(parents=True, exist_ok=False)
except FileExistsError:
self.assertIn(str(p12), concurrently_created)
else:
self.assertNotIn(str(p12), concurrently_created)
self.assertTrue(p.exists())
@support.skip_unless_symlink
def test_symlink_to(self):
P = self.cls(BASE)
target = P / 'fileA'
# Symlinking a path target.
link = P / 'dirA' / 'linkAA'
link.symlink_to(target)
self.assertEqual(link.stat(), target.stat())
self.assertNotEqual(link.lstat(), target.stat())
# Symlinking a str target.
link = P / 'dirA' / 'linkAAA'
link.symlink_to(str(target))
self.assertEqual(link.stat(), target.stat())
self.assertNotEqual(link.lstat(), target.stat())
self.assertFalse(link.is_dir())
# Symlinking to a directory.
target = P / 'dirB'
link = P / 'dirA' / 'linkAAAA'
link.symlink_to(target, target_is_directory=True)
self.assertEqual(link.stat(), target.stat())
self.assertNotEqual(link.lstat(), target.stat())
self.assertTrue(link.is_dir())
self.assertTrue(list(link.iterdir()))
def test_is_dir(self):
P = self.cls(BASE)
self.assertTrue((P / 'dirA').is_dir())
self.assertFalse((P / 'fileA').is_dir())
self.assertFalse((P / 'non-existing').is_dir())
self.assertFalse((P / 'fileA' / 'bah').is_dir())
if support.can_symlink():
self.assertFalse((P / 'linkA').is_dir())
self.assertTrue((P / 'linkB').is_dir())
            self.assertIs((P / 'brokenLink').is_dir(), False)
self.assertIs((P / 'dirA\udfff').is_dir(), False)
self.assertIs((P / 'dirA\x00').is_dir(), False)
def test_is_file(self):
P = self.cls(BASE)
self.assertTrue((P / 'fileA').is_file())
self.assertFalse((P / 'dirA').is_file())
self.assertFalse((P / 'non-existing').is_file())
self.assertFalse((P / 'fileA' / 'bah').is_file())
if support.can_symlink():
self.assertTrue((P / 'linkA').is_file())
self.assertFalse((P / 'linkB').is_file())
            self.assertFalse((P / 'brokenLink').is_file())
self.assertIs((P / 'fileA\udfff').is_file(), False)
self.assertIs((P / 'fileA\x00').is_file(), False)
@only_posix
def test_is_mount(self):
P = self.cls(BASE)
R = self.cls('/') # TODO: Work out Windows.
self.assertFalse((P / 'fileA').is_mount())
self.assertFalse((P / 'dirA').is_mount())
self.assertFalse((P / 'non-existing').is_mount())
self.assertFalse((P / 'fileA' / 'bah').is_mount())
self.assertTrue(R.is_mount())
if support.can_symlink():
self.assertFalse((P / 'linkA').is_mount())
self.assertIs(self.cls('/\udfff').is_mount(), False)
self.assertIs(self.cls('/\x00').is_mount(), False)
def test_is_symlink(self):
P = self.cls(BASE)
self.assertFalse((P / 'fileA').is_symlink())
self.assertFalse((P / 'dirA').is_symlink())
self.assertFalse((P / 'non-existing').is_symlink())
self.assertFalse((P / 'fileA' / 'bah').is_symlink())
if support.can_symlink():
self.assertTrue((P / 'linkA').is_symlink())
self.assertTrue((P / 'linkB').is_symlink())
            self.assertTrue((P / 'brokenLink').is_symlink())
        self.assertIs((P / 'fileA\udfff').is_symlink(), False)
        self.assertIs((P / 'fileA\x00').is_symlink(), False)
        if support.can_symlink():
            self.assertIs((P / 'linkA\udfff').is_symlink(), False)
            self.assertIs((P / 'linkA\x00').is_symlink(), False)
def test_is_fifo_false(self):
P = self.cls(BASE)
self.assertFalse((P / 'fileA').is_fifo())
self.assertFalse((P / 'dirA').is_fifo())
self.assertFalse((P / 'non-existing').is_fifo())
self.assertFalse((P / 'fileA' / 'bah').is_fifo())
self.assertIs((P / 'fileA\udfff').is_fifo(), False)
self.assertIs((P / 'fileA\x00').is_fifo(), False)
@unittest.skipUnless(hasattr(os, "mkfifo"), "os.mkfifo() required")
def test_is_fifo_true(self):
P = self.cls(BASE, 'myfifo')
try:
os.mkfifo(str(P))
except PermissionError as e:
self.skipTest('os.mkfifo(): %s' % e)
self.assertTrue(P.is_fifo())
self.assertFalse(P.is_socket())
self.assertFalse(P.is_file())
self.assertIs(self.cls(BASE, 'myfifo\udfff').is_fifo(), False)
self.assertIs(self.cls(BASE, 'myfifo\x00').is_fifo(), False)
def test_is_socket_false(self):
P = self.cls(BASE)
self.assertFalse((P / 'fileA').is_socket())
self.assertFalse((P / 'dirA').is_socket())
self.assertFalse((P / 'non-existing').is_socket())
self.assertFalse((P / 'fileA' / 'bah').is_socket())
self.assertIs((P / 'fileA\udfff').is_socket(), False)
self.assertIs((P / 'fileA\x00').is_socket(), False)
@unittest.skipUnless(hasattr(socket, "AF_UNIX"), "Unix sockets required")
def test_is_socket_true(self):
P = self.cls(BASE, 'mysock')
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
self.addCleanup(sock.close)
try:
sock.bind(str(P))
except OSError as e:
if (isinstance(e, PermissionError) or
"AF_UNIX path too long" in str(e)):
self.skipTest("cannot bind Unix socket: " + str(e))
self.assertTrue(P.is_socket())
self.assertFalse(P.is_fifo())
self.assertFalse(P.is_file())
self.assertIs(self.cls(BASE, 'mysock\udfff').is_socket(), False)
self.assertIs(self.cls(BASE, 'mysock\x00').is_socket(), False)
def test_is_block_device_false(self):
P = self.cls(BASE)
self.assertFalse((P / 'fileA').is_block_device())
self.assertFalse((P / 'dirA').is_block_device())
self.assertFalse((P / 'non-existing').is_block_device())
self.assertFalse((P / 'fileA' / 'bah').is_block_device())
self.assertIs((P / 'fileA\udfff').is_block_device(), False)
self.assertIs((P / 'fileA\x00').is_block_device(), False)
def test_is_char_device_false(self):
P = self.cls(BASE)
self.assertFalse((P / 'fileA').is_char_device())
self.assertFalse((P / 'dirA').is_char_device())
self.assertFalse((P / 'non-existing').is_char_device())
self.assertFalse((P / 'fileA' / 'bah').is_char_device())
self.assertIs((P / 'fileA\udfff').is_char_device(), False)
self.assertIs((P / 'fileA\x00').is_char_device(), False)
def test_is_char_device_true(self):
# Under Unix, /dev/null should generally be a char device.
P = self.cls('/dev/null')
if not P.exists():
self.skipTest("/dev/null required")
self.assertTrue(P.is_char_device())
self.assertFalse(P.is_block_device())
self.assertFalse(P.is_file())
self.assertIs(self.cls('/dev/null\udfff').is_char_device(), False)
self.assertIs(self.cls('/dev/null\x00').is_char_device(), False)
def test_pickling_common(self):
p = self.cls(BASE, 'fileA')
for proto in range(0, pickle.HIGHEST_PROTOCOL + 1):
dumped = pickle.dumps(p, proto)
pp = pickle.loads(dumped)
self.assertEqual(pp.stat(), p.stat())
def test_parts_interning(self):
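        # Identical path components are shared (interned) across instances,
        # hence the identity checks below rather than plain equality.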
P = self.cls
p = P('/usr/bin/foo')
q = P('/usr/local/bin')
# 'usr'
self.assertIs(p.parts[1], q.parts[1])
# 'bin'
self.assertIs(p.parts[2], q.parts[3])
def _check_complex_symlinks(self, link0_target):
# Test solving a non-looping chain of symlinks (issue #19887).
P = self.cls(BASE)
self.dirlink(os.path.join('link0', 'link0'), join('link1'))
self.dirlink(os.path.join('link1', 'link1'), join('link2'))
self.dirlink(os.path.join('link2', 'link2'), join('link3'))
self.dirlink(link0_target, join('link0'))
# Resolve absolute paths.
p = (P / 'link0').resolve()
self.assertEqual(p, P)
self.assertEqualNormCase(str(p), BASE)
p = (P / 'link1').resolve()
self.assertEqual(p, P)
self.assertEqualNormCase(str(p), BASE)
p = (P / 'link2').resolve()
self.assertEqual(p, P)
self.assertEqualNormCase(str(p), BASE)
p = (P / 'link3').resolve()
self.assertEqual(p, P)
self.assertEqualNormCase(str(p), BASE)
# Resolve relative paths.
old_path = os.getcwd()
os.chdir(BASE)
try:
p = self.cls('link0').resolve()
self.assertEqual(p, P)
self.assertEqualNormCase(str(p), BASE)
p = self.cls('link1').resolve()
self.assertEqual(p, P)
self.assertEqualNormCase(str(p), BASE)
p = self.cls('link2').resolve()
self.assertEqual(p, P)
self.assertEqualNormCase(str(p), BASE)
p = self.cls('link3').resolve()
self.assertEqual(p, P)
self.assertEqualNormCase(str(p), BASE)
finally:
os.chdir(old_path)
@support.skip_unless_symlink
def test_complex_symlinks_absolute(self):
self._check_complex_symlinks(BASE)
@support.skip_unless_symlink
def test_complex_symlinks_relative(self):
self._check_complex_symlinks('.')
@support.skip_unless_symlink
def test_complex_symlinks_relative_dot_dot(self):
self._check_complex_symlinks(os.path.join('dirA', '..'))
class PathTest(_BasePathTest, unittest.TestCase):
cls = pathlib.Path
def test_concrete_class(self):
p = self.cls('a')
self.assertIs(type(p),
pathlib.WindowsPath if os.name == 'nt' else pathlib.PosixPath)
def test_unsupported_flavour(self):
if os.name == 'nt':
self.assertRaises(NotImplementedError, pathlib.PosixPath)
else:
self.assertRaises(NotImplementedError, pathlib.WindowsPath)
def test_glob_empty_pattern(self):
p = self.cls()
with self.assertRaisesRegex(ValueError, 'Unacceptable pattern'):
list(p.glob(''))
@only_posix
class PosixPathTest(_BasePathTest, unittest.TestCase):
cls = pathlib.PosixPath
def _check_symlink_loop(self, *args, strict=True):
path = self.cls(*args)
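        # resolve() must detect the symlink loop and raise RuntimeError; the
        # print() makes the resolved value visible if it wrongly succeeds.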
with self.assertRaises(RuntimeError):
print(path.resolve(strict))
def test_open_mode(self):
old_mask = os.umask(0)
self.addCleanup(os.umask, old_mask)
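        # With the umask cleared, new files are created with the full 0o666
        # mode; with umask 0o022 the group/other write bits are masked off.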
p = self.cls(BASE)
with (p / 'new_file').open('wb'):
pass
st = os.stat(join('new_file'))
self.assertEqual(stat.S_IMODE(st.st_mode), 0o666)
os.umask(0o022)
with (p / 'other_new_file').open('wb'):
pass
st = os.stat(join('other_new_file'))
self.assertEqual(stat.S_IMODE(st.st_mode), 0o644)
def test_touch_mode(self):
old_mask = os.umask(0)
self.addCleanup(os.umask, old_mask)
p = self.cls(BASE)
(p / 'new_file').touch()
st = os.stat(join('new_file'))
self.assertEqual(stat.S_IMODE(st.st_mode), 0o666)
os.umask(0o022)
(p / 'other_new_file').touch()
st = os.stat(join('other_new_file'))
self.assertEqual(stat.S_IMODE(st.st_mode), 0o644)
(p / 'masked_new_file').touch(mode=0o750)
st = os.stat(join('masked_new_file'))
self.assertEqual(stat.S_IMODE(st.st_mode), 0o750)
@support.skip_unless_symlink
def test_resolve_loop(self):
# Loops with relative symlinks.
os.symlink('linkX/inside', join('linkX'))
self._check_symlink_loop(BASE, 'linkX')
os.symlink('linkY', join('linkY'))
self._check_symlink_loop(BASE, 'linkY')
os.symlink('linkZ/../linkZ', join('linkZ'))
self._check_symlink_loop(BASE, 'linkZ')
# Non-strict
self._check_symlink_loop(BASE, 'linkZ', 'foo', strict=False)
# Loops with absolute symlinks.
os.symlink(join('linkU/inside'), join('linkU'))
self._check_symlink_loop(BASE, 'linkU')
os.symlink(join('linkV'), join('linkV'))
self._check_symlink_loop(BASE, 'linkV')
os.symlink(join('linkW/../linkW'), join('linkW'))
self._check_symlink_loop(BASE, 'linkW')
# Non-strict
self._check_symlink_loop(BASE, 'linkW', 'foo', strict=False)
def test_glob(self):
P = self.cls
p = P(BASE)
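        # On a case-insensitive filesystem (e.g. the macOS default), "FILEa"
        # matches fileA; on case-sensitive filesystems it matches nothing.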
given = set(p.glob("FILEa"))
expect = set() if not support.fs_is_case_insensitive(BASE) else given
self.assertEqual(given, expect)
self.assertEqual(set(p.glob("FILEa*")), set())
def test_rglob(self):
P = self.cls
p = P(BASE, "dirC")
given = set(p.rglob("FILEd"))
expect = set() if not support.fs_is_case_insensitive(BASE) else given
self.assertEqual(given, expect)
self.assertEqual(set(p.rglob("FILEd*")), set())
@unittest.skipUnless(hasattr(pwd, 'getpwall'),
'pwd module does not expose getpwall()')
def test_expanduser(self):
P = self.cls
support.import_module('pwd')
import pwd
pwdent = pwd.getpwuid(os.getuid())
username = pwdent.pw_name
userhome = pwdent.pw_dir.rstrip('/') or '/'
# Find arbitrary different user (if exists).
for pwdent in pwd.getpwall():
othername = pwdent.pw_name
otherhome = pwdent.pw_dir.rstrip('/')
if othername != username and otherhome:
break
else:
othername = username
otherhome = userhome
p1 = P('~/Documents')
p2 = P('~' + username + '/Documents')
p3 = P('~' + othername + '/Documents')
p4 = P('../~' + username + '/Documents')
p5 = P('/~' + username + '/Documents')
p6 = P('')
p7 = P('~fakeuser/Documents')
with support.EnvironmentVarGuard() as env:
env.pop('HOME', None)
self.assertEqual(p1.expanduser(), P(userhome) / 'Documents')
self.assertEqual(p2.expanduser(), P(userhome) / 'Documents')
self.assertEqual(p3.expanduser(), P(otherhome) / 'Documents')
self.assertEqual(p4.expanduser(), p4)
self.assertEqual(p5.expanduser(), p5)
self.assertEqual(p6.expanduser(), p6)
self.assertRaises(RuntimeError, p7.expanduser)
env['HOME'] = '/tmp'
self.assertEqual(p1.expanduser(), P('/tmp/Documents'))
self.assertEqual(p2.expanduser(), P(userhome) / 'Documents')
self.assertEqual(p3.expanduser(), P(otherhome) / 'Documents')
self.assertEqual(p4.expanduser(), p4)
self.assertEqual(p5.expanduser(), p5)
self.assertEqual(p6.expanduser(), p6)
self.assertRaises(RuntimeError, p7.expanduser)
@unittest.skipIf(sys.platform != "darwin",
"Bad file descriptor in /dev/fd affects only macOS")
def test_handling_bad_descriptor(self):
try:
file_descriptors = list(pathlib.Path('/dev/fd').rglob("*"))[3:]
if not file_descriptors:
self.skipTest("no file descriptors - issue was not reproduced")
# Checking all file descriptors because there is no guarantee
# which one will fail.
for f in file_descriptors:
f.exists()
f.is_dir()
f.is_file()
f.is_symlink()
f.is_block_device()
f.is_char_device()
f.is_fifo()
f.is_socket()
except OSError as e:
if e.errno == errno.EBADF:
self.fail("Bad file descriptor not handled.")
raise
@only_nt
class WindowsPathTest(_BasePathTest, unittest.TestCase):
cls = pathlib.WindowsPath
def test_glob(self):
P = self.cls
p = P(BASE)
self.assertEqual(set(p.glob("FILEa")), { P(BASE, "fileA") })
def test_rglob(self):
P = self.cls
p = P(BASE, "dirC")
self.assertEqual(set(p.rglob("FILEd")), { P(BASE, "dirC/dirD/fileD") })
def test_expanduser(self):
P = self.cls
with support.EnvironmentVarGuard() as env:
env.pop('HOME', None)
env.pop('USERPROFILE', None)
env.pop('HOMEPATH', None)
env.pop('HOMEDRIVE', None)
env['USERNAME'] = 'alice'
            # With no home-related variables set, expanduser() raises for
            # home-relative paths and leaves all other paths unchanged.
p1 = P('~/My Documents')
p2 = P('~alice/My Documents')
p3 = P('~bob/My Documents')
p4 = P('/~/My Documents')
p5 = P('d:~/My Documents')
p6 = P('')
self.assertRaises(RuntimeError, p1.expanduser)
self.assertRaises(RuntimeError, p2.expanduser)
self.assertRaises(RuntimeError, p3.expanduser)
self.assertEqual(p4.expanduser(), p4)
self.assertEqual(p5.expanduser(), p5)
self.assertEqual(p6.expanduser(), p6)
def check():
env.pop('USERNAME', None)
self.assertEqual(p1.expanduser(),
P('C:/Users/alice/My Documents'))
self.assertRaises(KeyError, p2.expanduser)
env['USERNAME'] = 'alice'
self.assertEqual(p2.expanduser(),
P('C:/Users/alice/My Documents'))
self.assertEqual(p3.expanduser(),
P('C:/Users/bob/My Documents'))
self.assertEqual(p4.expanduser(), p4)
self.assertEqual(p5.expanduser(), p5)
self.assertEqual(p6.expanduser(), p6)
# Test the first lookup key in the env vars.
env['HOME'] = 'C:\\Users\\alice'
check()
# Test that HOMEPATH is available instead.
env.pop('HOME', None)
env['HOMEPATH'] = 'C:\\Users\\alice'
check()
env['HOMEDRIVE'] = 'C:\\'
env['HOMEPATH'] = 'Users\\alice'
check()
env.pop('HOMEDRIVE', None)
env.pop('HOMEPATH', None)
env['USERPROFILE'] = 'C:\\Users\\alice'
check()
class CompatiblePathTest(unittest.TestCase):
"""
Test that a type can be made compatible with PurePath
derivatives by implementing division operator overloads.
"""
class CompatPath:
"""
Minimum viable class to test PurePath compatibility.
Simply uses the division operator to join a given
string and the string value of another object with
a forward slash.
"""
def __init__(self, string):
self.string = string
def __truediv__(self, other):
return type(self)(f"{self.string}/{other}")
def __rtruediv__(self, other):
return type(self)(f"{other}/{self.string}")
def test_truediv(self):
result = pathlib.PurePath("test") / self.CompatPath("right")
self.assertIsInstance(result, self.CompatPath)
self.assertEqual(result.string, "test/right")
with self.assertRaises(TypeError):
# Verify improper operations still raise a TypeError
pathlib.PurePath("test") / 10
def test_rtruediv(self):
result = self.CompatPath("left") / pathlib.PurePath("test")
self.assertIsInstance(result, self.CompatPath)
self.assertEqual(result.string, "left/test")
with self.assertRaises(TypeError):
# Verify improper operations still raise a TypeError
10 / pathlib.PurePath("test")
if __name__ == "__main__":
unittest.main()
|
apache-2.0
| 4,477,674,600,441,387,500
| 39.507506
| 96
| 0.527636
| false
| 3.333116
| true
| false
| false
|
cloudfoundry/php-buildpack
|
tests/test_newrelic.py
|
1
|
12713
|
import os
import os.path
import tempfile
import shutil
import json
from nose.tools import eq_
from nose.tools import with_setup
from build_pack_utils import utils
from common.integration import ErrorHelper
from common.components import BuildPackAssertHelper
from common.components import HttpdAssertHelper
from common.components import PhpAssertHelper
from common.components import NoWebServerAssertHelper
from common.components import NewRelicAssertHelper
from common.components import DownloadAssertHelper
from common.base import BaseCompileApp
newrelic = utils.load_extension('extensions/newrelic')
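# The extension module is loaded dynamically from the buildpack source tree,
# so these tests exercise the same module the buildpack imports at staging.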
def create_manifest_file(manifest_filename, contents):
    with open(manifest_filename, 'w+') as manifest_file:
        manifest_file.write(contents)
class TestNewRelic(object):
def setUp(self):
self.manifest_dir = tempfile.mkdtemp()
self.buildpack_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), '..')
self.build_dir = tempfile.mkdtemp('build-')
self.php_dir = os.path.join(self.build_dir, 'php', 'etc')
os.makedirs(self.php_dir)
shutil.copy('defaults/config/php/7.3.x/php.ini', self.php_dir)
def tearDown(self):
if os.path.exists(self.build_dir):
shutil.rmtree(self.build_dir)
if os.path.exists(self.manifest_dir):
shutil.rmtree(self.manifest_dir)
def test_set_default_version(self):
manifest_filename = os.path.join(self.manifest_dir, 'manifest.yml')
create_manifest_file(manifest_filename, GOOD_MANIFEST)
# create the object with the buildpack manifest
nr = newrelic.NewRelicInstaller(utils.FormattedDict({
'BUILD_DIR': self.build_dir,
'PHP_VM': 'php',
'BP_DIR': self.buildpack_dir
}))
eq_(True, 'NEWRELIC_VERSION' in nr._ctx.keys())
del nr._ctx['NEWRELIC_VERSION']
# and test it with our custom manifest
nr._set_default_version(manifest_filename)
eq_(True, 'NEWRELIC_VERSION' in nr._ctx.keys())
eq_(nr._ctx['NEWRELIC_VERSION'], '6.4.0.99')
def test_set_default_version_bad_manifest(self):
manifest_filename = os.path.join(self.manifest_dir, 'manifest.yml')
create_manifest_file(manifest_filename, BAD_MANIFEST)
# create the object with the buildpack manifest
nr = newrelic.NewRelicInstaller(utils.FormattedDict({
'BUILD_DIR': self.build_dir,
'PHP_VM': 'php',
'BP_DIR': self.buildpack_dir
}))
# and test it with our custom manifest
exception = None
try:
nr._set_default_version(manifest_filename)
except RuntimeError as e:
exception = e
eq_("Error detecting NewRelic default version", str(exception))
def testDefaults(self):
nr = newrelic.NewRelicInstaller(utils.FormattedDict({
'BUILD_DIR': self.build_dir,
'PHP_VM': 'php',
'BP_DIR': self.buildpack_dir
}))
eq_(True, 'NEWRELIC_HOST' in nr._ctx.keys())
eq_(True, 'NEWRELIC_VERSION' in nr._ctx.keys())
eq_(True, 'NEWRELIC_PACKAGE' in nr._ctx.keys())
eq_(True, 'NEWRELIC_DOWNLOAD_URL' in nr._ctx.keys())
eq_(True, 'NEWRELIC_STRIP' in nr._ctx.keys())
def testShouldNotInstall(self):
nr = newrelic.NewRelicInstaller(utils.FormattedDict({
'BUILD_DIR': self.build_dir,
'BP_DIR': self.buildpack_dir
}))
eq_(False, nr.should_install())
@with_setup(setup=setUp, teardown=tearDown)
def testShouldInstall(self):
ctx = utils.FormattedDict({
'BUILD_DIR': self.build_dir,
'BP_DIR': self.buildpack_dir,
'NEWRELIC_LICENSE': 'JUNK_LICENSE',
'VCAP_APPLICATION': {
'name': 'app-name-1'
},
'PHP_VM': 'php'
})
nr = newrelic.NewRelicInstaller(ctx)
eq_(True, nr.should_install())
eq_('x64', nr._php_arch)
#eq_('@{HOME}/php/lib/php/extensions/no-debug-non-zts-20170718', nr._php_extn_dir)
eq_(False, nr._php_zts)
#eq_('20170718', nr._php_api)
#eq_('@{HOME}/newrelic/agent/x64/newrelic-20170718.so', nr.newrelic_so)
eq_('app-name-1', nr.app_name)
eq_('JUNK_LICENSE', nr.license_key)
eq_('@{HOME}/logs/newrelic.log', nr.log_path)
eq_('@{HOME}/logs/newrelic-daemon.log', nr.daemon_log_path)
eq_('@{HOME}/newrelic/daemon/newrelic-daemon.x64', nr.daemon_path)
eq_('@{HOME}/newrelic/daemon.sock', nr.socket_path)
eq_('@{HOME}/newrelic/daemon.pid', nr.pid_path)
@with_setup(setup=setUp, teardown=tearDown)
def testShouldInstallService(self):
ctx = utils.FormattedDict({
'BP_DIR': self.buildpack_dir,
'BUILD_DIR': self.build_dir,
'VCAP_SERVICES': {
'newrelic': [{
'name': 'newrelic',
'label': 'newrelic',
'tags': ['Monitoring'],
'plan': 'standard',
'credentials': {'licenseKey': 'LICENSE'}}]
},
'VCAP_APPLICATION': {
'name': 'app-name-1'
},
'PHP_VM': 'php'
})
nr = newrelic.NewRelicInstaller(ctx)
eq_(True, nr.should_install())
eq_('x64', nr._php_arch)
#eq_('@{HOME}/php/lib/php/extensions/no-debug-non-zts-20170718',
# nr._php_extn_dir)
eq_(False, nr._php_zts)
#eq_('20170718', nr._php_api)
#eq_('@{HOME}/newrelic/agent/x64/newrelic-20170718.so', nr.newrelic_so)
eq_('app-name-1', nr.app_name)
eq_('LICENSE', nr.license_key)
eq_('@{HOME}/logs/newrelic.log', nr.log_path)
eq_('@{HOME}/logs/newrelic-daemon.log', nr.daemon_log_path)
eq_('@{HOME}/newrelic/daemon/newrelic-daemon.x64', nr.daemon_path)
eq_('@{HOME}/newrelic/daemon.sock', nr.socket_path)
eq_('@{HOME}/newrelic/daemon.pid', nr.pid_path)
@with_setup(setup=setUp, teardown=tearDown)
def testShouldInstallServiceAndManual(self):
ctx = utils.FormattedDict({
'BP_DIR': self.buildpack_dir,
'BUILD_DIR': self.build_dir,
'VCAP_SERVICES': {
'newrelic': [{
'name': 'newrelic',
'label': 'newrelic',
'tags': ['Monitoring'],
'plan': 'standard',
'credentials': {'licenseKey': 'LICENSE'}}]
},
'NEWRELIC_LICENSE': 'LICENSE2',
'VCAP_APPLICATION': {
'name': 'app-name-2'
},
'PHP_VM': 'php'
})
nr = newrelic.NewRelicInstaller(ctx)
eq_(True, nr.should_install())
eq_('x64', nr._php_arch)
# TODO eq_('@{HOME}/php/lib/php/extensions/no-debug-non-zts-20170718',
#nr._php_extn_dir)
eq_(False, nr._php_zts)
# TODO eq_('20170718', nr._php_api)
#eq_('@{HOME}/newrelic/agent/x64/newrelic-20170718.so', nr.newrelic_so)
eq_('app-name-2', nr.app_name)
eq_('LICENSE2', nr.license_key)
eq_('@{HOME}/logs/newrelic.log', nr.log_path)
eq_('@{HOME}/logs/newrelic-daemon.log', nr.daemon_log_path)
eq_('@{HOME}/newrelic/daemon/newrelic-daemon.x64', nr.daemon_path)
eq_('@{HOME}/newrelic/daemon.sock', nr.socket_path)
eq_('@{HOME}/newrelic/daemon.pid', nr.pid_path)
@with_setup(setup=setUp, teardown=tearDown)
def testModifyPhpIni(self):
ctx = utils.FormattedDict({
'BP_DIR': self.buildpack_dir,
'BUILD_DIR': self.build_dir,
'NEWRELIC_LICENSE': 'JUNK_LICENSE',
'VCAP_APPLICATION': {
'name': 'app-name-1'
},
'PHP_VM': 'php'
})
nr = newrelic.NewRelicInstaller(ctx)
nr.modify_php_ini()
with open(os.path.join(self.php_dir, 'php.ini'), 'rt') as php_ini:
lines = php_ini.readlines()
eq_(True, lines.index('extension=%s\n' % nr.newrelic_so) >= 0)
eq_(True, lines.index('[newrelic]\n') >= 0)
eq_(True, lines.index('newrelic.license=@{NEWRELIC_LICENSE}\n') >= 0)
eq_(True, lines.index('newrelic.appname=%s\n' % nr.app_name) >= 0)
class TestNewRelicCompiled(BaseCompileApp):
def __init__(self):
self.app_name = 'app-1'
def setUp(self):
BaseCompileApp.setUp(self)
os.environ['NEWRELIC_LICENSE'] = 'JUNK_LICENSE'
os.environ['VCAP_APPLICATION'] = json.dumps({
'name': 'app-name-1'
})
def test_with_httpd_and_newrelic(self):
# helpers to confirm the environment
bp = BuildPackAssertHelper()
nr = NewRelicAssertHelper()
httpd = HttpdAssertHelper()
php = PhpAssertHelper()
# set web server to httpd, since that's what we're expecting here
self.opts.set_web_server('httpd')
# run the compile step of the build pack
output = ErrorHelper().compile(self.bp)
# confirm downloads
DownloadAssertHelper(3, 2).assert_downloads_from_output(output)
# confirm start script
bp.assert_start_script_is_correct(self.build_dir)
httpd.assert_start_script_is_correct(self.build_dir)
php.assert_start_script_is_correct(self.build_dir)
# confirm bp utils installed
bp.assert_scripts_are_installed(self.build_dir)
bp.assert_config_options(self.build_dir)
# check env & proc files
httpd.assert_contents_of_procs_file(self.build_dir)
httpd.assert_contents_of_env_file(self.build_dir)
php.assert_contents_of_procs_file(self.build_dir)
php.assert_contents_of_env_file(self.build_dir)
# webdir exists
httpd.assert_web_dir_exists(self.build_dir, self.opts.get_webdir())
# check php & httpd installed
httpd.assert_files_installed(self.build_dir)
php.assert_files_installed(self.build_dir)
nr.assert_files_installed(self.build_dir)
class TestNewRelicWithApp5(BaseCompileApp):
def __init__(self):
self.app_name = 'app-5'
def setUp(self):
BaseCompileApp.setUp(self)
os.environ['NEWRELIC_LICENSE'] = 'JUNK_LICENSE'
os.environ['VCAP_APPLICATION'] = json.dumps({
'name': 'app-name-1'
})
def test_standalone(self):
# helpers to confirm the environment
bp = BuildPackAssertHelper()
php = PhpAssertHelper()
none = NoWebServerAssertHelper()
nr = NewRelicAssertHelper()
# no web server
self.opts.set_web_server('none')
# run the compile step of the build pack
output = ErrorHelper().compile(self.bp)
# confirm downloads
DownloadAssertHelper(2, 1).assert_downloads_from_output(output)
# confirm httpd and nginx are not installed
none.assert_no_web_server_is_installed(self.build_dir)
# confirm start script
bp.assert_start_script_is_correct(self.build_dir)
php.assert_start_script_is_correct(self.build_dir)
# confirm bp utils installed
bp.assert_scripts_are_installed(self.build_dir)
# check env & proc files
none.assert_contents_of_procs_file(self.build_dir)
php.assert_contents_of_env_file(self.build_dir)
# webdir exists
none.assert_no_web_dir(self.build_dir, self.opts.get_webdir())
# check php cli installed
none.assert_files_installed(self.build_dir)
nr.assert_files_installed(self.build_dir)
BAD_MANIFEST = '''\
---
language: php
default_versions:
- name: newrelic
version: 99.3.0.161
dependencies:
- name: newrelic
version: 7.4.0.198
uri: https://download.newrelic.com/php_agent/archive/7.4.0.198/newrelic-php5-7.4.0.198-linux.tar.gz
cf_stacks:
- cflinuxfs3
sha256: 3640d3cad6b5199f54a6b54a627235d6
- name: newrelic
version: 6.4.0.99
uri: https://download.newrelic.com/php_agent/archive/6.4.0.99/newrelic-php5-6.4.0.99-linux.tar.gz
cf_stacks:
- cflinuxfs3
sha256: a5d5178f0f8133a65baf942a07408ba6
'''
GOOD_MANIFEST = '''\
---
language: php
default_versions:
- name: newrelic
version: 6.4.0.99
dependencies:
- name: newrelic
version: 7.4.0.198
uri: https://download.newrelic.com/php_agent/archive/7.4.0.198/newrelic-php5-7.4.0.198-linux.tar.gz
cf_stacks:
- cflinuxfs3
sha256: 3640d3cad6b5199f54a6b54a627235d6
- name: newrelic
version: 6.4.0.99
uri: https://download.newrelic.com/php_agent/archive/6.4.0.99/newrelic-php5-6.4.0.99-linux.tar.gz
cf_stacks:
- cflinuxfs3
sha256: a5d5178f0f8133a65baf942a07408ba6
'''
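# Hedged helper (not part of the original tests): a standalone way to read
# the default version pin from the manifest fixtures above. PyYAML is an
# assumption here; the buildpack itself ships its own manifest handling.
def _manifest_default_version(manifest_text):
    import yaml
    doc = yaml.safe_load(manifest_text)
    # GOOD_MANIFEST pins 6.4.0.99; BAD_MANIFEST pins 99.3.0.161, which
    # matches no dependency entry and makes _set_default_version fail.
    return doc['default_versions'][0]['version']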
|
apache-2.0
| 1,364,594,228,700,609,300
| 36.391176
| 101
| 0.602454
| false
| 3.261416
| true
| false
| false
|
mcaleavya/bcc
|
examples/tracing/stack_buildid_example.py
|
1
|
3105
|
#!/usr/bin/python
#
# An example usage of stack_build_id
# Most of the code here is borrowed from tools/profile.py
#
# Steps for using this code
# 1) Start ping program in one terminal eg invocation: ping google.com -i0.001
# 2) Change the path of libc specified in b.add_module() below
# 3) Invoke the script as 'python stack_buildid_example.py'
# 4) o/p of the tool is as shown below
# python examples/tracing/stack_buildid_example.py
# sendto
# - ping (5232)
# 2
#
# REQUIRES: Linux 4.17+ (BPF_BUILD_ID support)
# Licensed under the Apache License, Version 2.0 (the "License")
# 03-Jan-2019 Vijay Nag
from __future__ import print_function
from bcc import BPF, PerfType, PerfSWConfig
from sys import stderr
from time import sleep
import argparse
import signal
import os
import subprocess
import errno
import multiprocessing
import ctypes as ct
def Get_libc_path():
# A small helper function that returns full path
# of libc in the system
cmd = 'cat /proc/self/maps | grep libc | awk \'{print $6}\' | uniq'
output = subprocess.check_output(cmd, shell=True)
if not isinstance(output, str):
output = output.decode()
return output.split('\n')[0]
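# Hedged alternative (not part of the original example): locating libc via
# ctypes.util instead of a shell pipeline. Note that find_library() returns
# a soname (e.g. 'libc.so.6'), not necessarily a full path, so the /proc
# based helper above remains the default.
def get_libc_path_ctypes():
    from ctypes.util import find_library
    return find_library('c') or Get_libc_path()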
bpf_text = """
#include <uapi/linux/ptrace.h>
#include <uapi/linux/bpf_perf_event.h>
#include <linux/sched.h>
struct key_t {
u32 pid;
int user_stack_id;
char name[TASK_COMM_LEN];
};
BPF_HASH(counts, struct key_t);
BPF_STACK_TRACE_BUILDID(stack_traces, 128);
int do_perf_event(struct bpf_perf_event_data *ctx) {
u32 pid = bpf_get_current_pid_tgid() >> 32;
// create map key
struct key_t key = {.pid = pid};
bpf_get_current_comm(&key.name, sizeof(key.name));
key.user_stack_id = stack_traces.get_stackid(&ctx->regs, BPF_F_USER_STACK);
if (key.user_stack_id >= 0) {
counts.increment(key);
}
return 0;
}
"""
b = BPF(text=bpf_text)
b.attach_perf_event(ev_type=PerfType.SOFTWARE,
ev_config=PerfSWConfig.CPU_CLOCK, fn_name="do_perf_event",
sample_period=0, sample_freq=49, cpu=0)
# Add the list of libraries/executables to the build sym cache for sym resolution
# Change the libc path if it is different on a different machine.
# libc.so and ping are added here so that any symbols pertaining to
# libc or ping are resolved. More executables/libraries can be added here.
b.add_module(Get_libc_path())
b.add_module("/usr/sbin/sshd")
b.add_module("/bin/ping")
counts = b.get_table("counts")
stack_traces = b.get_table("stack_traces")
duration = 2
def signal_ignore(signal, frame):
    # swallow Ctrl-C during cleanup so the summary below still prints
    print()
try:
sleep(duration)
except KeyboardInterrupt:
# as cleanup can take some time, trap Ctrl-C:
signal.signal(signal.SIGINT, signal_ignore)
user_stack = []
for k, v in sorted(counts.items(), key=lambda kv: kv[1].value):
    user_stack = [] if k.user_stack_id < 0 else \
        stack_traces.walk(k.user_stack_id)
    user_stack = list(user_stack)
for addr in user_stack:
print(" %s" % b.sym(addr, k.pid).decode('utf-8', 'replace'))
print(" %-16s %s (%d)" % ("-", k.name.decode('utf-8', 'replace'), k.pid))
print(" %d\n" % v.value)
|
apache-2.0
| 8,691,926,209,330,545,000
| 28.571429
| 81
| 0.681804
| false
| 2.991329
| false
| false
| false
|
maschwanden/boxsimu
|
boxsimu/visualize.py
|
1
|
24184
|
# -*- coding: utf-8 -*-
"""
Created on Thu Jul 13 15:57:03 2017
@author: Mathias Aschwanden (mathias.aschwanden@gmail.com)
"""
import os
import re
import copy
import importlib
import svgwrite
from svgwrite import cm, mm
import numpy as np
from . import utils as bs_utils
class BoxModelSystemSvgHelper:
"""Helper Class to visualize/plot a BoxModelSystem."""
def __init__(self):
self.box_rect_width = 300
self.box_rect_height = 300
self.system_boxes_arrangement_type = 'circle'
self.system_boxes_arrangement_radius = None
self.system_boxes_arrangement_factor = 1.7
self.system_boxes_arrangement_angle_offset = 0
self.flow_stroke_width = 4
self.flow_color = 'darkblue'
self.flow_arrow_triangle_size = 4
self.flux_stroke_width = 4
self.flux_color = 'darkblue'
self.flux_arrow_triangle_size = 10
self.box_svg_helpers = None
self.dwg = None
def save_system_as_svg(self, system, filename):
"""Save the visualization of system as a SVG file."""
if system.N_boxes == 2:
self.system_boxes_arrangement_factor = 1.1
elif system.N_boxes == 3:
self.system_boxes_arrangement_factor = 1.2
elif system.N_boxes == 4:
self.system_boxes_arrangement_factor = 1.4
elif system.N_boxes == 5:
self.system_boxes_arrangement_factor = 1.6
elif system.N_boxes == 6:
self.system_boxes_arrangement_factor = 1.8
# self.dwg = svgwrite.Drawing(size=self._get_system_svg_size())
self.dwg = svgwrite.Drawing(size=('32cm', '10cm'), debug=True)
self.dwg.viewbox(-100, 0, 600, 400)
if not self.box_svg_helpers:
self.box_svg_helpers = self._get_system_box_svg_helpers(system)
system_svg_group = self.get_system_svg_group(system)
self._save_group_as_svg(system_svg_group, filename)
def get_system_svg_group(self, system):
"""Return a SVG representation of the BoxModelSystem instance."""
if not self.box_svg_helpers:
self.box_svg_helpers = self._get_system_box_svg_helpers(system)
group_id = bs_utils.get_valid_svg_id_from_string(system.name)
group = self.dwg.g(id=group_id)
for box_svg_helper in self.box_svg_helpers:
group.add(box_svg_helper.as_svg_group())
for flow in system.flows:
group.add(self._get_flow_arrow(flow))
return group
def save_box_as_svg(self, box, filename=None):
"""Return a SVG representation of the Box instance."""
self.dwg = svgwrite.Drawing(size=self._get_box_svg_size())
self._save_group_as_svg(self.get_box_svg_group(box), filename)
def get_box_svg_group(self, box):
"""Return the SVG representation of the Box instance."""
group_id = bs_utils.get_valid_svg_id_from_string(box.name)
group = self.dwg.g(id=group_id)
box_svg_helper = self._get_box_svg_helper(box)
group.add(box_svg_helper.as_svg_group())
return group
# HELPER functions
def _save_group_as_svg(self, group, filename):
"""Save a svgwrite group instance as a SVG file."""
# dwg = svgwrite.Drawing(filename=filename)
dwg = copy.deepcopy(self.dwg)
dwg.filename = filename
dwg.add(group)
dwg.save()
def _get_system_box_svg_helpers(self, system):
"""Return a list of BoxSvgHelper for all boxes of the system."""
box_positions = self._get_box_positions(system.N_boxes)
box_svg_helpers = [None] * system.N_boxes
for box_name, box in system.boxes.items():
x, y = box_positions[box.id]
tmp_box_svg_helper = self._get_box_svg_helper(box, x, y)
box_svg_helpers[box.id] = tmp_box_svg_helper
box_svg_helpers = self._adjust_box_svg_helper_widths(box_svg_helpers)
return box_svg_helpers
def _get_box_svg_helper(self, box, x=0, y=0):
box_group_id = '{}_box'.format(box.name)
box_svg_helper = BoxSvgHelper(
group_id=box_group_id,
x=x, y=y,
width=self.box_rect_width,
height=self.box_rect_height,
text_lines=[
'Fluid: {}'.format(box.fluid.name),
'Mass: {:.3e}'.format(box.mass),
],
title=box.description,
)
        processes_group_id = '{}_processes'.format(box.name)
box_process_names = [p.name for p in box.processes]
while len(box_process_names) < 3:
box_process_names.append('')
processes = box_svg_helper.add_child(
            group_id=processes_group_id,
text_lines=box_process_names,
title='Processes',
)
reaction_group_id = '{}_reactions'.format(box.name)
box_reaction_names = [p.name for p in box.reactions]
while len(box_reaction_names) < 3:
box_reaction_names.append('')
reactions = box_svg_helper.add_child(
group_id=reaction_group_id,
text_lines=box_reaction_names,
title='Reactions',
)
return box_svg_helper
def _get_box_positions(self, N_nodes):
positions = []
angle_offset = self.system_boxes_arrangement_angle_offset
radius = self.system_boxes_arrangement_radius
if not radius:
radius_factor = self.system_boxes_arrangement_factor
radius = radius_factor * max(self.box_rect_width,
self.box_rect_height)
for i in range(N_nodes):
if self.system_boxes_arrangement_type == 'half_circle':
angle = (i * np.pi / (N_nodes-1)) + angle_offset
else: # if self.system_boxes_arrangement_type == 'circle':
angle = (i * 2 * np.pi / (N_nodes)) + angle_offset
x = radius * np.cos(angle)
y = radius * np.sin(angle)
positions.append((x,y))
return positions
def _adjust_box_svg_helper_widths(self, helpers):
"""Adjust all box_svg_helpers to the same width."""
max_width = 0
for helper in helpers:
if helper.width > max_width:
max_width = helper.width
for helper in helpers:
helper._width = max_width
return helpers
    def _distance_sort_corners(self, helper, reference_point):
        """Return corners sorted by their distance to a reference point."""
        reference_point = np.array(reference_point)
        corners = helper.get_box_rect_corner_coordinates()
        np_corners = [np.array(c) for c in corners]
        distances = [np.linalg.norm(c - reference_point) for c in np_corners]
sorted_corners = [c for (distance,c) in sorted(zip(distances,corners))]
return sorted_corners
def _get_center_between_points(self, p1, p2):
p1 = np.array(p1)
p2 = np.array(p2)
return (p1 + p2)/2
def _get_conncection_point_relative_to_reference_point(self, helper,
reference_point):
"""Return connection point for flow lines relative to ref point."""
sorted_corners = self._distance_sort_corners(helper, reference_point)
p1, p2 = sorted_corners[:2]
connection_point = self._get_center_between_points(p1, p2)
return connection_point
def _get_flow_arrow(self, flow):
src_point = None
trg_point = None
if not flow.source_box:
helper = self.box_svg_helpers[flow.target_box.id]
            box_center = np.array(
                helper.get_box_rect_center_coordinates())
box_connection_point_to_origin = np.array(
self._get_conncection_point_relative_to_reference_point(
helper, (0,0)))
v1 = box_center - box_connection_point_to_origin
trg_point = box_center + v1
src_point = trg_point + 0.5 * v1
elif not flow.target_box:
helper = self.box_svg_helpers[flow.source_box.id]
            box_center = np.array(
                helper.get_box_rect_center_coordinates())
box_connection_point_to_origin = np.array(
self._get_conncection_point_relative_to_reference_point(
helper, (0,0)))
v1 = box_center - box_connection_point_to_origin
src_point = box_center + v1
trg_point = src_point + 0.5 * v1
else:
src_helper = self.box_svg_helpers[flow.source_box.id]
trg_helper = self.box_svg_helpers[flow.target_box.id]
src_point = self._get_conncection_point_relative_to_reference_point(
src_helper, (0,0))
trg_point = self._get_conncection_point_relative_to_reference_point(
trg_helper, (0,0))
arrow = self._get_arrow(start=src_point, end=trg_point,
stroke_color=self.flow_color,
stroke_width=self.flow_stroke_width,
triangle_size=self.flow_arrow_triangle_size)
return arrow
def _get_arrow(self, start, end, stroke_color, stroke_width,
triangle_size):
arrow_vector = end - start
arrow_unit_vector = arrow_vector / np.linalg.norm(arrow_vector)
rot90_matrix = self._get_rot90_matrix()
arrow_unit_normal_vector = np.dot(rot90_matrix, arrow_unit_vector)
triangle_point1 = triangle_size * arrow_unit_vector
triangle_point2 = 0.5 * triangle_size * arrow_unit_normal_vector
triangle_point3 = -0.5 * triangle_size * arrow_unit_normal_vector
end[0] += triangle_size
arrow = self.dwg.line(start=start, end=end, stroke=stroke_color,
stroke_width=stroke_width)
marker = self.dwg.marker(insert=0.75*arrow_unit_vector*triangle_size,
size=(triangle_size, triangle_size))
marker.add(self.dwg.polygon([triangle_point1, triangle_point2,
triangle_point3], fill=stroke_color))
self.dwg.defs.add(marker)
arrow.set_markers((None, None, marker))
return arrow
def _get_rot90_matrix(self):
angle = np.deg2rad(90)
return np.array([[np.cos(angle), np.sin(angle)],
[-np.sin(angle), np.cos(angle)]])
def _get_system_svg_size(self):
return (100, 100)
def _get_box_svg_size(self):
return (100, 100)
class BoxSvgHelper:
def __init__(self, group_id, x, y, width, height=None,
text_lines=None, title=None):
text_lines = text_lines or []
if not height and len(text_lines) == 0:
raise ValueError('Either height or text_lines must be given.')
self.group_id = bs_utils.get_valid_svg_id_from_string(group_id)
self._x = x
self._y = y
self._height = height
self._width = width
self.text_lines = text_lines
self.text_font_size = 12
self.text_font_color = 'black'
self.text_alignement = 'left'
self.title = title
self.title_font_size = 24
self.title_font_color = 'black'
self.title_alignement = 'middle'
self.child_title_font_size = 15
self.child_title_font_color = 'black'
self.child_title_alignement = 'left'
self.title_extern = True
self.child_title_extern = True
self.color = 'lightgrey'
self.opacity = 0.7
self.stroke_color = 'black'
self.stroke_width = 5
self.stroke_opacity = 1
self.child_relative_width = 0.925
self.child_color = 'darkgrey'
self.child_opacity = 0.5
self.child_stroke_color = 'white'
self.child_stroke_width = 3
self.child_stroke_opacity = 1
self._content_absolute_margin = 10
        # Maximal width of the character 'W' in the title and text
self.title_max_W_width = self.title_font_size
self.text_max_W_width = self.text_font_size
self.title_avg_char_width = 0.8 * self.title_max_W_width
self.text_avg_char_width = 0.8 * self.text_max_W_width
self.children = []
self.dwg = svgwrite.Drawing()
self.group = self.dwg.g(id=group_id)
@property
def width(self):
"""Return width of the instance."""
width = self._width
max_title_width = self.get_max_title_width()
max_text_width = self.get_max_text_width()
max_children_width = self.get_max_children_width()
if max_title_width > width:
width = max_title_width
if max_text_width > width:
width = max_text_width
if max_children_width > width:
width = max_children_width
self._width = width
self._adjust_children_width()
return self._width
@property
def height(self):
"""Return height of the instance."""
height = 0
if self._height:
height = self._height
element_height = (self.get_text_height() + self.get_title_height() +
self.get_children_height())
if element_height > height:
height = element_height
return height
@property
def x(self):
"""Return left edge of the instance."""
return self._x
@property
def y(self):
"""Return top edge of the instance."""
return self._y
@property
def content_absolute_margin(self):
if not self._content_absolute_margin:
width_relative = self.child_relative_width
self._content_absolute_margin = ((1-width_relative)/2 * self._width)
return self._content_absolute_margin
# PUBLIC functions
def as_svg_group(self):
"""Return the SVG representation of the instance."""
self._adjust_children_width()
self.group.add(self._get_svg_rect_element())
title = self._get_svg_title_element()
if title:
self.group.add(title)
text = self._get_svg_text_element()
if text:
self.group.add(text)
children = self._get_svg_children_element()
if children:
self.group.add(children)
return self.group
def add_child(self, group_id, text_lines=None, height=None,
width_relative=None, title=None):
"""Add a child instance."""
text_lines = text_lines or []
if not height and len(text_lines) == 0:
raise ValueError('Either height or text_lines must be given.')
        width_relative = width_relative or self.child_relative_width
x = self.x + self.content_absolute_margin
y = self.get_children_bottom_y()
width = width_relative * self.width
child = self.__class__(group_id, x, y, width,
height=height, text_lines=text_lines, title=title)
child.title_extern = self.child_title_extern
child.color = self.child_color
child.opacity = self.child_opacity
child.stroke_color = self.child_stroke_color
child.stroke_width = self.child_stroke_width
child.stroke_opacity = self.child_stroke_opacity
child.title_font_size = self.child_title_font_size
child.title_font_color = self.child_title_font_color
child.title_alignement = self.child_title_alignement
self.children.append(child)
return child
# RECT info functions
def get_rect_height(self):
"""Return height of the rect element."""
height = self.height
if self.title_extern:
height = self.height - self.get_title_height()
return height
def get_rect_top_y(self):
"""Return upper edge of the rect element."""
y = self.y
if self.title_extern:
y = self.get_title_bottom_y()
return y
def get_rect_bottom_y(self):
"""Return bottom edge of the rect element."""
y = self.get_rect_top_y() + self.get_rect_height()
return y
# TITLE info functions
def get_max_title_width(self):
"""Return approx. maximal width (px) of title text."""
max_width = 0
if self.title:
max_width = len(self.title.strip()) * self.title_avg_char_width
return max_width
def get_title_height(self):
"""Return total height (with margins) of the title element."""
height = 0
if self.title:
height = 1.5 * self.title_font_size
return height
def get_title_top_y(self):
"""Return upper edge of title."""
y = self.y
return y
def get_title_bottom_y(self):
"""Return bottom edge of title."""
y = self.get_title_top_y() + self.get_title_height()
return y
# TEXT info functions
def get_max_text_width(self):
"""Return approx. maximal width (px) of all text lines."""
max_width = 0
if self.text_lines:
for text in self.text_lines:
tmp_width = len(text.strip()) * self.text_avg_char_width
if tmp_width > max_width:
max_width = tmp_width
return max_width
def get_text_height(self):
"""Return total height (with margins) of the text lines."""
height = 0
if self.text_lines:
height = ((len(self.text_lines) * 1.5 + 0.5) *
self.text_font_size)
return height
def get_text_top_y(self):
"""Return upper edge of text lines."""
y = self.get_title_bottom_y()
return y
def get_text_bottom_y(self):
"""Return bottom edge of text lines."""
y = self.get_text_top_y() + self.get_text_height()
return y
# CHILD info functions
def get_max_children_width(self):
"""Return approx. maximal width (px) of the all children."""
max_width = 0
if self.children:
for boxrect in self.children:
boxrect_width = boxrect.width
needed_width = boxrect_width + 2 * self.content_absolute_margin
if needed_width > max_width:
max_width = needed_width
return max_width
def get_children_height(self):
"""Return total height (with margins) of all children."""
height = 0
if self.children:
for rect in self.children:
# increase children height by the height of the child_rect plus
# a margin equal to the text_font_size
height += rect.height + self.text_font_size
return height
def get_children_top_y(self):
"""Return upper edge of children."""
y = self.get_text_bottom_y()
return y
def get_children_bottom_y(self):
"""Return bottom edge of children."""
y = self.get_children_top_y() + self.get_children_height()
return y
def get_box_rect_corner_coordinates(self):
"""Return coordinates of corners of the rect-element of the instance.
        Return coordinates as a list of tuples beginning with the top left,
followed by the top right, bottom right, and bottom left corner.
Return:
corner_coords (list of tuple of floats):
[(tl_x, tl_y), (tr_x, tr_y), (br_x, br_y), (bl_x, bl_y)]
"""
# top left corner
tl = (self.x, self.y)
tr = (self.x + self.width, self.y)
br = (self.x + self.width, self.y + self.height)
bl = (self.x, self.y + self.height)
return [tl, tr, br, bl]
    def get_box_rect_center_coordinates(self):
"""Return coordinates of the center of the rect-element."""
return (self.x + 0.5 * self.width, self.y + 0.5 * self.height)
# HELPER functions
def _get_svg_rect_element(self):
"""Return a rect svg element of the instance."""
rect_id = '{}_rect'.format(self.group_id)
y = self.get_rect_top_y()
height = self.get_rect_height()
return self._rect(self.x, y, self.width, height, rect_id)
def _get_svg_title_element(self):
"""Return a text svg element of the instance's title."""
if self.title:
if self.title_alignement == 'middle':
x = self.x + 0.5 * self.width
elif self.title_alignement == 'left':
if self.title_extern:
x = self.x
else:
x = self.x + self.content_absolute_margin
else:
raise ValueError('title_alignement must be "middle" or "left".')
y = self.get_title_top_y() + 0.5 * self.get_title_height()
title_id = '{}_title'.format(self.group_id)
return self._title(self.title.strip(), x, y, title_id)
def _get_svg_text_element(self):
"""Return a svg group with all text lines as text svg elements."""
if self.text_lines:
if self.text_alignement == 'middle':
x = self.x + 0.5 * self.width
elif self.text_alignement == 'left':
x = self.x + self.content_absolute_margin
else:
raise ValueError('text_alignement must be "middle" or "left".')
text_group = self.dwg.g(id='{}_text'.format(self.group_id))
rel_text_y = 1.0 / (len(self.text_lines) + 1)
for i, text in enumerate(self.text_lines):
rel_pos = (i + 1) * rel_text_y
y = self.get_text_top_y() + rel_pos * self.get_text_height()
text_group.add(self._text(text.strip(), x, y))
return text_group
def _get_svg_children_element(self):
"""Return the complete svg-representation of all children."""
children_group = self.dwg.g(id='{}_children'.format(self.group_id))
for child_rect in self.children:
children_group.add(child_rect.as_svg_group())
return children_group
def _rect(self, x, y, width, height, rect_id=None):
return self.dwg.rect(
insert=(x, y),
size=(width, height),
fill=self.color,
opacity=self.opacity,
stroke=self.stroke_color,
stroke_width=self.stroke_width,
stroke_opacity=self.stroke_opacity,
id=rect_id
)
def _title(self, string, x, y, title_id=None):
style_template = 'text-anchor:{}; dominant-baseline:mathematical'
style = style_template.format(self.title_alignement)
return self.dwg.text(
string,
insert=(x,y),
fill=self.title_font_color,
font_size=self.title_font_size,
style=style,
id=title_id,
)
def _text(self, string, x, y, text_id=None):
style_template = 'text-anchor:{}; dominant-baseline:mathematical'
style = style_template.format(self.text_alignement)
return self.dwg.text(
string,
insert=(x,y),
fill=self.text_font_color,
font_size=self.text_font_size,
style=style,
id=text_id,
)
def _adjust_children_width(self):
"""Correct/Adjust the width and x-pos of all child boxrects.
Due to the dynamic width/height of the master-box, child boxes that
are generated at different times can differ in their width. That's
why before the final svg element is generated, all width and x-pos
of the child boxrects are corrected first.
"""
width = copy.copy(self._width)
child_width = width - 2 * self.content_absolute_margin
child_x = self.x + self.content_absolute_margin
for boxrect in self.children:
boxrect._width = child_width
boxrect._x = child_x
boxrect._content_absolute_margin = self.content_absolute_margin
boxrect._adjust_children_width()
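# Hedged usage sketch (not part of the original module): rendering a
# boxsimu BoxModelSystem to an SVG file with the helper above. 'system'
# is an assumption -- any object exposing N_boxes, boxes and flows as
# used by BoxModelSystemSvgHelper works here.
def _example_render_system(system, filename='system.svg'):
    helper = BoxModelSystemSvgHelper()
    helper.save_system_as_svg(system, filename)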
|
mit
| -172,437,947,065,888,160
| 35.476621
| 80
| 0.571204
| false
| 3.6849
| false
| false
| false
|
OCM-Lab-PUC/switch-chile
|
python_utility_scripts/existing_projects_plant_grouping.py
|
1
|
5767
|
# -*- coding: utf-8 -*-
# Copyright 2016 The Switch-Chile Authors. All rights reserved.
# Licensed under the Apache License, Version 2, which is in the LICENSE file.
# Operations, Control and Markets laboratory at Pontificia Universidad
# Católica de Chile.
"""
Groups generation units by plant to reduce number of variables. This is
not adequate when performing UC or OPF, but is acceptable when considering
long term planning.
"""
from csv import reader, writer
from getpass import getpass
import sys
import psycopg2
############
# Parameters
# Name of the ungrouped file
csv_infile = 'centrales.csv'
# Type of grouping. More than one may be chosen, to get multiple outputs
group_by_plant_name = True
plants = [
'abanico','angostura','antilhue','arauco','atacama',
'bocamina','callao','candelaria','casblanca','chuyaca',
'cmpc_pacifico','cochrane','colihues','arica','enaex',
'iquique','diesel_zofri','emelda','escuadron','esperanza_u',
'estandartes_zofri','florida','guacolda','hidrobonito',
'huasco_tg','isla','laguna_verde','laja_u','lalackama','lautaro',
'loma_los_colorados','los_corrales','los_morros','los_quilos',
'lousiana_pacific','maitenes','multiexport','munilque',
'pilmaiquen','pozo_almonte_solar','puntilla','quilleco',
'quintero','salmofood','san_lorenzo_de_d_de_almagro','santa_marta',
'skretting','solar_jama','taltal_','angamos','mejillones',
'norgener','tocopilla','tomaval','ujina','ventanas_','watt',
'yungay'
]
# 'atacama' has two different consumption entries; keep the second one
# 'tocopilla' has several unit groups; choose them carefully
def plant_name(name):
for p_name in plants:
if p_name in name:
return p_name
return ''
ok_to_group = False
def group_plant(units):
max_power = 0
n_units = 0
spec_cons = 0
for u in units:
n_units += int(u[2])
max_power += float(u[5]) * int(u[2])
if u[12] != '':
spec_cons += float(u[12])
else:
spec_cons = None
# Average specific consumption rate of fuel
if spec_cons:
spec_cons = spec_cons/n_units
units[-1][0] = 'central_'+plant_name(units[-1][0])
units[-1][2] = n_units
units[-1][5] = max_power
units[-1][12] = spec_cons
return units[-1]
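# Hedged illustration (not part of the original script): how group_plant()
# collapses two rows from the same plant. The row layout (unit count at
# index 2, max power at 5, specific consumption at 12) follows the code
# above; the values themselves are made up.
def _demo_group_plant():
    rows = [
        ['guacolda_1', '', '1', '', '', '152.0', '', '', '', '', '', '', '0.24'],
        ['guacolda_2', '', '1', '', '', '152.0', '', '', '', '', '', '', '0.26'],
    ]
    grouped = group_plant(rows)
    # expect: name='central_guacolda', units=2, power=304.0, avg consumption=0.25
    print(grouped[0], grouped[2], grouped[5], grouped[12])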
grouped_units = []
with open(csv_infile, 'r') as f:
all_units = []
read = reader(f)
for row in read:
all_units.append(row)
all_units.sort()
aux_plant = []
for index, unit in enumerate(all_units):
name = unit[0]
if plant_name(name) == 'tocopilla' or plant_name(name) == 'mejillones':
grouped_units.append(unit)
continue
if plant_name(name) != '' and plant_name(name) != plant_name(all_units[index-1][0]):
# If its the first plant to be grouped, skip grouping
if ok_to_group == True:
# Group previous plant
grouped_units.append(group_plant(aux_plant))
# And start storing the new one
aux_plant = [unit]
else:
ok_to_group = True
elif plant_name(name) != '':
aux_plant.append(unit)
else:
grouped_units.append(unit)
# Group the last plant
grouped_units.append(group_plant(aux_plant))
with open('grouped_plants.csv', 'w') as out:
csv_writer = writer(out, delimiter = ',')
for plant in grouped_units:
csv_writer.writerow(plant)
##############################
####### UPLOAD TO DB #########
projects_for_db = []
with open('grouped_plants.csv', 'r') as f:
read = reader(f)
for row in read:
for i in range(11, 20):
# Enter null values if fuel info not present
if not row[i]:
row[i] = None
projects_for_db.append(row)
##############
# DB Connection
username = 'bmaluenda'
passw = getpass('Enter database password for user %s' % username)
try:
# Remember to enter and/or modify connection parameters accordingly to your
# setup
con = psycopg2.connect(database='switch_chile', user=username,
host='localhost', port='5915',
password=passw)
print ("Connection to database established...")
except:
sys.exit("Error connecting to the switch_chile database...")
cur = con.cursor()
# Clean database
try:
cleaning = "DELETE FROM chile_new.geo_existing_projects"
cur.execute(cleaning)
print("Table erased")
except psycopg2.DatabaseError as e:
if con:
con.rollback()
print(e)
# Load new data
try:
    # mogrify returns bytes on Python 3, so decode before joining
    values_str = ','.join(cur.mogrify("(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)",
                          project).decode() for project in projects_for_db)
query_str = "INSERT INTO chile_new.geo_existing_projects (db_name, system, units, main_energy_source, start_date, max_net_power, min_net_power, connection_point, voltage_connection, easting, northing, fuel_1, specific_consumption_1, units_specific_consumption_1, fuel_2, specific_consumption_2, units_specific_consumption_2, fuel_3, specific_consumption_3, units_specific_consumption_3) VALUES "+values_str+";"
cur.execute(query_str)
con.commit()
print ("New existing project data has been uploaded to the DB.")
except psycopg2.DatabaseError as e:
if con:
con.rollback()
print(e)
# Update geometry column with new coordinates
try:
query_str = "UPDATE chile_new.geo_existing_projects SET geom = ST_SetSrid(ST_MakePoint(easting, northing), 32718)"
cur.execute(query_str)
con.commit()
print ("Updated geometry column with new data.")
except psycopg2.DatabaseError as e:
if con:
con.rollback()
print(e)
if cur:
cur.close()
if con:
con.close()
|
apache-2.0
| -6,822,856,848,889,920,000
| 31.761364
| 414
| 0.624176
| false
| 3.241147
| false
| false
| false
|
marinho/PyNFe
|
pynfe/processamento/serializacao.py
|
1
|
17276
|
# -*- coding: utf-8 -*-
try:
set
except:
from sets import Set as set
from pynfe.entidades import Emitente, Cliente, Produto, Transportadora, NotaFiscal
from pynfe.excecoes import NenhumObjetoEncontrado, MuitosObjetosEncontrados
from pynfe.utils import etree, so_numeros, obter_municipio_por_codigo, obter_pais_por_codigo
from pynfe.utils.flags import CODIGOS_ESTADOS, VERSAO_PADRAO
class Serializacao(object):
"""Classe abstrata responsavel por fornecer as funcionalidades basicas para
exportacao e importacao de Notas Fiscais eletronicas para formatos serializados
de arquivos. Como XML, JSON, binario, etc.
Nao deve ser instanciada diretamente!"""
_fonte_dados = None
    _ambiente = 1  # 1 = production (Produção), 2 = test/homologation (Homologação)
_nome_aplicacao = 'PyNFe'
def __new__(cls, *args, **kwargs):
if cls == Serializacao:
raise Exception('Esta classe nao pode ser instanciada diretamente!')
else:
            # object.__new__ must not receive extra args (breaks on Python 3)
            return super(Serializacao, cls).__new__(cls)
def __init__(self, fonte_dados, homologacao=False):
self._fonte_dados = fonte_dados
self._ambiente = homologacao and 2 or 1
def exportar(self, destino, **kwargs):
"""Gera o(s) arquivo(s) de exportacao a partir da Nofa Fiscal eletronica
ou lista delas."""
raise Exception('Metodo nao implementado')
def importar(self, origem):
"""Fabrica que recebe o caminho ou objeto de origem e instancia os objetos
da PyNFe"""
raise Exception('Metodo nao implementado')
class SerializacaoXML(Serializacao):
_versao = VERSAO_PADRAO
def exportar(self, destino=None, retorna_string=False, **kwargs):
"""Gera o(s) arquivo(s) de Nofa Fiscal eletronica no padrao oficial da SEFAZ
e Receita Federal, para ser(em) enviado(s) para o webservice ou para ser(em)
armazenado(s) em cache local."""
        # Root node of the output XML
raiz = etree.Element('NFe', xmlns="http://www.portalfiscal.inf.br/nfe")
        # Load the list of Notas Fiscais
notas_fiscais = self._fonte_dados.obter_lista(_classe=NotaFiscal, **kwargs)
for nf in notas_fiscais:
raiz.append(self._serializar_notas_fiscal(nf, retorna_string=False))
if retorna_string:
return etree.tostring(raiz, pretty_print=True)
else:
return raiz
def importar(self, origem):
"""Cria as instancias do PyNFe a partir de arquivos XML no formato padrao da
SEFAZ e Receita Federal."""
raise Exception('Metodo nao implementado')
def _serializar_emitente(self, emitente, tag_raiz='emit', retorna_string=True):
raiz = etree.Element(tag_raiz)
        # Issuer data
etree.SubElement(raiz, 'CNPJ').text = so_numeros(emitente.cnpj)
etree.SubElement(raiz, 'xNome').text = emitente.razao_social
etree.SubElement(raiz, 'xFant').text = emitente.nome_fantasia
etree.SubElement(raiz, 'IE').text = emitente.inscricao_estadual
        # Address
endereco = etree.SubElement(raiz, 'enderEmit')
etree.SubElement(endereco, 'xLgr').text = emitente.endereco_logradouro
etree.SubElement(endereco, 'nro').text = emitente.endereco_numero
etree.SubElement(endereco, 'xCpl').text = emitente.endereco_complemento
etree.SubElement(endereco, 'xBairro').text = emitente.endereco_bairro
etree.SubElement(endereco, 'cMun').text = emitente.endereco_municipio
etree.SubElement(endereco, 'xMun').text = obter_municipio_por_codigo(
emitente.endereco_municipio, emitente.endereco_uf,
)
etree.SubElement(endereco, 'UF').text = emitente.endereco_uf
etree.SubElement(endereco, 'CEP').text = so_numeros(emitente.endereco_cep)
etree.SubElement(endereco, 'cPais').text = emitente.endereco_pais
etree.SubElement(endereco, 'xPais').text = obter_pais_por_codigo(emitente.endereco_pais)
etree.SubElement(endereco, 'fone').text = emitente.endereco_telefone
if retorna_string:
return etree.tostring(raiz, pretty_print=True)
else:
return raiz
def _serializar_cliente(self, cliente, tag_raiz='dest', retorna_string=True):
raiz = etree.Element(tag_raiz)
        # Customer data
etree.SubElement(raiz, cliente.tipo_documento).text = so_numeros(cliente.numero_documento)
etree.SubElement(raiz, 'xNome').text = cliente.razao_social
etree.SubElement(raiz, 'IE').text = cliente.inscricao_estadual
        # Address
endereco = etree.SubElement(raiz, 'enderDest')
etree.SubElement(endereco, 'xLgr').text = cliente.endereco_logradouro
etree.SubElement(endereco, 'nro').text = cliente.endereco_numero
etree.SubElement(endereco, 'xCpl').text = cliente.endereco_complemento
etree.SubElement(endereco, 'xBairro').text = cliente.endereco_bairro
etree.SubElement(endereco, 'cMun').text = cliente.endereco_municipio
etree.SubElement(endereco, 'xMun').text = obter_municipio_por_codigo(
cliente.endereco_municipio, cliente.endereco_uf,
)
etree.SubElement(endereco, 'UF').text = cliente.endereco_uf
etree.SubElement(endereco, 'CEP').text = so_numeros(cliente.endereco_cep)
etree.SubElement(endereco, 'cPais').text = cliente.endereco_pais
etree.SubElement(endereco, 'xPais').text = obter_pais_por_codigo(cliente.endereco_pais)
etree.SubElement(endereco, 'fone').text = cliente.endereco_telefone
if retorna_string:
return etree.tostring(raiz, pretty_print=True)
else:
return raiz
def _serializar_transportadora(self, transportadora, tag_raiz='transporta', retorna_string=True):
raiz = etree.Element(tag_raiz)
        # Carrier data
etree.SubElement(raiz, transportadora.tipo_documento).text = so_numeros(transportadora.numero_documento)
etree.SubElement(raiz, 'xNome').text = transportadora.razao_social
etree.SubElement(raiz, 'IE').text = transportadora.inscricao_estadual
        # Address
etree.SubElement(raiz, 'xEnder').text = transportadora.endereco_logradouro
etree.SubElement(raiz, 'cMun').text = transportadora.endereco_municipio
etree.SubElement(raiz, 'xMun').text = obter_municipio_por_codigo(
transportadora.endereco_municipio, transportadora.endereco_uf,
)
etree.SubElement(raiz, 'UF').text = transportadora.endereco_uf
if retorna_string:
return etree.tostring(raiz, pretty_print=True)
else:
return raiz
def _serializar_entrega_retirada(self, entrega_retirada, tag_raiz='entrega', retorna_string=True):
raiz = etree.Element(tag_raiz)
        # Delivery/pickup data
etree.SubElement(raiz, entrega_retirada.tipo_documento).text = so_numeros(entrega_retirada.numero_documento)
        # Address
etree.SubElement(raiz, 'xLgr').text = entrega_retirada.endereco_logradouro
etree.SubElement(raiz, 'nro').text = entrega_retirada.endereco_numero
etree.SubElement(raiz, 'xCpl').text = entrega_retirada.endereco_complemento
etree.SubElement(raiz, 'xBairro').text = entrega_retirada.endereco_bairro
etree.SubElement(raiz, 'cMun').text = entrega_retirada.endereco_municipio
etree.SubElement(raiz, 'xMun').text = obter_municipio_por_codigo(
entrega_retirada.endereco_municipio, entrega_retirada.endereco_uf,
)
etree.SubElement(raiz, 'UF').text = entrega_retirada.endereco_uf
if retorna_string:
return etree.tostring(raiz, pretty_print=True)
else:
return raiz
def _serializar_produto_servico(self, produto_servico, tag_raiz='det', retorna_string=True):
raiz = etree.Element(tag_raiz)
        # Product
prod = etree.SubElement(raiz, 'prod')
etree.SubElement(prod, 'cProd').text = str(produto_servico.codigo)
etree.SubElement(prod, 'cEAN').text = produto_servico.ean
etree.SubElement(prod, 'xProd').text = produto_servico.descricao
etree.SubElement(prod, 'CFOP').text = produto_servico.cfop
etree.SubElement(prod, 'uCom').text = produto_servico.unidade_comercial
etree.SubElement(prod, 'qCom').text = str(produto_servico.quantidade_comercial or 0)
etree.SubElement(prod, 'vUnCom').text = str(produto_servico.valor_unitario_comercial or 0)
etree.SubElement(prod, 'vProd').text = str(produto_servico.valor_total_bruto or 0)
etree.SubElement(prod, 'cEANTrib').text = produto_servico.ean_tributavel
etree.SubElement(prod, 'uTrib').text = produto_servico.unidade_tributavel
etree.SubElement(prod, 'qTrib').text = str(produto_servico.quantidade_tributavel)
etree.SubElement(prod, 'vUnTrib').text = str(produto_servico.valor_unitario_tributavel)
        # Taxes
imposto = etree.SubElement(raiz, 'imposto')
icms = etree.SubElement(imposto, 'ICMS')
icms_item = etree.SubElement(icms, 'ICMS'+produto_servico.icms_situacao_tributaria)
etree.SubElement(icms_item, 'orig').text = str(produto_servico.icms_origem)
etree.SubElement(icms_item, 'CST').text = produto_servico.icms_situacao_tributaria
etree.SubElement(icms_item, 'modBC').text = str(produto_servico.icms_modalidade_determinacao_bc)
etree.SubElement(icms_item, 'vBC').text = str(produto_servico.icms_valor_base_calculo)
etree.SubElement(icms_item, 'pICMS').text = str(produto_servico.icms_aliquota)
etree.SubElement(icms_item, 'vICMS').text = str(produto_servico.icms_valor)
pis = etree.SubElement(imposto, 'PIS')
pis_item = etree.SubElement(pis, 'PISAliq')
etree.SubElement(pis_item, 'CST').text = str(produto_servico.pis_situacao_tributaria)
etree.SubElement(pis_item, 'vBC').text = str(produto_servico.pis_valor_base_calculo)
etree.SubElement(pis_item, 'pPIS').text = str(produto_servico.pis_aliquota_percentual)
etree.SubElement(pis_item, 'vPIS').text = str(produto_servico.pis_valor)
cofins = etree.SubElement(imposto, 'COFINS')
cofins_item = etree.SubElement(cofins, 'COFINSAliq')
etree.SubElement(cofins_item, 'CST').text = str(produto_servico.cofins_situacao_tributaria)
etree.SubElement(cofins_item, 'vBC').text = str(produto_servico.cofins_valor_base_calculo)
etree.SubElement(cofins_item, 'pCOFINS').text = str(produto_servico.cofins_aliquota_percentual)
etree.SubElement(cofins_item, 'vCOFINS').text = str(produto_servico.cofins_valor)
if retorna_string:
return etree.tostring(raiz, pretty_print=True)
else:
return raiz
def _serializar_notas_fiscal(self, nota_fiscal, tag_raiz='infNFe', retorna_string=True):
raiz = etree.Element(tag_raiz, versao=self._versao)
        # Nota Fiscal data
ide = etree.SubElement(raiz, 'ide')
etree.SubElement(ide, 'cUF').text = CODIGOS_ESTADOS[nota_fiscal.uf]
etree.SubElement(ide, 'cNF').text = nota_fiscal.codigo_numerico_aleatorio
etree.SubElement(ide, 'natOp').text = nota_fiscal.natureza_operacao
etree.SubElement(ide, 'indPag').text = str(nota_fiscal.forma_pagamento)
etree.SubElement(ide, 'mod').text = str(nota_fiscal.modelo)
etree.SubElement(ide, 'serie').text = nota_fiscal.serie
etree.SubElement(ide, 'nNF').text = str(nota_fiscal.numero_nf)
etree.SubElement(ide, 'dEmi').text = nota_fiscal.data_emissao.strftime('%Y-%m-%d')
etree.SubElement(ide, 'dSaiEnt').text = nota_fiscal.data_saida_entrada.strftime('%Y-%m-%d')
etree.SubElement(ide, 'tpNF').text = str(nota_fiscal.tipo_documento)
etree.SubElement(ide, 'cMunFG').text = nota_fiscal.municipio
etree.SubElement(ide, 'tpImp').text = str(nota_fiscal.tipo_impressao_danfe)
etree.SubElement(ide, 'tpEmis').text = str(nota_fiscal.forma_emissao)
etree.SubElement(ide, 'cDV').text = nota_fiscal.dv_codigo_numerico_aleatorio
etree.SubElement(ide, 'tpAmb').text = str(self._ambiente)
etree.SubElement(ide, 'finNFe').text = str(nota_fiscal.finalidade_emissao)
etree.SubElement(ide, 'procEmi').text = str(nota_fiscal.processo_emissao)
etree.SubElement(ide, 'verProc').text = '%s %s'%(self._nome_aplicacao,
nota_fiscal.versao_processo_emissao)
        # Issuer
raiz.append(self._serializar_emitente(nota_fiscal.emitente, retorna_string=False))
        # Recipient
raiz.append(self._serializar_cliente(nota_fiscal.cliente, retorna_string=False))
        # Pickup
if nota_fiscal.retirada:
raiz.append(self._serializar_entrega_retirada(
nota_fiscal.retirada,
retorna_string=False,
tag_raiz='retirada',
))
        # Delivery
if nota_fiscal.entrega:
raiz.append(self._serializar_entrega_retirada(
nota_fiscal.entrega,
retorna_string=False,
tag_raiz='entrega',
))
        # Items
for num, item in enumerate(nota_fiscal.produtos_e_servicos):
det = self._serializar_produto_servico(item, retorna_string=False)
det.attrib['nItem'] = str(num+1)
raiz.append(det)
        # Totals
total = etree.SubElement(raiz, 'total')
icms_total = etree.SubElement(total, 'ICMSTot')
etree.SubElement(icms_total, 'vBC').text = str(nota_fiscal.totais_icms_base_calculo)
etree.SubElement(icms_total, 'vICMS').text = str(nota_fiscal.totais_icms_total)
etree.SubElement(icms_total, 'vBCST').text = str(nota_fiscal.totais_icms_st_base_calculo)
etree.SubElement(icms_total, 'vST').text = str(nota_fiscal.totais_icms_st_total)
etree.SubElement(icms_total, 'vProd').text = str(nota_fiscal.totais_icms_total_produtos_e_servicos)
etree.SubElement(icms_total, 'vFrete').text = str(nota_fiscal.totais_icms_total_frete)
etree.SubElement(icms_total, 'vSeg').text = str(nota_fiscal.totais_icms_total_seguro)
etree.SubElement(icms_total, 'vDesc').text = str(nota_fiscal.totais_icms_total_desconto)
etree.SubElement(icms_total, 'vII').text = str(nota_fiscal.totais_icms_total_ii)
etree.SubElement(icms_total, 'vIPI').text = str(nota_fiscal.totais_icms_total_ipi)
etree.SubElement(icms_total, 'vPIS').text = str(nota_fiscal.totais_icms_pis)
etree.SubElement(icms_total, 'vCOFINS').text = str(nota_fiscal.totais_icms_cofins)
etree.SubElement(icms_total, 'vOutro').text = str(nota_fiscal.totais_icms_outras_despesas_acessorias)
etree.SubElement(icms_total, 'vNF').text = str(nota_fiscal.totais_icms_total_nota)
        # Transport
transp = etree.SubElement(raiz, 'transp')
etree.SubElement(transp, 'modFrete').text = str(nota_fiscal.transporte_modalidade_frete)
        # Carrier
transp.append(self._serializar_transportadora(
nota_fiscal.transporte_transportadora,
retorna_string=False,
))
        # Vehicle
veiculo = etree.SubElement(transp, 'veicTransp')
etree.SubElement(veiculo, 'placa').text = nota_fiscal.transporte_veiculo_placa
etree.SubElement(veiculo, 'UF').text = nota_fiscal.transporte_veiculo_uf
etree.SubElement(veiculo, 'RNTC').text = nota_fiscal.transporte_veiculo_rntc
        # Trailer
reboque = etree.SubElement(transp, 'reboque')
etree.SubElement(reboque, 'placa').text = nota_fiscal.transporte_reboque_placa
etree.SubElement(reboque, 'UF').text = nota_fiscal.transporte_reboque_uf
etree.SubElement(reboque, 'RNTC').text = nota_fiscal.transporte_reboque_rntc
# Volumes
for volume in nota_fiscal.transporte_volumes:
vol = etree.SubElement(transp, 'vol')
etree.SubElement(vol, 'qVol').text = str(volume.quantidade)
etree.SubElement(vol, 'esp').text = volume.especie
etree.SubElement(vol, 'marca').text = volume.marca
etree.SubElement(vol, 'nVol').text = volume.numeracao
etree.SubElement(vol, 'pesoL').text = str(volume.peso_liquido)
etree.SubElement(vol, 'pesoB').text = str(volume.peso_bruto)
            # Seals
lacres = etree.SubElement(vol, 'lacres')
for lacre in volume.lacres:
etree.SubElement(lacres, 'nLacre').text = lacre.numero_lacre
        # Additional information
info_ad = etree.SubElement(raiz, 'infAdic')
etree.SubElement(info_ad, 'infAdFisco').text = nota_fiscal.informacoes_adicionais_interesse_fisco
etree.SubElement(info_ad, 'infCpl').text = nota_fiscal.informacoes_complementares_interesse_contribuinte
        # 'Id' attribute of the root tag
        # e.g.: NFe35080599999090910270550010000000011518005123
raiz.attrib['Id'] = nota_fiscal.identificador_unico
if retorna_string:
return etree.tostring(raiz, pretty_print=True)
else:
return raiz
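# Hedged usage sketch (not part of the original module): serialising every
# NotaFiscal held by a PyNFe data source. 'fonte_dados' is an assumption --
# any object exposing obter_lista(_classe=...) as used by exportar() works.
def _exemplo_exportacao(fonte_dados):
    # homologacao=True switches tpAmb to the test environment (2)
    serializador = SerializacaoXML(fonte_dados, homologacao=True)
    # retorna_string=True returns pretty-printed XML instead of the etree root
    return serializador.exportar(retorna_string=True)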
|
lgpl-3.0
| -4,061,621,420,979,028,000
| 48.895954
| 116
| 0.663925
| false
| 2.904441
| false
| false
| false
|
t3dev/odoo
|
addons/website/controllers/backend.py
|
5
|
2937
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import http
from odoo.http import request
from odoo.tools.translate import _
class WebsiteBackend(http.Controller):
@http.route('/website/fetch_dashboard_data', type="json", auth='user')
def fetch_dashboard_data(self, website_id, date_from, date_to):
Website = request.env['website']
has_group_system = request.env.user.has_group('base.group_system')
has_group_designer = request.env.user.has_group('website.group_website_designer')
dashboard_data = {
'groups': {
'system': has_group_system,
'website_designer': has_group_designer
},
'currency': request.env.user.company_id.currency_id.id,
'dashboards': {
'visits': {},
}
}
current_website = website_id and Website.browse(website_id) or Website.get_current_website()
multi_website = request.env.user.has_group('website.group_multi_website')
dashboard_data['websites'] = (multi_website and request.env['website'].search([]) or current_website).read(['id', 'name'])
for website in dashboard_data['websites']:
if website['id'] == current_website.id:
website['selected'] = True
if has_group_designer:
if current_website.google_management_client_id and current_website.google_analytics_key:
dashboard_data['dashboards']['visits'] = dict(
ga_client_id=current_website.google_management_client_id or '',
ga_analytics_key=current_website.google_analytics_key or '',
)
return dashboard_data
@http.route('/website/dashboard/set_ga_data', type='json', auth='user')
def website_set_ga_data(self, website_id, ga_client_id, ga_analytics_key):
if not request.env.user.has_group('base.group_system'):
return {
'error': {
'title': _('Access Error'),
'message': _('You do not have sufficient rights to perform that action.'),
}
}
if not ga_analytics_key or not ga_client_id.endswith('.apps.googleusercontent.com'):
return {
'error': {
'title': _('Incorrect Client ID / Key'),
'message': _('The Google Analytics Client ID or Key you entered seems incorrect.'),
}
}
Website = request.env['website']
current_website = website_id and Website.browse(website_id) or Website.get_current_website()
request.env['res.config.settings'].create({
'google_management_client_id': ga_client_id,
'google_analytics_key': ga_analytics_key,
'website_id': current_website.id,
}).execute()
return True
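# Hedged client sketch (not part of the original controller): what a call to
# the JSON route above looks like from outside Odoo. The base URL, the
# 'requests' dependency and the session cookie are assumptions; auth='user'
# means a valid logged-in session_id is required.
def _example_fetch_dashboard_data(base_url, session_id, website_id, date_from, date_to):
    import json
    import requests
    payload = {'jsonrpc': '2.0', 'method': 'call',
               'params': {'website_id': website_id,
                          'date_from': date_from,
                          'date_to': date_to}}
    resp = requests.post(base_url + '/website/fetch_dashboard_data',
                         data=json.dumps(payload),
                         headers={'Content-Type': 'application/json'},
                         cookies={'session_id': session_id})
    # type='json' routes wrap the return value in a JSON-RPC envelope
    return resp.json().get('result')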
|
gpl-3.0
| -7,889,350,832,139,283,000
| 43.5
| 130
| 0.582567
| false
| 4.107692
| false
| false
| false
|
ctools/ctools
|
modules/comscripts/comlixfit.py
|
1
|
10461
|
#! /usr/bin/env python
# ==========================================================================
# Perform SRCLIX model fitting of COMPTEL observations
#
# Copyright (C) 2021 Juergen Knoedlseder
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# ==========================================================================
import os
import sys
import gammalib
import ctools
# =============== #
# comlixfit class #
# =============== #
class comlixfit(ctools.cslikelihood):
"""
Perform SRCLIX model fitting of COMPTEL observations
"""
# Constructor
def __init__(self, *argv):
"""
Constructor
"""
# Initialise application by calling the base class constructor
self._init_cslikelihood(self.__class__.__name__, ctools.__version__, argv)
# Return
return
# Private methods
def _get_parameters(self):
"""
Get parameters from parfile
"""
# Set observation if not done before
if self.obs().is_empty():
self.obs().load(self['inobs'].filename())
# Set models if we have none
if self.obs().models().is_empty():
self.obs().models(self['inmodel'].filename())
# Query parameters
self['max_iter'].integer()
self['like_accuracy'].real()
self['fix_spat_for_ts'].boolean()
# Get parameters
bkgmethod = self['bkgmethod'].string()
nrunav = self['nrunav'].integer()
navgr = self['navgr'].integer()
nincl = self['nincl'].integer()
nexcl = self['nexcl'].integer()
# Check for incorrect parameters
        if nexcl < 0 or nexcl >= nincl:
            msg = 'Incorrect value %d for nexcl (bins to exclude).' % nexcl
            raise RuntimeError(msg)
        if nexcl != 0 and nexcl % 2 == 0:
            msg = 'nexcl=%d (bins to exclude) should be zero or an odd number.' % nexcl
            raise RuntimeError(msg)
        if nincl < 3 or nincl % 2 == 0:
            msg = 'nincl=%d (bins to include) should be odd and >= 3.' % nincl
            raise RuntimeError(msg)
        if navgr < 1 or navgr % 2 == 0:
            msg = 'navgr=%d should be odd and >= 1.' % navgr
            raise RuntimeError(msg)
# Query ahead output model filename
if self._read_ahead():
self['suffix'].string()
self['outfolder'].string()
self['outobs'].filename()
self['outmodel'].filename()
# Write input parameters into logger
self._log_parameters(gammalib.TERSE)
# Return
return
def _update_obs(self):
"""
Update background model in observation container
The method updated the background model in the observation container
by taking into account the current source models in the BGDLIXA
model generation algorithm.
"""
# Get task parameters
bkgmethod = self['bkgmethod'].string()
nrunav = self['nrunav'].integer()
navgr = self['navgr'].integer()
nincl = self['nincl'].integer()
nexcl = self['nexcl'].integer()
# Extract source models from observation model container
models = gammalib.GModels()
for model in self.obs().models():
if model.classname() == 'GModelSky':
models.append(model)
# Loop over all observations
for obs in self.obs():
# Skip non-COMPTEL observations
if obs.classname() != 'GCOMObservation':
continue
# Compute DRM
drm = obs.drm(models)
# Compute background model
obs.compute_drb(bkgmethod, drm, nrunav, navgr, nincl, nexcl)
# Signal that DRB file was not yet saved
obs.drbname('')
# Return
return
def _final_model_fit(self):
"""
Perform final model fit using ctlike
"""
# Create instance of model fitting tool
like = ctools.ctlike(self.obs())
like['fix_spat_for_ts'] = self['fix_spat_for_ts'].boolean()
# Run ctlike
like.run()
# Recover results
self.opt(like.opt())
self.obs(like.obs())
# Return
return
def _get_obs_header(self, obs):
"""
Get observation header
"""
# Set header
header = obs.instrument() + ' observation'
# If observation name is not empty then add name
        if obs.name() != '':
header += ' \"' + obs.name() + '\"'
# If observation ID is not empty then add ID
        if obs.id() != '':
header += ' (id=' + obs.id() + ')'
# Return header
return header
# Public methods
def run(self):
"""
Run the script
"""
# Switch screen logging on in debug mode
if self._logDebug():
self._log.cout(True)
# Get parameters
self._get_parameters()
# Log header
self._log_header1(gammalib.NORMAL, 'Input observations')
# Log input observations
self._log_string(gammalib.NORMAL, str(self.obs()))
# Get parameters and initialise some variables
niter = self['max_iter'].integer()
eps = self['like_accuracy'].real()
delta = 0.0
# Write header
self._log_header1(gammalib.NORMAL,
'Iterative maximum likelihood model fitting')
# Loop over iterations
for iter in range(niter):
# Update observations
self._update_obs()
# Fit model
self.obs().optimize(self.opt())
# Compute logL difference after first iteration
if iter > 0:
delta = logL - self.opt().value()
# Store maximum likelihood value
logL = self.opt().value()
# Log maximum likelihood
if iter == 0:
result = '%.5f' % (logL)
else:
result = '%.5f (%.5f)' % (logL, delta)
self._log_value(gammalib.NORMAL, 'logL after iteration %d' % (iter+1),
result)
# Check for convergence
if iter > 0:
if delta < eps:
break
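# Worked example of the stopping rule (illustrative numbers): with
# like_accuracy eps=0.005 and logL values 1000.00000 -> 999.20000 -> 999.19800,
# delta is 0.80000 after iteration 2 and 0.00200 after iteration 3, so the
# loop stops after iteration 3 because 0.00200 < 0.005.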
# Do final model fit
self._final_model_fit()
# Compute logL difference and store maximum likelihood value
delta = logL - self.opt().value()
logL = self.opt().value()
# Log final maximum likelihood
result = '%.5f (%.5f)' % (logL, delta)
self._log_value(gammalib.NORMAL, 'logL after final iteration',
result)
# Log header
self._log_header1(gammalib.NORMAL,
'Maximum likelihood optimisation results')
self._log_string(gammalib.NORMAL, str(self.opt()))
self._log_string(gammalib.NORMAL, str(self.obs().models()))
# Return
return
def save(self):
"""
Save observation definition file
"""
# Write header
self._log_header1(gammalib.TERSE, 'Save observations')
# Get output filenames
outobs = self['outobs'].filename()
outmodel = self['outmodel'].filename()
# If file exists and clobber flag is false then raise an exception
if outobs.exists() and not self['clobber'].boolean():
msg = ('Cannot save "'+outobs.url()+'": File already exists. '
'Use parameter clobber=yes to allow overwriting of files.')
raise RuntimeError(msg)
elif outmodel.exists() and not self['clobber'].boolean():
msg = ('Cannot save "'+outmodel.url()+'": File already exists. '
'Use parameter clobber=yes to allow overwriting of files.')
raise RuntimeError(msg)
# Otherwise log filename and save file
else:
# Get DRB file suffix and set outfolder
suffix = self['suffix'].string()
outfolder = self['outfolder'].string()
# Create outfolder directory
try:
os.makedirs(gammalib.expand_env(outfolder))
except OSError:
pass
# Loop over all observations
for obs in self.obs():
# Skip non-COMPTEL observations
if obs.classname() != 'GCOMObservation':
continue
# Store background filename
drename = '%s/%s' % (outfolder, os.path.basename(obs.drename().url()))
if suffix == '':
drbname = drename.replace('dre', 'drb')
else:
drbname = drename.replace('dre', 'drb-%s' % suffix)
obs.drbname(drbname)
# Save DRB file
obs.drb().save(drbname, self['clobber'].boolean())
# Log saving
self._log_value(gammalib.NORMAL, 'DRB file', drbname)
# Log observation definition filename
self._log_value(gammalib.NORMAL, 'Obs. definition XML file',
outobs.url())
# Save observations
self.obs().save(outobs)
# Log model definition filename
self._log_value(gammalib.NORMAL, 'Model definition XML file',
outmodel.url())
# Save models
self.obs().models().save(outmodel)
# Return
return
# ======================== #
# Main routine entry point #
# ======================== #
if __name__ == '__main__':
# Create instance of application
app = comlixfit(sys.argv)
# Execute application
app.execute()
|
gpl-3.0
| 556,582,991,032,804,600
| 30.414414
| 86
| 0.531402
| false
| 4.395378
| false
| false
| false
|
yudingding6197/fin_script
|
static_present.py
|
1
|
5506
|
#!/usr/bin/env python
# -*- coding:gbk -*-
import sys
import re
import os
import time
import string
import datetime
import tushare as ts
from internal.ts_common import *
from decimal import Decimal
today = datetime.date.today()
#Meaning of show_flag:
#0: do not fetch each stock's float shares; the turnover rate is not computed
#1: fetch each stock's float shares and compute the turnover rate
#2: show the latest news for each stock; all of today's news is shown, or a single item if there is none today
pindex = len(sys.argv)
LOOP_COUNT=0
st_today_base = None
while LOOP_COUNT<3:
try:
st_today_base = ts.get_today_all()
except:
LOOP_COUNT += 1
time.sleep(0.5)
else:
break
if st_today_base is None:
print "Timeout to get stock basic info"
exit(0)
st_today = st_today_base.sort_values(['changepercent'], 0, False)
#new_st_list = list(st_today[st_today.changepercent>11]['code'])
new_st_list = []
for index,row in st_today.iterrows():
code = row[0].encode('gbk')
if row['changepercent']>11:
new_st_list.append(code)
print ''
#print new_st_list
LOOP_COUNT=0
st_bas = None
while LOOP_COUNT<3:
try:
st_bas = ts.get_stock_basics()
except:
LOOP_COUNT += 1
time.sleep(0.5)
else:
break
if st_bas is None:
print "Timeout to get stock basic info"
exit(0)
st_pb_base = st_bas[st_bas.pb!=0]
st_pb_base = st_pb_base.sort_values(['timeToMarket'], 0, False)
st_index = st_pb_base.index
st_bas_list=list(st_index)
#st_bas.to_excel("a_stock_base.xlsx")
#st_pb_base.to_excel("a_stock_pb_base.xlsx")
#print st_pb_base.head(10)
st_list = []
for i in range(0, len(new_st_list)):
if new_st_list[i] not in st_bas_list[0:10]:
st_list.append(new_st_list[i])
st_list.extend(st_bas_list)
'''
st_list = st_list[0:60]
st_list.append('300175')
st_list.append('603558')
#st_list=['600828','002819','300611']
#print st_list
'''
number = len(st_list)
if number<=0:
exit(0)
today_open = []
stcsItem=statisticsItem()
b_get_data = 1
#ZT: fetch `base` codes per request
#Slice the list; the starting position is configurable
base = 23
loop_ct = number/base
if number%base!=0:
loop_ct += 1
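#Example of the batching arithmetic (illustrative): number=50, base=23 ->
#50/23=2 with remainder 4, so loop_ct becomes 3 and the batches cover 23+23+4 codes.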
pd_list = []
for i in range(0, loop_ct):
end_idx = min(base*(i+1), number)
cur_list = st_list[i*base:end_idx]
if len(cur_list)==0:
break
#print cur_list
excecount = 0
stdf = None
while excecount<5:
try:
stdf = ts.get_realtime_quotes(cur_list)
except:
print "Get except:"
time.sleep(0.5)
excecount += 1
if excecount<5:
continue
stdf = None
break
else:
break
if stdf is None:
print "Get list fail at:", cur_list
continue
#print stdf
yzcx_flag = 0
for index,row in stdf.iterrows():
stockInfo = []
code = cur_list[index]
index += 1
name = row[0]
pre_close = float(row['pre_close'])
price = float(row['price'])
#Use the K-line data to decide whether this is a one-word limit-up (YZZT) new stock
if b_get_data == 1:
#Fetch the daily trading data for each stock
day_info_df = ts.get_k_data(code)
#print day_info_df
trade_days = len(day_info_df)
b_open=0
yzzt_day = 0
if trade_days==1:
stcsItem.s_new += 1
yzcx_flag = 1
for tdidx,tdrow in day_info_df.iterrows():
open = tdrow[1]
close = tdrow[2]
high = tdrow['high']
low = tdrow['low']
if high!=low:
if yzzt_day!=0:
if (yzzt_day+1)==trade_days:
chg_perc = round((price-pre_close)*100/pre_close,2)
open_list = [code, name, chg_perc, price, yzzt_day]
today_open.append(open_list)
b_open = 1
break
#Once the limit-up opens, break out of the for loop
yzzt_day += 1
pre_close = close
if b_open==0:
dt_str=day_info_df.iloc[trade_days-1,0]
last_date = datetime.datetime.strptime(dt_str, '%Y-%m-%d').date()
cmp_delta = today-last_date
if cmp_delta.days==0:
stcsItem.s_cx_yzzt += 1
yzcx_flag = 1
#ÈÏΪYZZT²»»á³¬¹ý 33 ¸ö½»Ò×ÈÕ
if trade_days>33:
b_get_data = 0
stk_type = analyze_status(code, name, row, stcsItem, yzcx_flag, pd_list)
#if i>2:
# break
#if len(pd_list)>0:
# df_tdy = pd.DataFrame(pd_list)
# df_tdy1 = df_tdy.sort_values([0], 0, False)
str_opn = "[%d %d %d %d]" % (stcsItem.s_open_zt,stcsItem.s_close_zt,stcsItem.s_open_T_zt,stcsItem.s_dk_zt)
print "%4d-ZT %4d-DT %d-X %d--%s" % (stcsItem.s_zt,stcsItem.s_dt,stcsItem.s_new,stcsItem.s_yzzt, str_opn)
print "%4d-CG %4d-FT KD:[%s] %2d-YIN" %(stcsItem.s_zthl,stcsItem.s_dtft,','.join(stcsItem.lst_kd),stcsItem.s_zt_o_gt_c)
print "%4d(%4d) ZERO:%4d %4d(%4d)" %(stcsItem.s_open_sz, stcsItem.s_open_dz, stcsItem.s_open_pp, stcsItem.s_open_xd, stcsItem.s_open_dd)
print "%4d(%4d) ZERO:%4d %4d(%4d)" %(stcsItem.s_close_sz, stcsItem.s_close_dz, stcsItem.s_close_pp, stcsItem.s_close_xd, stcsItem.s_close_dd)
print "4%%:%4d %4d" %(stcsItem.s_high_zf,stcsItem.s_low_df)
#print today_open
str = ''
list = today_open
if len(list)>0:
print "CXKB:"
for i in range(0, len(list)):
itm_lst = list[i]
if itm_lst[2]>9.9:
str1 = "%s(%d, ZT), " % (itm_lst[1], itm_lst[4])
elif itm_lst[2]<-9.9:
str1 = "%s(%d, DT), " % (itm_lst[1], itm_lst[4])
else:
str1 = "%s(%d, %.2f%%), " % (itm_lst[1], itm_lst[4],itm_lst[2])
str += str1
print str
else:
print "CXKB:====="
print ''
str = ''
list = stcsItem.lst_nb
if len(list)>0:
print "NB:"
for i in range(0, len(list)):
itm_lst = list[i]
str1 = "%s(%.2f%%, %.2f%%), " % (itm_lst[1], itm_lst[2], itm_lst[4])
str += str1
print str
else:
print "NB:====="
print ''
str = ''
list = stcsItem.lst_jc
if len(list)>0:
print "JC:"
for i in range(0, len(list)):
itm_lst = list[i]
str1 = "%s(%.2f%%, %.2f%%), " % (itm_lst[1], itm_lst[2], itm_lst[4])
str += str1
print str
else:
print "JC:====="
#print '\n'.join(['%s:%s' % item for item in stcsItem.__dict__.items()])
|
gpl-2.0
| 7,300,797,230,191,250,000
| 22.732759
| 141
| 0.622594
| false
| 1.907831
| false
| false
| false
|
IfengAutomation/uitester
|
uitester/test_manager/rpc_server.py
|
1
|
6380
|
from socketserver import ThreadingTCPServer, StreamRequestHandler
import json
from threading import Thread
import logging
import queue
import traceback
logger = logging.getLogger('Tester')
Timeout = 120
Port = 11800
class RPCServer(ThreadingTCPServer):
def __init__(self, server_address, RequestHandlerClass, bind_and_activate=True):
self.allow_reuse_address = True
super().__init__(server_address, RequestHandlerClass, bind_and_activate)
self._agents = {}
def add_agent(self, agent):
self._agents[agent.device_id] = agent
def rm_agent(self, device_id):
self._agents.pop(device_id)
def get_agent(self, device_id):
return self._agents.get(device_id)
class RPCHandler(StreamRequestHandler):
def __init__(self, request, client_address, server):
self.has_register = False
self.agent_proxy = None
super().__init__(request, client_address, server)
def handle(self):
logger.debug('RPCHandler: client handler start')
while True:
line = self.rfile.readline().decode().strip()
if len(line) == 0:
logger.debug('RPCHandler: client disconnected.')
break
try:
msg = RPCMessage.from_json(line)
if not self.has_register:
self.handle_register(msg)
elif msg.msg_type == RPCMessage.RPC_KILL_SIGNAL:
self.handle_unregister()
else:
self.handle_message(msg)
except Exception:
logger.debug('RPCHandler: catch exception\n%s' % traceback.format_exc())
continue
logger.debug('RPCHandler: client handler stop')
def handle_register(self, msg):
if msg.msg_type == RPCMessage.RPC_CALL and msg.name == 'register':
if len(msg.args) < 1:
res = self._make_error_msg()
self.wfile.write(res.to_bytes())
return
self.agent_proxy = RPCAgent()
self.agent_proxy.device_id = msg.args[0]
self.agent_proxy.wfile = self.wfile
self.agent_proxy.connection = self.connection
self.server.add_agent(self.agent_proxy)
self.has_register = True
self.wfile.write(self._make_ok_msg().to_bytes())
else:
self.wfile.write(self._make_error_msg().to_bytes())
def _make_ok_msg(self):
ok_msg = RPCMessage()
ok_msg.msg_type = RPCMessage.RPC_RESULT
ok_msg.args = [True]
ok_msg.name = 'ok'
return ok_msg
def _make_error_msg(self):
err_msg = RPCMessage()
err_msg.msg_type = RPCMessage.RPC_RESULT
err_msg.args = [False]
err_msg.name = 'error'
return err_msg
def handle_unregister(self):
self.server.rm_agent(self.agent_proxy.device_id)
self.connection.close()
self.agent_proxy.is_closed = True
def handle_message(self, msg):
self.agent_proxy.responses.put(msg)
class RPCAgent:
def __init__(self):
self.device_id = ''
self.is_closed = False
self.msg_id = 0
self.wfile = None
self.connection = None
self.responses = queue.Queue()
def call(self, name, *args, timeout=Timeout, **kwargs):
"""
kwargs:
1) version
version=1 use normal rpc call
version=2 use reflection rpc call
default is version=1
reflection rpc call:
name('call', 'call_static', 'new', 'delete') : reflection method name
*args:
'call' needs at least 2 arguments: 1) instance 2) method name 3) method arguments
'call_static' needs at least 2 arguments: 1) class name 2) method name 3) method arguments
'new' needs at least 1 argument: 1) class name 2) constructor arguments
'delete' needs at least 1 argument: 1) instance
:return: RemoteObject. A remote object carries two attributes, hash and class. If the remote object is an Android View it also has 'res-id' and 'content-des' attributes; if it is a TextView it has a 'text' attribute as well.
Timeout:
RPC calls time out after `Timeout` seconds (120 by default, per the module constant). A TimeoutError is raised once the timeout elapses.
"""
self.msg_id += 1
msg = RPCMessage()
msg.msg_id = self.msg_id
msg.msg_type = RPCMessage.RPC_CALL
msg.name = name
msg.args = args
if 'version' in kwargs:
msg.version = kwargs['version']
self.wfile.write(msg.to_bytes())
try:
res = self.responses.get(timeout=timeout)
return res
except queue.Empty:
raise TimeoutError("RPC Call timeout")
@property
def closed(self):
return self.wfile.closed
def close(self):
msg = RPCMessage.get_kill_signal()
self.wfile.write(msg.to_bytes())
class RPCMessage:
RPC_CALL = 1
RPC_RESULT = 2
RPC_KILL_SIGNAL = 99
def __init__(self):
self.msg_type = None
self.msg_id = None
self.version = 1
self.name = None
self.args = []
@classmethod
def get_kill_signal(cls):
msg = cls()
msg.name = 'kill'
msg.msg_type = RPCMessage.RPC_KILL_SIGNAL
return msg
@classmethod
def from_json(cls, json_str):
msg_dict = json.loads(decode(json_str))
if type(msg_dict) is not dict:
raise TypeError('Json is not a dict, can\'t create rpc message')
instance = cls()
instance.__dict__ = msg_dict
return instance
def to_json(self):
return encode(json.dumps(self.__dict__))
def to_bytes(self):
return (self.to_json() + '\n').encode()
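# Illustrative wire format (an assumption based on to_json/to_bytes above):
#   {"msg_type": 1, "msg_id": 1, "version": 1, "name": "register", "args": ["dev-01"]}\n
# Each message is a single JSON object terminated by '\n'; encode() escapes any
# embedded newlines so RPCHandler.handle() can split messages with readline().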
def get_server(port):
return RPCServer(('0.0.0.0', port), RPCHandler)
def start(port):
server = get_server(port)
t = Thread(target=server.serve_forever)
t.setDaemon(True)
t.start()
logger.debug('RPC Server started')
return server
def encode(msg):
"""转译,规则为:
% -> %e
\n -> %n
"""
msg = msg.replace("%", "%e")
msg = msg.replace("\n", "%n")
return msg
def decode(msg):
"""反转译,规则为:
%n -> \n
%e -> %
"""
msg = msg.replace("%n", "\n")
msg = msg.replace("%e", "%")
return msg
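# Round-trip example (illustrative):
#   encode('a%b\nc') -> 'a%eb%nc'
#   decode('a%eb%nc') -> 'a%b\nc'
# Escaping '%' first in encode() and last in decode() keeps the mapping
# reversible; reversing that order would corrupt payloads containing '%n'.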
|
apache-2.0
| 5,114,537,863,154,361,000
| 28.133028
| 111
| 0.574331
| false
| 3.8184
| false
| false
| false
|
MasterScrat/PostMonitor
|
monitor.py
|
1
|
3168
|
#!/usr/bin/python
import sys
import json
import urllib
import urllib2
import time
import logging
from tinydb import TinyDB, where
from apscheduler.schedulers.blocking import BlockingScheduler
# Multiple Projects, each with multiple Events (release, blog post...), each with multiple Links (Reddit, HN, FB, Twitter...)
# A Record is a set of numbers related to a Link at a point in time.
# TODO
# - add FB support (requires auth!)
# - add Twitter support (https://github.com/bear/python-twitter)
# - add WP support
# - keep conf in DB
# - add scheduling info per event? plus default value per project?
# - generalise score/num_comments to array of metrics?
# - generalise target/section to dict of meta?
logging.basicConfig()
db = TinyDB('data/records.json')
sched = BlockingScheduler()
def main():
sched.add_job(get_records, 'interval', id='monitor', seconds=60, max_instances=1)
sched.start()
#get_records()
def get_records():
sched.print_jobs()
conf = load_config()
timestamp = time.time()
print
print '===', conf['monitor_name'], '==='
for project in conf['projects']:
print
print '=', project['project_name'], '='
for event in project['events']:
print '[', event['event_name'], ']'
for url in event['urls']:
record = get_record(url)
record.timestamp = timestamp
record.project = project['project_name']
record.event = event['event_name']
record.url = url
db.insert(record.to_json())
print record
class Record:
def __init__(self, score=0, num_comments=0):
self.score = score
self.num_comments = num_comments
self.timestamp = 0
self.site = ''
self.project = ''
self.event = ''
self.url = ''
self.target = '' # TODO
self.section = '' # TODO
def __str__(self):
return self.site + ': ' + str(self.score) + ' points, ' + str(self.num_comments) + ' comments'
def to_json(self):
return json.loads(json.dumps(self, default=lambda o: o.__dict__))
def get_record(url):
if "reddit.com" in url:
if ".json" not in url:
url = url + '.json'
record = reddit_stats(url)
record.site = "Reddit"
elif "hacker-news.firebaseio.com" in url:
record = hn_stats(url)
record.site = "HackerNews"
elif "news.ycombinator.com" in url:
record = hn_stats('https://hacker-news.firebaseio.com/v0/item/' + url.split("=")[1] + '.json')
record.site = "HackerNews"
elif "api.github.com" in url:
record = gh_stats(url)
record.site = "GitHub"
else:
raise NameError('Unknown site URL ' + url)
return record
def reddit_stats(url):
data = json.loads(read_url(url))
data = data[0]['data']['children'][0]['data']
return Record(data['score'], data['num_comments'])
def hn_stats(url):
data = json.loads(read_url(url))
return Record(data['score'], data['descendants'])
def gh_stats(url):
data = json.loads(read_url(url))
return Record(data['watchers_count'], data['subscribers_count'])
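# Example (illustrative): an HN item JSON such as
#   {"score": 42, "descendants": 7, ...}
# yields Record(42, 7); for GitHub, watchers_count/subscribers_count fill the
# same score/num_comments slots.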
def read_url(url):
hdr = { 'User-Agent' : 'PostMonitor' }
req = urllib2.Request(url, headers=hdr)
return urllib2.urlopen(req).read()
def load_config():
with open('conf.json', 'r') as f:
conf = json.loads(f.read())
return conf
if __name__ == "__main__":
main()
|
mit
| -8,944,656,295,161,008,000
| 22.649254
| 125
| 0.665088
| false
| 2.966292
| false
| false
| false
|
all-of-us/raw-data-repository
|
rdr_service/genomic/genomic_job_components.py
|
1
|
134110
|
"""
Component Classes for Genomic Jobs
Components are assembled by the JobController for a particular Genomic Job
"""
import csv
import logging
import re
import pytz
from collections import deque, namedtuple
from copy import deepcopy
from dateutil.parser import parse
import sqlalchemy
from rdr_service import clock
from rdr_service.dao.bq_genomics_dao import bq_genomic_set_member_update, bq_genomic_gc_validation_metrics_update, \
bq_genomic_set_update, bq_genomic_file_processed_update, \
bq_genomic_manifest_file_update, bq_genomic_set_member_batch_update
from rdr_service.dao.code_dao import CodeDao
from rdr_service.genomic.genomic_data import GenomicQueryClass
from rdr_service.genomic.genomic_state_handler import GenomicStateHandler
from rdr_service.model.biobank_stored_sample import BiobankStoredSample
from rdr_service.model.code import Code
from rdr_service.model.participant_summary import ParticipantRaceAnswers, ParticipantSummary
from rdr_service.model.config_utils import get_biobank_id_prefix
from rdr_service.resource.generators.genomics import genomic_set_member_update, genomic_gc_validation_metrics_update, \
genomic_set_update, genomic_file_processed_update, genomic_manifest_file_update, genomic_set_member_batch_update
from rdr_service.services.jira_utils import JiraTicketHandler
from rdr_service.api_util import (
open_cloud_file,
copy_cloud_file,
delete_cloud_file,
list_blobs,
get_blob)
from rdr_service.model.genomics import (
GenomicSet,
GenomicSetMember,
GenomicGCValidationMetrics,
GenomicSampleContamination,
GenomicAW1Raw,
GenomicAW2Raw)
from rdr_service.participant_enums import (
WithdrawalStatus,
QuestionnaireStatus,
SampleStatus,
Race,
SuspensionStatus,
ParticipantCohort)
from rdr_service.genomic_enums import GenomicSetStatus, GenomicSetMemberStatus, GenomicValidationFlag, GenomicJob, \
GenomicWorkflowState, GenomicSubProcessStatus, GenomicSubProcessResult, GenomicManifestTypes, \
GenomicContaminationCategory, GenomicQcStatus, GenomicIncidentCode
from rdr_service.dao.genomics_dao import (
GenomicGCValidationMetricsDao,
GenomicSetMemberDao,
GenomicFileProcessedDao,
GenomicSetDao,
GenomicJobRunDao,
GenomicManifestFeedbackDao,
GenomicManifestFileDao,
GenomicAW1RawDao,
GenomicAW2RawDao)
from rdr_service.dao.biobank_stored_sample_dao import BiobankStoredSampleDao
from rdr_service.dao.site_dao import SiteDao
from rdr_service.dao.participant_summary_dao import ParticipantSummaryDao
from rdr_service.genomic.genomic_biobank_manifest_handler import (
create_and_upload_genomic_biobank_manifest_file,
)
from rdr_service.genomic.validation import (
GENOMIC_VALID_AGE,
)
from rdr_service.offline.sql_exporter import SqlExporter
from rdr_service.config import (
getSetting,
GENOMIC_CVL_RECONCILIATION_REPORT_SUBFOLDER,
CVL_W1_MANIFEST_SUBFOLDER,
CVL_W3_MANIFEST_SUBFOLDER,
GENOMIC_GEM_A1_MANIFEST_SUBFOLDER,
GENOMIC_GEM_A3_MANIFEST_SUBFOLDER,
GENOME_TYPE_ARRAY,
GENOME_TYPE_WGS,
GAE_PROJECT,
GENOMIC_AW3_ARRAY_SUBFOLDER,
GENOMIC_AW3_WGS_SUBFOLDER,
BIOBANK_AW2F_SUBFOLDER,
)
from rdr_service.code_constants import COHORT_1_REVIEW_CONSENT_YES_CODE
from sqlalchemy.orm import aliased
class GenomicFileIngester:
"""
This class ingests a file from a source GC bucket into the destination table
"""
def __init__(self, job_id=None,
job_run_id=None,
bucket=None,
archive_folder=None,
sub_folder=None,
_controller=None,
target_file=None):
self.controller = _controller
self.job_id = job_id
self.job_run_id = job_run_id
self.file_obj = None
self.file_queue = deque()
self.target_file = target_file
self.bucket_name = bucket
self.archive_folder_name = archive_folder
self.sub_folder_name = sub_folder
# Sub Components
self.file_validator = GenomicFileValidator(
job_id=self.job_id,
controller=self.controller
)
self.file_mover = GenomicFileMover(archive_folder=self.archive_folder_name)
self.metrics_dao = GenomicGCValidationMetricsDao()
self.file_processed_dao = GenomicFileProcessedDao()
self.member_dao = GenomicSetMemberDao()
self.job_run_dao = GenomicJobRunDao()
self.sample_dao = BiobankStoredSampleDao()
self.feedback_dao = GenomicManifestFeedbackDao()
self.manifest_dao = GenomicManifestFileDao()
def generate_file_processing_queue(self):
"""
Creates the list of files to be ingested in this run.
Ordering is currently arbitrary.
"""
# Check Target file is set.
# It will not be set in cron job, but will be set by tool when run manually
_manifest_file_id = None
try:
_manifest_file_id = self.controller.task_data.manifest_file.id
except AttributeError:
pass
if self.target_file is not None:
if self.controller.storage_provider is not None:
_blob = self.controller.storage_provider.get_blob(self.bucket_name, self.target_file)
else:
_blob = get_blob(self.bucket_name, self.target_file)
files = [(self.target_file, _blob.updated)]
else:
files = self._get_new_file_names_and_upload_dates_from_bucket()
if files == GenomicSubProcessResult.NO_FILES:
return files
else:
for file_data in files:
new_file_record = self.file_processed_dao.insert_file_record(
self.job_run_id,
f'{self.bucket_name}/{file_data[0]}',
self.bucket_name,
file_data[0].split('/')[-1],
upload_date=file_data[1],
manifest_file_id=_manifest_file_id)
# For BQ/PDR
bq_genomic_file_processed_update(new_file_record.id, project_id=self.controller.bq_project_id)
genomic_file_processed_update(new_file_record.id)
self.file_queue.append(new_file_record)
def _get_new_file_names_and_upload_dates_from_bucket(self):
"""
Searches the bucket for un-processed files.
:return: list of (filenames, upload_date) or NO_FILES result code
"""
# Setup date
timezone = pytz.timezone('Etc/Greenwich')
date_limit_obj = timezone.localize(self.controller.last_run_time)
# Look for new files with valid filenames
bucket = '/' + self.bucket_name
files = list_blobs(bucket, prefix=self.sub_folder_name)
files = [(s.name, s.updated) for s in files
if s.updated > date_limit_obj
and self.file_validator.validate_filename(s.name)]
if not files:
logging.info('No files in cloud bucket {}'.format(self.bucket_name))
return GenomicSubProcessResult.NO_FILES
return files
def generate_file_queue_and_do_ingestion(self):
"""
Main method of the ingestor component,
generates a queue and processes each file
:return: result code
"""
file_queue_result = self.generate_file_processing_queue()
if file_queue_result == GenomicSubProcessResult.NO_FILES:
logging.info('No files to process.')
return file_queue_result
else:
logging.info('Processing files in queue.')
results = []
while len(self.file_queue):
try:
ingestion_result = self._ingest_genomic_file(
self.file_queue[0])
file_ingested = self.file_queue.popleft()
results.append(ingestion_result == GenomicSubProcessResult.SUCCESS)
if ingestion_result:
ingestion_message = f'Ingestion attempt for {file_ingested.fileName}: {ingestion_result}'
if 'invalid' in ingestion_result.name.lower():
logging.warning(ingestion_message)
else:
logging.info(ingestion_message)
self.file_processed_dao.update_file_record(
file_ingested.id,
GenomicSubProcessStatus.COMPLETED,
ingestion_result
)
# For BQ/PDR
bq_genomic_file_processed_update(file_ingested.id, self.controller.bq_project_id)
genomic_file_processed_update(file_ingested.id)
except IndexError:
logging.info('No files left in file queue.')
return GenomicSubProcessResult.SUCCESS if all(results) \
else GenomicSubProcessResult.ERROR
def _ingest_genomic_file(self, file_obj):
"""
Reads a file object from bucket and inserts into DB
:param: file_obj: A genomic file object
:return: A GenomicSubProcessResultCode
"""
self.file_obj = file_obj
data_to_ingest = self._retrieve_data_from_path(self.file_obj.filePath)
if data_to_ingest == GenomicSubProcessResult.ERROR:
return GenomicSubProcessResult.ERROR
elif data_to_ingest:
logging.info(f'Ingesting data from {self.file_obj.fileName}')
logging.info("Validating file.")
self.file_validator.valid_schema = None
validation_result = self.file_validator.validate_ingestion_file(
filename=self.file_obj.fileName,
data_to_validate=data_to_ingest
)
if validation_result != GenomicSubProcessResult.SUCCESS:
return validation_result
ingestion_config = {
GenomicJob.AW1_MANIFEST: {
'method': self._ingest_aw1_manifest
},
GenomicJob.AW1F_MANIFEST: {
'method': self._ingest_aw1_manifest
},
GenomicJob.METRICS_INGESTION: {
'method': self._process_gc_metrics_data_for_insert
},
GenomicJob.GEM_A2_MANIFEST: {
'method': self._ingest_gem_a2_manifest
},
GenomicJob.GEM_METRICS_INGEST: {
'method': self._ingest_gem_metrics_manifest
},
GenomicJob.W2_INGEST: {
'method': self._ingest_cvl_w2_manifest
},
GenomicJob.AW4_ARRAY_WORKFLOW: {
'method': self._ingest_aw4_manifest
},
GenomicJob.AW4_WGS_WORKFLOW: {
'method': self._ingest_aw4_manifest
},
GenomicJob.AW1C_INGEST: {
'method': self._ingest_aw1c_manifest
},
GenomicJob.AW1CF_INGEST: {
'method': self._ingest_aw1c_manifest
},
GenomicJob.AW5_ARRAY_MANIFEST: {
'method': self._ingest_aw5_manifest
},
GenomicJob.AW5_WGS_MANIFEST: {
'method': self._ingest_aw5_manifest
},
}
ingestion_type = ingestion_config[self.job_id]['method']
return ingestion_type(data_to_ingest)
else:
logging.info("No data to ingest.")
return GenomicSubProcessResult.NO_FILES
@staticmethod
def get_aw1_manifest_column_mappings():
return {
'packageId': 'packageid',
'sampleId': 'sampleid',
'gcManifestBoxStorageUnitId': 'boxstorageunitid',
'gcManifestBoxPlateId': 'boxid/plateid',
'gcManifestWellPosition': 'wellposition',
'gcManifestParentSampleId': 'parentsampleid',
'collectionTubeId': 'collectiontubeid',
'gcManifestMatrixId': 'matrixid',
'gcManifestTreatments': 'treatments',
'gcManifestQuantity_ul': 'quantity(ul)',
'gcManifestTotalConcentration_ng_per_ul': 'totalconcentration(ng/ul)',
'gcManifestTotalDNA_ng': 'totaldna(ng)',
'gcManifestVisitDescription': 'visitdescription',
'gcManifestSampleSource': 'samplesource',
'gcManifestStudy': 'study',
'gcManifestTrackingNumber': 'trackingnumber',
'gcManifestContact': 'contact',
'gcManifestEmail': 'email',
'gcManifestStudyPI': 'studypi',
'gcManifestTestName': 'testname',
'gcManifestFailureMode': 'failuremode',
'gcManifestFailureDescription': 'failuremodedesc',
}
@staticmethod
def get_aw1_raw_column_mappings():
return {
"package_id": "packageid",
"biobankid_sample_id": "biobankidsampleid",
"box_storageunit_id": "boxstorageunitid",
"box_id_plate_id": "boxid/plateid",
"well_position": "wellposition",
"sample_id": "sampleid",
"parent_sample_id": "parentsampleid",
"collection_tube_id": "collectiontubeid",
"matrix_id": "matrixid",
"collection_date": "collectiondate",
"biobank_id": "biobankid",
"sex_at_birth": "sexatbirth",
"age": "age",
"ny_state": "nystate(y/n)",
"sample_type": "sampletype",
"treatments": "treatments",
"quantity": "quantity(ul)",
"total_concentration": "totalconcentration(ng/ul)",
"total_dna": "totaldna(ng)",
"visit_description": "visitdescription",
"sample_source": "samplesource",
"study": "study",
"tracking_number": "trackingnumber",
"contact": "contact",
"email": "email",
"study_pi": "studypi",
"test_name": "testname",
"failure_mode": "failuremode",
"failure_mode_desc": "failuremodedesc",
}
@staticmethod
def get_aw2_raw_column_mappings():
return {
"biobank_id": "biobankid",
"sample_id": "sampleid",
"biobankidsampleid": "biobankidsampleid",
"lims_id": "limsid",
"mean_coverage": "meancoverage",
"genome_coverage": "genomecoverage",
"aouhdr_coverage": "aouhdrcoverage",
"contamination": "contamination",
"sex_concordance": "sexconcordance",
"sex_ploidy": "sexploidy",
"aligned_q30_bases": "alignedq30bases",
"array_concordance": "arrayconcordance",
"processing_status": "processingstatus",
"notes": "notes",
"chipwellbarcode": "chipwellbarcode",
"call_rate": "callrate",
}
def _ingest_aw1_manifest(self, data):
"""
AW1 ingestion method: Updates the GenomicSetMember with AW1 data
If the row is determined to be a control sample,
insert a new GenomicSetMember with AW1 data
:param data:
:param _site: gc_site ID
:return: result code
"""
_state = GenomicWorkflowState.AW0
_site = self._get_site_from_aw1()
for row in data['rows']:
row_copy = dict(zip([key.lower().replace(' ', '').replace('_', '')
for key in row], row.values()))
row_copy['site_id'] = _site
# TODO: This fix is disabled but left in place
# until it is verified that the issue has been fixed in the manifest
# Fix for invalid parent sample values
# try:
# parent_sample_id = int(row_copy['parentsampleid'])
# except ValueError:
# parent_sample_id = 0
# Skip rows if biobank_id is an empty string (row is empty well)
if row_copy['biobankid'] == "":
continue
# Check if this sample has a control sample parent tube
control_sample_parent = self.member_dao.get_control_sample_parent(
row_copy['testname'],
int(row_copy['parentsampleid'])
)
if control_sample_parent:
logging.warning(f"Control sample found: {row_copy['parentsampleid']}")
# Check if the control sample member exists for this GC, BID, collection tube, and sample ID
# Since the Biobank is reusing the sample and collection tube IDs (which are supposed to be unique)
cntrl_sample_member = self.member_dao.get_control_sample_for_gc_and_genome_type(
_site,
row_copy['testname'],
row_copy['biobankid'],
row_copy['collectiontubeid'],
row_copy['sampleid']
)
if not cntrl_sample_member:
# Insert new GenomicSetMember record if none exists
# for this control sample, genome type, and gc site
member = self.create_new_member_from_aw1_control_sample(row_copy)
# Update member for PDR
bq_genomic_set_member_update(member.id, project_id=self.controller.bq_project_id)
genomic_set_member_update(member.id)
# Skip rest of iteration and go to next row
continue
# Find the existing GenomicSetMember
# Set the member based on collection tube ID
# row_copy['testname'] is the genome type (i.e. aou_array, aou_wgs)
member = self.member_dao.get_member_from_collection_tube(row_copy['collectiontubeid'],
row_copy['testname'])
# Since member not found, and not a control sample,
# check if collection tube id was swapped by Biobank
if member is None:
bid = row_copy['biobankid']
# Strip biobank prefix if it's there
if bid[0] in [get_biobank_id_prefix(), 'T']:
bid = bid[1:]
member = self.member_dao.get_member_from_biobank_id_in_state(bid,
row_copy['testname'],
_state)
# If member found, validate new collection tube ID, set collection tube ID
if member:
if self._validate_collection_tube_id(row_copy['collectiontubeid'], bid):
with self.member_dao.session() as session:
self._record_sample_as_contaminated(session, member.collectionTubeId)
member.collectionTubeId = row_copy['collectiontubeid']
else:
# Couldn't find genomic set member based on either biobank ID or collection tube
_message = f"{self.job_id.name}: Cannot find genomic set member: " \
f"collection_tube_id: {row_copy['collectiontubeid']}, "\
f"biobank id: {bid}, "\
f"genome type: {row_copy['testname']}"
self.controller.create_incident(source_job_run_id=self.job_run_id,
source_file_processed_id=self.file_obj.id,
code=GenomicIncidentCode.UNABLE_TO_FIND_MEMBER.name,
message=_message,
biobank_id=bid,
collection_tube_id=row_copy['collectiontubeid'],
sample_id=row_copy['sampleid'],
)
# Skip rest of iteration and continue processing file
continue
# Process the attribute data
member_changed, member = self._process_aw1_attribute_data(row_copy, member)
if member_changed:
self.member_dao.update(member)
# Update member for PDR
bq_genomic_set_member_update(member.id, project_id=self.controller.bq_project_id)
genomic_set_member_update(member.id)
return GenomicSubProcessResult.SUCCESS
def load_raw_awn_file(self):
"""
Loads genomic_aw1_raw/genomic_aw2_raw
with raw data from aw1/aw2 file
:return:
"""
# Set manifest-specific variables
if self.controller.job_id == GenomicJob.LOAD_AW1_TO_RAW_TABLE:
dao = GenomicAW1RawDao()
awn_model = GenomicAW1Raw
columns = self.get_aw1_raw_column_mappings()
elif self.controller.job_id == GenomicJob.LOAD_AW2_TO_RAW_TABLE:
dao = GenomicAW2RawDao()
awn_model = GenomicAW2Raw
columns = self.get_aw2_raw_column_mappings()
else:
logging.error("Job ID not LOAD_AW1_TO_RAW_TABLE or LOAD_AW2_TO_RAW_TABLE")
return GenomicSubProcessResult.ERROR
# look up if any rows exist already for the file
records = dao.get_from_filepath(self.target_file)
if records:
logging.warning(f'File already exists in raw table: {self.target_file}')
return GenomicSubProcessResult.SUCCESS
file_data = self._retrieve_data_from_path(self.target_file)
# Return the error status if there is an error in file_data
if not isinstance(file_data, dict):
return file_data
# Processing raw data in batches
batch_size = 100
item_count = 0
batch = list()
for row in file_data['rows']:
# Standardize fields to lower, no underscores or spaces
row = dict(zip([key.lower().replace(' ', '').replace('_', '')
for key in row], row.values()))
row_obj = self._set_raw_awn_attributes(row, awn_model(), columns)
batch.append(row_obj)
item_count += 1
if item_count == batch_size:
# Insert batch into DB
with dao.session() as session:
session.bulk_save_objects(batch)
# Reset batch
item_count = 0
batch = list()
if item_count:
# insert last batch if needed
with dao.session() as session:
session.bulk_save_objects(batch)
return GenomicSubProcessResult.SUCCESS
def ingest_single_aw1_row_for_member(self, member):
# Open file and pull row based on member.biobankId
with self.controller.storage_provider.open(self.target_file, 'r') as aw1_file:
reader = csv.DictReader(aw1_file, delimiter=',')
row = [r for r in reader if r['BIOBANK_ID'][1:] == str(member.biobankId)][0]
# Alter field names to remove spaces and change to lower case
row = dict(zip([key.lower().replace(' ', '').replace('_', '')
for key in row], row.values()))
ingested_before = member.reconcileGCManifestJobRunId is not None
# Write AW1 data to genomic_set_member table
gc_manifest_column_mappings = self.get_aw1_manifest_column_mappings()
# Set attributes from file
for key in gc_manifest_column_mappings.keys():
try:
member.__setattr__(key, row[gc_manifest_column_mappings[key]])
except KeyError:
member.__setattr__(key, None)
# Set other fields not in AW1 file
member.reconcileGCManifestJobRunId = self.job_run_id
member.aw1FileProcessedId = self.file_obj.id
member.gcSite = self._get_site_from_aw1()
# Only update the member's genomicWorkflowState if it was AW0
if member.genomicWorkflowState == GenomicWorkflowState.AW0:
member.genomicWorkflowState = GenomicWorkflowState.AW1
member.genomicWorkflowStateModifiedTime = clock.CLOCK.now()
# Update member in DB
self.member_dao.update(member)
# Update AW1 manifest record count
if not ingested_before and not self.controller.bypass_record_count:
self.increment_manifest_file_record_count_from_id()
return GenomicSubProcessResult.SUCCESS
def ingest_single_aw2_row_for_member(self, member: GenomicSetMember) -> GenomicSubProcessResult:
# Open file and pull row based on member.biobankId
with self.controller.storage_provider.open(self.target_file, 'r') as aw2_file:
reader = csv.DictReader(aw2_file, delimiter=',')
row = [r for r in reader if r['Biobank ID'] == str(member.biobankId)][0]
# Alter field names to remove spaces and change to lower case
row = dict(zip([key.lower().replace(' ', '').replace('_', '')
for key in row], row.values()))
# Begin prepping the AW2 row
row = self.prep_aw2_row_attributes(row, member)
if row == GenomicSubProcessResult.ERROR:
return GenomicSubProcessResult.ERROR
# check whether metrics object exists for that member
existing_metrics_obj = self.metrics_dao.get_metrics_by_member_id(member.id)
if existing_metrics_obj is not None:
metric_id = existing_metrics_obj.id
else:
metric_id = None
upserted_obj = self.metrics_dao.upsert_gc_validation_metrics_from_dict(row, metric_id)
# Update GC Metrics for PDR
if upserted_obj:
bq_genomic_gc_validation_metrics_update(upserted_obj.id, project_id=self.controller.bq_project_id)
genomic_gc_validation_metrics_update(upserted_obj.id)
self.update_member_for_aw2(member)
# Update member in DB
self.member_dao.update(member)
# Update AW1 manifest feedback record count
if existing_metrics_obj is None and not self.controller.bypass_record_count:
# For feedback manifest loop
# Get the genomic_manifest_file
manifest_file = self.file_processed_dao.get(member.aw1FileProcessedId)
if manifest_file is not None:
self.feedback_dao.increment_feedback_count(manifest_file.genomicManifestFileId,
_project_id=self.controller.bq_project_id)
return GenomicSubProcessResult.SUCCESS
def increment_manifest_file_record_count_from_id(self):
"""
Increments the manifest record count by 1
"""
manifest_file = self.manifest_dao.get(self.file_obj.genomicManifestFileId)
manifest_file.recordCount += 1
with self.manifest_dao.session() as s:
s.merge(manifest_file)
bq_genomic_manifest_file_update(manifest_file.id, project_id=self.controller.bq_project_id)
genomic_manifest_file_update(manifest_file.id)
def prep_aw2_row_attributes(self, row: dict, member: GenomicSetMember):
"""
Set contamination, contamination category,
call rate, member_id, and file_id on AW2 row dictionary
:param member:
:param row:
:return: row dictionary or ERROR code
"""
row['member_id'] = member.id
row['file_id'] = self.file_obj.id
# Truncate call rate
try:
row['callrate'] = row['callrate'][:10]
except KeyError:
pass
# Convert blank alignedq30bases to none
try:
if row['alignedq30bases'] == '':
row['alignedq30bases'] = None
except KeyError:
pass
# Validate and clean contamination data
try:
row['contamination'] = float(row['contamination'])
# Percentages shouldn't be less than 0
if row['contamination'] < 0:
row['contamination'] = 0
except ValueError:
if row['processingstatus'].lower() != 'pass':
return row
_message = f'{self.job_id.name}: Contamination must be a number for sample_id: {row["sampleid"]}'
self.controller.create_incident(source_job_run_id=self.job_run_id,
source_file_processed_id=self.file_obj.id,
code=GenomicIncidentCode.DATA_VALIDATION_FAILED.name,
message=_message,
biobank_id=member.biobankId,
sample_id=row['sampleid'],
)
return GenomicSubProcessResult.ERROR
# Calculate contamination_category
contamination_value = float(row['contamination'])
category = self.calculate_contamination_category(member.collectionTubeId,
contamination_value, member)
row['contamination_category'] = category
return row
def update_member_for_aw2(self, member: GenomicSetMember):
"""
Updates the aw2FileProcessedId and possibly the genomicWorkflowState
of a GenomicSetMember after AW2 data has been ingested
:param member:
"""
member.aw2FileProcessedId = self.file_obj.id
# Only update the state if it was AW1
if member.genomicWorkflowState == GenomicWorkflowState.AW1:
member.genomicWorkflowState = GenomicWorkflowState.AW2
member.genomicWorkflowStateModifiedTime = clock.CLOCK.now()
self.member_dao.update(member)
def _ingest_gem_a2_manifest(self, file_data):
"""
Processes the GEM A2 manifest file data
Updates GenomicSetMember object with gem_pass field.
:return: Result Code
"""
try:
for row in file_data['rows']:
sample_id = row['sample_id']
member = self.member_dao.get_member_from_sample_id_with_state(sample_id,
GENOME_TYPE_ARRAY,
GenomicWorkflowState.A1)
if member is None:
logging.warning(f'Invalid sample ID: {sample_id}')
continue
member.gemPass = row['success']
member.gemA2ManifestJobRunId = self.job_run_id
member.gemDateOfImport = parse(row['date_of_import'])
_signal = 'a2-gem-pass' if member.gemPass.lower() == 'y' else 'a2-gem-fail'
# update state and state modified time only if changed
if member.genomicWorkflowState != GenomicStateHandler.get_new_state(
member.genomicWorkflowState, signal=_signal):
member.genomicWorkflowState = GenomicStateHandler.get_new_state(
member.genomicWorkflowState,
signal=_signal)
member.genomicWorkflowStateModifiedTime = clock.CLOCK.now()
self.member_dao.update(member)
# Update member for PDR
bq_genomic_set_member_update(member.id, project_id=self.controller.bq_project_id)
genomic_set_member_update(member.id)
return GenomicSubProcessResult.SUCCESS
except (RuntimeError, KeyError):
return GenomicSubProcessResult.ERROR
def _ingest_gem_metrics_manifest(self, file_data):
"""
Processes the GEM Metrics manifest file data
Updates GenomicSetMember object with metrics fields.
:return: Result Code
"""
try:
for row in file_data['rows']:
sample_id = row['sample_id']
member = self.member_dao.get_member_from_sample_id_with_state(sample_id,
GENOME_TYPE_ARRAY,
GenomicWorkflowState.GEM_RPT_READY)
if member is None:
logging.warning(f'Invalid sample ID: {sample_id}')
continue
member.gemMetricsAncestryLoopResponse = row['ancestry_loop_response']
member.gemMetricsAvailableResults = row['available_results']
member.gemMetricsResultsReleasedAt = row['results_released_at']
member.colorMetricsJobRunID = self.job_run_id
self.member_dao.update(member)
# Update member for PDR
bq_genomic_set_member_update(member.id, project_id=self.controller.bq_project_id)
genomic_set_member_update(member.id)
return GenomicSubProcessResult.SUCCESS
except (RuntimeError, KeyError):
return GenomicSubProcessResult.ERROR
def _ingest_aw4_manifest(self, file_data):
"""
Processes the AW4 manifest file data
:param file_data:
:return:
"""
try:
for row in file_data['rows']:
row_copy = dict(zip([key.lower().replace(' ', '').replace('_', '')
for key in row], row.values()))
sample_id = row_copy['sampleid']
genome_type = GENOME_TYPE_ARRAY \
if self.job_id == GenomicJob.AW4_ARRAY_WORKFLOW else GENOME_TYPE_WGS
member = self.member_dao.get_member_from_aw3_sample(sample_id,
genome_type)
if member is None:
logging.warning(f'Invalid sample ID: {sample_id}')
continue
member.aw4ManifestJobRunID = self.job_run_id
member.qcStatus = self._get_qc_status_from_value(row_copy['qcstatus'])
metrics = self.metrics_dao.get_metrics_by_member_id(member.id)
if metrics:
metrics.drcSexConcordance = row_copy['drcsexconcordance']
if self.job_id == GenomicJob.AW4_ARRAY_WORKFLOW:
metrics.drcCallRate = row_copy['drccallrate']
elif self.job_id == GenomicJob.AW4_WGS_WORKFLOW:
metrics.drcContamination = row_copy['drccontamination']
metrics.drcMeanCoverage = row_copy['drcmeancoverage']
metrics.drcFpConcordance = row_copy['drcfpconcordance']
metrics_obj = self.metrics_dao.upsert(metrics)
bq_genomic_gc_validation_metrics_update(metrics_obj.id, project_id=self.controller.bq_project_id)
genomic_gc_validation_metrics_update(metrics_obj.id)
self.member_dao.update(member)
# Update member for PDR
bq_genomic_set_member_update(member.id, project_id=self.controller.bq_project_id)
genomic_set_member_update(member.id)
return GenomicSubProcessResult.SUCCESS
except (RuntimeError, KeyError):
return GenomicSubProcessResult.ERROR
def _retrieve_data_from_path(self, path):
"""
Retrieves the last genomic data file from a bucket
:param path: The source file to ingest
:return: CSV data as a dictionary
"""
try:
filename = path.split('/')[1]
logging.info(
'Opening CSV file from queue {}: {}.'
.format(path, filename)
)
if self.controller.storage_provider:
with self.controller.storage_provider.open(path, 'r') as csv_file:
return self._read_data_to_ingest(csv_file)
else:
with open_cloud_file(path) as csv_file:
return self._read_data_to_ingest(csv_file)
except FileNotFoundError:
logging.error(f"File path '{path}' not found")
return GenomicSubProcessResult.ERROR
@staticmethod
def _read_data_to_ingest(csv_file):
data_to_ingest = {'rows': []}
csv_reader = csv.DictReader(csv_file, delimiter=",")
data_to_ingest['fieldnames'] = csv_reader.fieldnames
for row in csv_reader:
for key in row:
if not key:
del row[key]
data_to_ingest['rows'].append(row)
return data_to_ingest
def _process_aw1_attribute_data(self, aw1_data, member):
"""
Checks a GenomicSetMember object for changes provided by AW1 data
And mutates the GenomicSetMember object if necessary
:param aw1_data: dict
:param member: GenomicSetMember
:return: (boolean, GenomicSetMember)
"""
# Check if the member needs updating
if self._test_aw1_data_for_member_updates(aw1_data, member):
member = self._set_member_attributes_from_aw1(aw1_data, member)
member = self._set_rdr_member_attributes_for_aw1(aw1_data, member)
return True, member
return False, member
def _test_aw1_data_for_member_updates(self, aw1_data, member):
"""
Checks each attribute provided by Biobank
for changes to GenomicSetMember Object
:param aw1_data: dict
:param member: GenomicSetMember
:return: boolean (true if member requires updating)
"""
gc_manifest_column_mappings = self.get_aw1_manifest_column_mappings()
member_needs_updating = False
# Iterate each value and test whether the strings for each field correspond
for key in gc_manifest_column_mappings.keys():
if str(member.__getattribute__(key)) != str(aw1_data.get(gc_manifest_column_mappings[key])):
member_needs_updating = True
return member_needs_updating
def _set_member_attributes_from_aw1(self, aw1_data, member):
"""
Mutates the GenomicSetMember attributes provided by the Biobank
:param aw1_data: dict
:param member: GenomicSetMember
:return: GenomicSetMember
"""
gc_manifest_column_mappings = self.get_aw1_manifest_column_mappings()
for key in gc_manifest_column_mappings.keys():
member.__setattr__(key, aw1_data.get(gc_manifest_column_mappings[key]))
return member
def _set_rdr_member_attributes_for_aw1(self, aw1_data, member):
"""
Mutates the GenomicSetMember RDR attributes not provided by the Biobank
:param aw1_data: dict
:param member: GenomicSetMember
:return: GenomicSetMember
"""
# Set job run and file processed IDs
member.reconcileGCManifestJobRunId = self.job_run_id
# Don't overwrite aw1_file_processed_id when ingesting an AW1F
if self.job_id == GenomicJob.AW1_MANIFEST:
member.aw1FileProcessedId = self.file_obj.id
# Set the GC site ID (sourced from file-name)
member.gcSiteId = aw1_data['site_id']
# Only update the state if it was AW0 or AW1 (if in failure manifest workflow)
# We do not want to regress a state for reingested data
state_to_update = GenomicWorkflowState.AW0
if self.controller.job_id == GenomicJob.AW1F_MANIFEST:
state_to_update = GenomicWorkflowState.AW1
if member.genomicWorkflowState == state_to_update:
_signal = "aw1-reconciled"
# Set the signal for a failed sample
if aw1_data['failuremode'] is not None and aw1_data['failuremode'] != '':
_signal = 'aw1-failed'
member.genomicWorkflowState = GenomicStateHandler.get_new_state(
member.genomicWorkflowState,
signal=_signal)
member.genomicWorkflowStateModifiedTime = clock.CLOCK.now()
return member
def _set_raw_awn_attributes(self, awn_data, awn_row_obj, columns):
"""
Loads GenomicAW1Raw and GenomicAW2Raw attributes from awn_data
:param awn_data: dict
:param awn_row_obj: GenomicAW1Raw/GenomicAW2Raw object
:param mapping_function: function that returns column mappings
:return: GenomicAW1Raw or GenomicAW2Raw
"""
awn_row_obj.file_path = self.target_file
awn_row_obj.created = clock.CLOCK.now()
awn_row_obj.modified = clock.CLOCK.now()
for key in columns.keys():
awn_row_obj.__setattr__(key, awn_data.get(columns[key]))
return awn_row_obj
def _process_gc_metrics_data_for_insert(self, data_to_ingest):
""" Since input files vary in column names,
this standardizes the field-names before passing to the bulk inserter
:param data_to_ingest: stream of data in dict format
:return result code
"""
# iterate over each row from CSV and insert into gc metrics table
for row in data_to_ingest['rows']:
# change all key names to lower
row_copy = dict(zip([key.lower().replace(' ', '').replace('_', '')
for key in row],
row.values()))
genome_type = self.file_validator.genome_type
member = self.member_dao.get_member_from_sample_id(
int(row_copy['sampleid']),
genome_type
)
if member is not None:
row_copy = self.prep_aw2_row_attributes(row_copy, member)
if row_copy == GenomicSubProcessResult.ERROR:
continue
# check whether metrics object exists for that member
existing_metrics_obj = self.metrics_dao.get_metrics_by_member_id(member.id)
if existing_metrics_obj is not None:
if self.controller.skip_updates:
# when running tool, updates can be skipped
continue
else:
metric_id = existing_metrics_obj.id
else:
metric_id = None
upserted_obj = self.metrics_dao.upsert_gc_validation_metrics_from_dict(row_copy, metric_id)
# Update GC Metrics for PDR
if upserted_obj:
bq_genomic_gc_validation_metrics_update(upserted_obj.id, project_id=self.controller.bq_project_id)
genomic_gc_validation_metrics_update(upserted_obj.id)
self.update_member_for_aw2(member)
# For feedback manifest loop
# Get the genomic_manifest_file
manifest_file = self.file_processed_dao.get(member.aw1FileProcessedId)
if manifest_file is not None and existing_metrics_obj is None:
self.feedback_dao.increment_feedback_count(manifest_file.genomicManifestFileId,
_project_id=self.controller.bq_project_id)
else:
bid = row_copy['biobankid']
if bid[0] in [get_biobank_id_prefix(), 'T']:
bid = bid[1:]
# Couldn't find genomic set member based on either biobank ID or sample ID
_message = f"{self.job_id.name}: Cannot find genomic set member for bid, sample_id: "\
f"{row_copy['biobankid']}, {row_copy['sampleid']}"
self.controller.create_incident(source_job_run_id=self.job_run_id,
source_file_processed_id=self.file_obj.id,
code=GenomicIncidentCode.UNABLE_TO_FIND_MEMBER.name,
message=_message,
biobank_id=bid,
sample_id=row_copy['sampleid'],
)
return GenomicSubProcessResult.SUCCESS
def _ingest_cvl_w2_manifest(self, file_data):
"""
Processes the CVL W2 manifest file data
:return: Result Code
"""
try:
for row in file_data['rows']:
# change all key names to lower
row_copy = dict(zip([key.lower().replace(' ', '').replace('_', '')
for key in row],
row.values()))
biobank_id = row_copy['biobankid']
member = self.member_dao.get_member_from_biobank_id(biobank_id, GENOME_TYPE_WGS)
if member is None:
logging.warning(f'Invalid Biobank ID: {biobank_id}')
continue
member.genomeType = row_copy['testname']
member.cvlW2ManifestJobRunID = self.job_run_id
# update state and state modified time only if changed
if member.genomicWorkflowState != GenomicStateHandler.get_new_state(
member.genomicWorkflowState, signal='w2-ingestion-success'):
member.genomicWorkflowState = GenomicStateHandler.get_new_state(
member.genomicWorkflowState,
signal='w2-ingestion-success')
member.genomicWorkflowStateModifiedTime = clock.CLOCK.now()
self.member_dao.update(member)
# Update member for PDR
bq_genomic_set_member_update(member.id, project_id=self.controller.bq_project_id)
genomic_set_member_update(member.id)
return GenomicSubProcessResult.SUCCESS
except (RuntimeError, KeyError):
return GenomicSubProcessResult.ERROR
def _ingest_aw5_manifest(self, file_data):
try:
for row in file_data['rows']:
row_copy = dict(zip([key.lower().replace(' ', '').replace('_', '')
for key in row], row.values()))
biobank_id = row_copy['biobankid']
biobank_id = biobank_id[1:] if biobank_id[0].isalpha() else biobank_id
sample_id = row_copy['sampleid']
member = self.member_dao.get_member_from_biobank_id_and_sample_id(biobank_id, sample_id,
self.file_validator.genome_type)
if not member:
logging.warning(f'Cannot find genomic member record for biobank_id: '
f'{biobank_id} and sample_id: {sample_id}, skipping...')
continue
existing_metrics_obj = self.metrics_dao.get_metrics_by_member_id(member.id)
if existing_metrics_obj is not None:
metric_id = existing_metrics_obj.id
else:
logging.warning(f'Cannot find metrics record for member id: '
f'{member.id}, skipping...')
continue
updated_obj = self.metrics_dao.update_gc_validation_metrics_deleted_flags_from_dict(row_copy,
metric_id)
# Update GC Metrics for PDR
if updated_obj:
bq_genomic_gc_validation_metrics_update(updated_obj.id, project_id=self.controller.bq_project_id)
genomic_gc_validation_metrics_update(updated_obj.id)
return GenomicSubProcessResult.SUCCESS
except (RuntimeError, KeyError):
return GenomicSubProcessResult.ERROR
def _ingest_aw1c_manifest(self, file_data):
"""
Processes the CVL AW1C manifest file data
:return: Result Code
"""
try:
for row in file_data['rows']:
row_copy = dict(zip([key.lower().replace(' ', '').replace('_', '')
for key in row], row.values()))
collection_tube_id = row_copy['collectiontubeid']
member = self.member_dao.get_member_from_collection_tube(collection_tube_id, GENOME_TYPE_WGS)
if member is None:
# Currently ignoring invalid cases
logging.warning(f'Invalid collection tube ID: {collection_tube_id}')
continue
# Update the AW1C job run ID and genome_type
member.cvlAW1CManifestJobRunID = self.job_run_id
member.genomeType = row_copy['testname']
# Handle genomic state
_signal = "aw1c-reconciled"
if row_copy['failuremode'] not in (None, ''):
member.gcManifestFailureMode = row_copy['failuremode']
member.gcManifestFailureDescription = row_copy['failuremodedesc']
_signal = 'aw1c-failed'
# update state and state modified time only if changed
if member.genomicWorkflowState != GenomicStateHandler.get_new_state(
member.genomicWorkflowState, signal=_signal):
member.genomicWorkflowState = GenomicStateHandler.get_new_state(
member.genomicWorkflowState,
signal=_signal)
member.genomicWorkflowStateModifiedTime = clock.CLOCK.now()
self.member_dao.update(member)
# Update member for PDR
bq_genomic_set_member_update(member.id, project_id=self.controller.bq_project_id)
genomic_set_member_update(member.id)
return GenomicSubProcessResult.SUCCESS
except (RuntimeError, KeyError):
return GenomicSubProcessResult.ERROR
def _get_site_from_aw1(self):
"""
Returns the Genomic Center's site ID from the AW1 filename
:return: GC site ID string
"""
return self.file_obj.fileName.split('/')[-1].split("_")[0].lower()
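# Example (illustrative filename, not from the source): an AW1 named
# 'JH_AoU_GEN_PKG-1907-120819.csv' yields the GC site ID 'jh'.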
def _validate_collection_tube_id(self, collection_tube_id, bid):
"""
Returns true if biobank_ID is associated to biobank_stored_sample_id
(collection_tube_id)
:param collection_tube_id:
:param bid:
:return: boolean
"""
sample = self.sample_dao.get(collection_tube_id)
if sample:
return int(sample.biobankId) == int(bid)
return False
@staticmethod
def _get_qc_status_from_value(aw4_value):
"""
Returns the GenomicQcStatus enum value for
:param aw4_value: string from AW4 file (PASS/FAIL)
:return: GenomicQcStatus
"""
if aw4_value.strip().lower() == 'pass':
return GenomicQcStatus.PASS
elif aw4_value.strip().lower() == 'fail':
return GenomicQcStatus.FAIL
else:
logging.warning(f'Value from AW4 "{aw4_value}" is not PASS/FAIL.')
return GenomicQcStatus.UNSET
def create_new_member_from_aw1_control_sample(self, aw1_data: dict) -> GenomicSetMember:
"""
Creates a new control sample GenomicSetMember in RDR based on AW1 data
These will look like regular GenomicSetMember samples
:param aw1_data: dict from aw1 row
:return: GenomicSetMember
"""
# Writing new genomic_set_member based on AW1 data
max_set_id = self.member_dao.get_collection_tube_max_set_id()[0]
# Insert new member with biobank_id and collection tube ID from AW1
new_member_obj = GenomicSetMember(
genomicSetId=max_set_id,
participantId=0,
biobankId=aw1_data['biobankid'],
collectionTubeId=aw1_data['collectiontubeid'],
validationStatus=GenomicSetMemberStatus.VALID,
genomeType=aw1_data['testname'],
genomicWorkflowState=GenomicWorkflowState.AW1
)
# Set member attributes from AW1
new_member_obj = self._set_member_attributes_from_aw1(aw1_data, new_member_obj)
new_member_obj = self._set_rdr_member_attributes_for_aw1(aw1_data, new_member_obj)
return self.member_dao.insert(new_member_obj)
@staticmethod
def _participant_has_potentially_clean_samples(session, biobank_id):
"""Check for any stored sample for the participant that is not contaminated
and is a 1ED04, 1ED10, or 1SAL2 test"""
query = session.query(BiobankStoredSample).filter(
BiobankStoredSample.biobankId == biobank_id,
BiobankStoredSample.status < SampleStatus.SAMPLE_NOT_RECEIVED
).outerjoin(GenomicSampleContamination).filter(
GenomicSampleContamination.id.is_(None),
BiobankStoredSample.test.in_(['1ED04', '1ED10', '1SAL2'])
)
exists_query = session.query(query.exists())
return exists_query.scalar()
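# The ORM query above corresponds roughly to this SQL (an illustrative sketch;
# actual table/column names and the join condition depend on the ORM mappings):
#   SELECT EXISTS (
#       SELECT 1 FROM biobank_stored_sample bss
#       LEFT JOIN genomic_sample_contamination gsc ON gsc.sample_id = bss.biobank_stored_sample_id
#       WHERE bss.biobank_id = :biobank_id
#         AND bss.status < :sample_not_received
#         AND gsc.id IS NULL
#         AND bss.test IN ('1ED04', '1ED10', '1SAL2'))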
def _record_sample_as_contaminated(self, session, sample_id):
session.add(GenomicSampleContamination(
sampleId=sample_id,
failedInJob=self.job_id
))
def calculate_contamination_category(self, sample_id, raw_contamination, member: GenomicSetMember):
"""
Takes contamination value from AW2 and calculates GenomicContaminationCategory
:param sample_id:
:param raw_contamination:
:param member:
:return: GenomicContaminationCategory
"""
ps_dao = ParticipantSummaryDao()
ps = ps_dao.get(member.participantId)
contamination_category = GenomicContaminationCategory.UNSET
# No Extract if contamination <1%
if raw_contamination < 0.01:
contamination_category = GenomicContaminationCategory.NO_EXTRACT
# Only extract WGS if contamination between 1 and 3 % inclusive AND ROR
elif (0.01 <= raw_contamination <= 0.03) and ps.consentForGenomicsROR == QuestionnaireStatus.SUBMITTED:
contamination_category = GenomicContaminationCategory.EXTRACT_WGS
# No Extract if contamination between 1 and 3 % inclusive and GROR is not Yes
elif (0.01 <= raw_contamination <= 0.03) and ps.consentForGenomicsROR != QuestionnaireStatus.SUBMITTED:
contamination_category = GenomicContaminationCategory.NO_EXTRACT
# Extract Both if contamination > 3%
elif raw_contamination > 0.03:
contamination_category = GenomicContaminationCategory.EXTRACT_BOTH
with ps_dao.session() as session:
if raw_contamination >= 0.01:
# Record in the contamination table, regardless of GROR consent
self._record_sample_as_contaminated(session, sample_id)
if contamination_category != GenomicContaminationCategory.NO_EXTRACT and \
not self._participant_has_potentially_clean_samples(session, member.biobankId):
contamination_category = GenomicContaminationCategory.TERMINAL_NO_EXTRACT
return contamination_category
class GenomicFileValidator:
"""
    This class validates the Genomic Centers' data files
"""
GENOME_TYPE_MAPPINGS = {
'gen': GENOME_TYPE_ARRAY,
'seq': GENOME_TYPE_WGS,
}
def __init__(self, filename=None, data=None, schema=None, job_id=None, controller=None):
self.filename = filename
self.data_to_validate = data
self.valid_schema = schema
self.job_id = job_id
self.genome_type = None
self.controller = controller
self.GC_METRICS_SCHEMAS = {
'seq': (
"biobankid",
"sampleid",
"biobankidsampleid",
"limsid",
"meancoverage",
"genomecoverage",
"aouhdrcoverage",
"contamination",
"sexconcordance",
"sexploidy",
"alignedq30bases",
"arrayconcordance",
"processingstatus",
"notes",
),
'gen': (
"biobankid",
"sampleid",
"biobankidsampleid",
"limsid",
"chipwellbarcode",
"callrate",
"sexconcordance",
"contamination",
"processingstatus",
"notes",
),
}
self.VALID_GENOME_CENTERS = ('uw', 'bam', 'bcm', 'bi', 'jh', 'rdr')
self.VALID_CVL_FACILITIES = ('rdr', 'color', 'uw', 'baylor')
self.GC_MANIFEST_SCHEMA = (
"packageid",
"biobankidsampleid",
"boxstorageunitid",
"boxid/plateid",
"wellposition",
"sampleid",
"parentsampleid",
"collectiontubeid",
"matrixid",
"collectiondate",
"biobankid",
"sexatbirth",
"age",
"nystate(y/n)",
"sampletype",
"treatments",
"quantity(ul)",
"totalconcentration(ng/ul)",
"totaldna(ng)",
"visitdescription",
"samplesource",
"study",
"trackingnumber",
"contact",
"email",
"studypi",
"testname",
"failuremode",
"failuremodedesc"
)
self.GEM_A2_SCHEMA = (
"biobankid",
"sampleid",
"success",
"dateofimport",
)
self.GEM_METRICS_SCHEMA = (
"biobankid",
"sampleid",
"ancestryloopresponse",
"availableresults",
"resultsreleasedat",
)
self.CVL_W2_SCHEMA = (
"genomicsetname",
"biobankid",
"sexatbirth",
"nyflag",
"siteid",
"secondaryvalidation",
"datesubmitted",
"testname",
)
self.AW4_ARRAY_SCHEMA = (
"biobankid",
"sampleid",
"sexatbirth",
"siteid",
"redidatpath",
"redidatmd5path",
"greenidatpath",
"greenidatmd5path",
"vcfpath",
"vcfindexpath",
"researchid",
"qcstatus",
"drcsexconcordance",
"drccallrate",
)
self.AW4_WGS_SCHEMA = (
"biobankid",
"sampleid",
"sexatbirth",
"siteid",
"vcfhfpath",
"vcfhfmd5path",
"vcfhfindexpath",
"vcfrawpath",
"vcfrawmd5path",
"vcfrawindexpath",
"crampath",
"crammd5path",
"craipath",
"researchid",
"qcstatus",
"drcsexconcordance",
"drccontamination",
"drcmeancoverage",
"drcfpconcordance",
)
self.AW5_WGS_SCHEMA = {
"biobankid",
"sampleid",
"biobankidsampleid",
"sexatbirth",
"siteid",
"vcfhf",
"vcfhfindex",
"vcfhfmd5",
"vcfraw",
"vcfrawindex",
"vcfrawmd5",
"cram",
"crammd5",
"crai",
"gvcf",
"gvcfmd5",
}
self.AW5_ARRAY_SCHEMA = {
"biobankid",
"sampleid",
"biobankidsampleid",
"sexatbirth",
"siteid",
"redidat",
"redidatmd5",
"greenidat",
"greenidatmd5",
"vcf",
"vcfindex",
"vcfmd5",
}
def validate_ingestion_file(self, *, filename, data_to_validate):
"""
Procedure to validate an ingestion file
:param filename:
:param data_to_validate:
:return: result code
"""
self.filename = filename
file_processed = self.controller.\
file_processed_dao.get_record_from_filename(filename)
if not self.validate_filename(filename):
return GenomicSubProcessResult.INVALID_FILE_NAME
struct_valid_result, missing_fields, expected = self._check_file_structure_valid(
data_to_validate['fieldnames'])
if struct_valid_result == GenomicSubProcessResult.INVALID_FILE_NAME:
return GenomicSubProcessResult.INVALID_FILE_NAME
if not struct_valid_result:
slack = True
invalid_message = f"{self.job_id.name}: File structure of {filename} is not valid."
if missing_fields:
invalid_message += f' Missing fields: {missing_fields}'
if len(missing_fields) == len(expected):
slack = False
self.controller.create_incident(
source_job_run_id=self.controller.job_run.id,
source_file_processed_id=file_processed.id,
code=GenomicIncidentCode.FILE_VALIDATION_FAILED_STRUCTURE.name,
message=invalid_message,
slack=slack
)
return GenomicSubProcessResult.INVALID_FILE_STRUCTURE
return GenomicSubProcessResult.SUCCESS
def validate_filename(self, filename):
"""
        Applies a naming rule to an arbitrary filename.
        Naming rules are defined as local functions and
        mapped to a Genomic Job ID in the naming_rules dict.
        :param filename: the filename to validate
:return: boolean
"""
if self.job_id in [GenomicJob.BB_RETURN_MANIFEST]:
filename_components = [x.lower() for x in filename.split('/')[-1].split("-")]
else:
filename_components = [x.lower() for x in filename.split('/')[-1].split("_")]
# Naming Rule Definitions
def bb_result_name_rule():
"""Biobank to DRC Result name rule"""
return (
filename_components[0] == 'genomic' and
filename_components[1] == 'manifest' and
filename_components[2] in ('aou_array', 'aou_wgs') and
filename.lower().endswith('csv')
)
def gc_validation_metrics_name_rule():
"""GC metrics file name rule"""
return (
filename_components[0] in self.VALID_GENOME_CENTERS and
filename_components[1] == 'aou' and
filename_components[2] in self.GC_METRICS_SCHEMAS.keys() and
filename.lower().endswith('csv')
)
def bb_to_gc_manifest_name_rule():
"""Biobank to GCs manifest name rule"""
return (
filename_components[0] in self.VALID_GENOME_CENTERS and
filename_components[1] == 'aou' and
filename_components[2] in ('seq', 'gen') and
filename.lower().endswith('csv')
)
def aw1f_manifest_name_rule():
"""Biobank to GCs Failure (AW1F) manifest name rule"""
return (
len(filename_components) == 5 and
filename_components[0] in self.VALID_GENOME_CENTERS and
filename_components[1] == 'aou' and
filename_components[2] in ('seq', 'gen') and
re.search(r"pkg-[0-9]{4}-[0-9]{5,}$",
filename_components[3]) is not None and
filename_components[4] == 'failure.csv' and
filename.lower().endswith('csv')
)
def cvl_w2_manifest_name_rule():
"""
CVL W2 (secondary validation) manifest name rule
UW_AoU_CVL_RequestValidation_Date.csv
"""
return (
len(filename_components) == 5 and
filename_components[0] in self.VALID_CVL_FACILITIES and
filename_components[1] == 'aou' and
filename_components[2] == 'cvl' and
filename_components[3] == 'requestvalidation' and
filename.lower().endswith('csv')
)
def gem_a2_manifest_name_rule():
"""GEM A2 manifest name rule: i.e. AoU_GEM_A2_manifest_2020-07-11-00-00-00.csv"""
return (
len(filename_components) == 5 and
filename_components[0] == 'aou' and
filename_components[1] == 'gem' and
filename_components[2] == 'a2' and
filename.lower().endswith('csv')
)
def cvl_aw1c_manifest_name_rule():
"""AW1C Biobank to CVLs manifest name rule"""
return (
filename_components[0] in self.VALID_GENOME_CENTERS and
filename_components[1] == 'aou' and
filename_components[2] == 'cvl' and
filename.lower().endswith('csv')
)
def cvl_aw1cf_manifest_name_rule():
"""AW1F Biobank to CVLs manifest name rule"""
return (
filename_components[0] in self.VALID_GENOME_CENTERS and
filename_components[1] == 'aou' and
filename_components[2] == 'cvl' and
filename_components[4] == 'failure.csv' and
filename.lower().endswith('csv')
)
def gem_metrics_name_rule():
"""GEM Metrics name rule: i.e. AoU_GEM_metrics_aggregate_2020-07-11-00-00-00.csv"""
return (
filename_components[0] == 'aou' and
filename_components[1] == 'gem' and
filename_components[2] == 'metrics' and
filename.lower().endswith('csv')
)
def aw4_arr_manifest_name_rule():
"""DRC Broad AW4 Array manifest name rule: i.e. AoU_DRCB_GEN_2020-07-11-00-00-00.csv"""
return (
filename_components[0] == 'aou' and
filename_components[1] == 'drcb' and
filename_components[2] == 'gen' and
filename.lower().endswith('csv')
)
def aw4_wgs_manifest_name_rule():
"""DRC Broad AW4 WGS manifest name rule: i.e. AoU_DRCB_SEQ_2020-07-11-00-00-00.csv"""
return (
filename_components[0] == 'aou' and
filename_components[1] == 'drcb' and
filename_components[2] == 'seq' and
filename.lower().endswith('csv')
)
def aw5_wgs_manifest_name_rule():
            # No naming convention defined yet; add a rule here once one is finalized
return filename.lower().endswith('csv')
def aw5_array_manifest_name_rule():
            # No naming convention defined yet; add a rule here once one is finalized
return filename.lower().endswith('csv')
name_rules = {
GenomicJob.BB_RETURN_MANIFEST: bb_result_name_rule,
GenomicJob.METRICS_INGESTION: gc_validation_metrics_name_rule,
GenomicJob.AW1_MANIFEST: bb_to_gc_manifest_name_rule,
GenomicJob.AW1F_MANIFEST: aw1f_manifest_name_rule,
GenomicJob.GEM_A2_MANIFEST: gem_a2_manifest_name_rule,
GenomicJob.W2_INGEST: cvl_w2_manifest_name_rule,
GenomicJob.AW1C_INGEST: cvl_aw1c_manifest_name_rule,
GenomicJob.AW1CF_INGEST: cvl_aw1cf_manifest_name_rule,
GenomicJob.AW4_ARRAY_WORKFLOW: aw4_arr_manifest_name_rule,
GenomicJob.AW4_WGS_WORKFLOW: aw4_wgs_manifest_name_rule,
GenomicJob.GEM_METRICS_INGEST: gem_metrics_name_rule,
GenomicJob.AW5_WGS_MANIFEST: aw5_wgs_manifest_name_rule,
GenomicJob.AW5_ARRAY_MANIFEST: aw5_array_manifest_name_rule,
}
is_valid_filename = name_rules[self.job_id]()
if not is_valid_filename:
invalid_message = f"{self.job_id.name}: File name {filename.split('/')[1]} has failed validation."
self.controller.create_incident(
save_incident=False,
slack=True,
message=invalid_message,
)
return is_valid_filename
def _check_file_structure_valid(self, fields):
"""
Validates the structure of the CSV against a defined set of columns.
        :param fields: the field (column) names from the CSV header.
        :return: tuple of (structure_valid, missing_fields, expected_schema),
            or INVALID_FILE_NAME if the schema cannot be determined.
"""
missing_fields = None
if not self.valid_schema:
self.valid_schema = self._set_schema(self.filename)
if self.valid_schema == GenomicSubProcessResult.INVALID_FILE_NAME:
return GenomicSubProcessResult.INVALID_FILE_NAME
cases = tuple([field.lower().replace('\ufeff', '').replace(' ', '').replace('_', '')
for field in fields])
all_file_columns_valid = all([c in self.valid_schema for c in cases])
all_expected_columns_in_file = all([c in cases for c in self.valid_schema])
if not all_expected_columns_in_file:
missing_fields = list(set(self.valid_schema) - set(cases))
return all([all_file_columns_valid, all_expected_columns_in_file]), missing_fields, self.valid_schema
def _set_schema(self, filename):
"""Since the schemas are different for WGS and Array metrics files,
this parses the filename to return which schema
to use for validation of the CSV columns
:param filename: filename of the csv to validate in string format.
:return: schema_to_validate,
(tuple from the CSV_SCHEMA or result code of INVALID_FILE_NAME).
"""
try:
if self.job_id == GenomicJob.METRICS_INGESTION:
file_type = filename.lower().split("_")[2]
self.genome_type = self.GENOME_TYPE_MAPPINGS[file_type]
return self.GC_METRICS_SCHEMAS[file_type]
if self.job_id == GenomicJob.AW1_MANIFEST:
return self.GC_MANIFEST_SCHEMA
if self.job_id == GenomicJob.GEM_A2_MANIFEST:
return self.GEM_A2_SCHEMA
if self.job_id == GenomicJob.AW1F_MANIFEST:
return self.GC_MANIFEST_SCHEMA # AW1F and AW1 use same schema
if self.job_id == GenomicJob.GEM_METRICS_INGEST:
return self.GEM_METRICS_SCHEMA
if self.job_id == GenomicJob.W2_INGEST:
return self.CVL_W2_SCHEMA
if self.job_id == GenomicJob.AW4_ARRAY_WORKFLOW:
return self.AW4_ARRAY_SCHEMA
if self.job_id == GenomicJob.AW4_WGS_WORKFLOW:
return self.AW4_WGS_SCHEMA
if self.job_id in (GenomicJob.AW1C_INGEST, GenomicJob.AW1CF_INGEST):
return self.GC_MANIFEST_SCHEMA
if self.job_id == GenomicJob.AW5_WGS_MANIFEST:
self.genome_type = self.GENOME_TYPE_MAPPINGS['seq']
return self.AW5_WGS_SCHEMA
if self.job_id == GenomicJob.AW5_ARRAY_MANIFEST:
self.genome_type = self.GENOME_TYPE_MAPPINGS['gen']
return self.AW5_ARRAY_SCHEMA
except (IndexError, KeyError):
return GenomicSubProcessResult.INVALID_FILE_NAME
class GenomicFileMover:
"""
This utility class moves files in the bucket by copying into an archive folder
and deleting the old instance.
"""
def __init__(self, archive_folder=None):
self.archive_folder = archive_folder
def archive_file(self, file_obj=None, file_path=None):
"""
This method moves a file to an archive
by copy and delete
        :param file_obj: a genomic_file_processed object to move
        :param file_path: an explicit file path (used when file_obj is None)
        :return:
"""
source_path = file_obj.filePath if file_obj else file_path
file_name = source_path.split('/')[-1]
archive_path = source_path.replace(file_name,
f"{self.archive_folder}/"
f"{file_name}")
try:
copy_cloud_file(source_path, archive_path)
delete_cloud_file(source_path)
except FileNotFoundError:
logging.error(f"No file found at '{file_obj.filePath}'")
class GenomicReconciler:
""" This component handles reconciliation between genomic datasets """
def __init__(self, run_id, job_id, archive_folder=None, file_mover=None,
bucket_name=None, storage_provider=None, controller=None):
self.run_id = run_id
self.job_id = job_id
self.bucket_name = bucket_name
self.archive_folder = archive_folder
self.cvl_file_name = None
self.file_list = None
# Dao components
self.member_dao = GenomicSetMemberDao()
self.metrics_dao = GenomicGCValidationMetricsDao()
self.file_dao = GenomicFileProcessedDao()
# Other components
self.file_mover = file_mover
self.storage_provider = storage_provider
self.controller = controller
# Data files and names will be different
# file types are defined as
# (field_for_received_flag, filename suffix, field_for_gcs_path)
self.genotyping_file_types = (('idatRedReceived', "_red.idat", "idatRedPath"),
('idatGreenReceived', "_grn.idat", "idatGreenPath"),
('idatRedMd5Received', "_red.idat.md5sum", "idatRedMd5Path"),
('idatGreenMd5Received', "_grn.idat.md5sum", "idatGreenMd5Path"),
('vcfReceived', ".vcf.gz", "vcfPath"),
('vcfTbiReceived', ".vcf.gz.tbi", "vcfTbiPath"),
('vcfMd5Received', ".vcf.gz.md5sum", "vcfMd5Path"))
self.sequencing_file_types = (("hfVcfReceived", ".hard-filtered.vcf.gz", "hfVcfPath"),
("hfVcfTbiReceived", ".hard-filtered.vcf.gz.tbi", "hfVcfTbiPath"),
("hfVcfMd5Received", ".hard-filtered.vcf.gz.md5sum", "hfVcfMd5Path"),
("rawVcfReceived", ".vcf.gz", "rawVcfPath"),
("rawVcfTbiReceived", ".vcf.gz.tbi", "rawVcfTbiPath"),
("rawVcfMd5Received", ".vcf.gz.md5sum", "rawVcfMd5Path"),
("cramReceived", ".cram", "cramPath"),
("cramMd5Received", ".cram.md5sum", "cramMd5Path"),
("craiReceived", ".cram.crai", "craiPath"),
("gvcfReceived", ".hard-filtered.gvcf.gz", "gvcfPath"),
("gvcfMd5Received", ".hard-filtered.gvcf.gz.md5sum", "gvcfMd5Path"))
def reconcile_metrics_to_array_data(self, _gc_site_id):
""" The main method for the AW2 manifest vs. array data reconciliation
:param: _gc_site_id: "jh", "uw", "bi", etc.
:return: result code
"""
metrics = self.metrics_dao.get_with_missing_array_files(_gc_site_id)
total_missing_data = []
# Get list of files in GC data bucket
if self.storage_provider:
# Use the storage provider if it was set by tool
files = self.storage_provider.list(self.bucket_name, prefix=None)
else:
files = list_blobs('/' + self.bucket_name)
self.file_list = [f.name for f in files]
# Iterate over metrics, searching the bucket for filenames where *_received = 0
for metric in metrics:
member = self.member_dao.get(metric.genomicSetMemberId)
missing_data_files = []
metric_touched = False
for file_type in self.genotyping_file_types:
if not getattr(metric, file_type[0]):
filename = f"{metric.chipwellbarcode}{file_type[1]}"
file_exists = self._get_full_filename(filename)
if file_exists != 0:
setattr(metric, file_type[0], 1)
setattr(metric, file_type[2], f'gs://{self.bucket_name}/{file_exists}')
metric_touched = True
if not file_exists:
missing_data_files.append(filename)
if metric_touched:
# Only upsert the metric if changed
inserted_metrics_obj = self.metrics_dao.upsert(metric)
# Update GC Metrics for PDR
if inserted_metrics_obj:
bq_genomic_gc_validation_metrics_update(inserted_metrics_obj.id,
project_id=self.controller.bq_project_id)
genomic_gc_validation_metrics_update(inserted_metrics_obj.id)
next_state = GenomicStateHandler.get_new_state(member.genomicWorkflowState, signal='gem-ready')
# Update Job Run ID on member
self.member_dao.update_member_job_run_id(member, self.run_id, 'reconcileMetricsSequencingJobRunId',
project_id=self.controller.bq_project_id)
else:
next_state = None
# Update state for missing files
if missing_data_files:
next_state = GenomicStateHandler.get_new_state(member.genomicWorkflowState, signal='missing')
incident = self.controller.incident_dao.get_by_source_file_id(metric.genomicFileProcessedId)
if not incident or (incident and not any([i for i in incident if i.code == 'MISSING_FILES'])):
total_missing_data.append((metric.genomicFileProcessedId,
missing_data_files,
member
))
if next_state is not None and next_state != member.genomicWorkflowState:
self.member_dao.update_member_state(member, next_state, project_id=self.controller.bq_project_id)
        # Make a ROC ticket for missing data files
if total_missing_data:
description = f"{self.job_id.name}: The following AW2 manifests are missing data files."
description += f"\nGenomic Job Run ID: {self.run_id}"
for f in total_missing_data:
file = self.file_dao.get(f[0])
description += self._compile_missing_data_alert(
file_name=file.fileName,
missing_data=f[1]
)
self.controller.create_incident(
source_job_run_id=self.run_id,
source_file_processed_id=file.id,
code=GenomicIncidentCode.MISSING_FILES.name,
message=description,
genomic_set_member_id=f[2].id,
biobank_id=f[2].biobankId,
sample_id=f[2].sampleId if f[2].sampleId else "",
collection_tube_id=f[2].collectionTubeId if f[2].collectionTubeId else "",
slack=True
)
return GenomicSubProcessResult.SUCCESS
def reconcile_metrics_to_wgs_data(self, _gc_site_id):
""" The main method for the AW2 manifest vs. sequencing data reconciliation
:param: _gc_site_id: "jh", "uw", "bi", etc.
:return: result code
"""
metrics = self.metrics_dao.get_with_missing_wsg_files(_gc_site_id)
# Get list of files in GC data bucket
if self.storage_provider:
# Use the storage provider if it was set by tool
files = self.storage_provider.list(self.bucket_name, prefix=None)
else:
files = list_blobs('/' + self.bucket_name)
self.file_list = [f.name for f in files]
        total_missing_data = []
        # Iterate over metrics, searching the bucket for filenames
        for metric in metrics:
            member = self.member_dao.get(metric.GenomicGCValidationMetrics.genomicSetMemberId)
            gc_prefix = _gc_site_id.upper()
            missing_data_files = []
            # Reset per metric so only changed rows are upserted
            # (mirrors the array reconciliation above)
            metric_touched = False
for file_type in self.sequencing_file_types:
if not getattr(metric.GenomicGCValidationMetrics, file_type[0]):
# Default filename in case the file is missing (used in alert)
default_filename = f"{gc_prefix}_{metric.biobankId}_{metric.sampleId}_" \
f"{metric.GenomicGCValidationMetrics.limsId}_1{file_type[1]}"
                    file_type_expression = re.escape(file_type[1])
# Naming rule for WGS files:
filename_exp = rf"{gc_prefix}_([A-Z]?){metric.biobankId}_{metric.sampleId}" \
rf"_{metric.GenomicGCValidationMetrics.limsId}_(\w*)(\d+){file_type_expression}$"
file_exists = self._get_full_filename_with_expression(filename_exp)
if file_exists != 0:
setattr(metric.GenomicGCValidationMetrics, file_type[0], 1)
setattr(metric.GenomicGCValidationMetrics, file_type[2],
f'gs://{self.bucket_name}/{file_exists}')
metric_touched = True
if not file_exists:
missing_data_files.append(default_filename)
if metric_touched:
# Only upsert the metric if changed
inserted_metrics_obj = self.metrics_dao.upsert(metric.GenomicGCValidationMetrics)
# Update GC Metrics for PDR
if inserted_metrics_obj:
bq_genomic_gc_validation_metrics_update(inserted_metrics_obj.id,
project_id=self.controller.bq_project_id)
genomic_gc_validation_metrics_update(inserted_metrics_obj.id)
next_state = GenomicStateHandler.get_new_state(member.genomicWorkflowState, signal='cvl-ready')
self.member_dao.update_member_job_run_id(member, self.run_id, 'reconcileMetricsSequencingJobRunId',
project_id=self.controller.bq_project_id)
else:
next_state = None
# Handle for missing data files
if missing_data_files:
next_state = GenomicStateHandler.get_new_state(member.genomicWorkflowState, signal='missing')
incident = self.controller.incident_dao.get_by_source_file_id(
metric.GenomicGCValidationMetrics.genomicFileProcessedId)
if not incident or (incident and not any([i for i in incident if i.code == 'MISSING_FILES'])):
total_missing_data.append((metric.GenomicGCValidationMetrics.genomicFileProcessedId,
missing_data_files,
member
))
# Update Member
if next_state is not None and next_state != member.genomicWorkflowState:
self.member_dao.update_member_state(member, next_state, project_id=self.controller.bq_project_id)
        # Make a ROC ticket for missing data files
if total_missing_data:
description = f"{self.job_id.name}: The following AW2 manifests are missing data files."
description += f"\nGenomic Job Run ID: {self.run_id}"
for f in total_missing_data:
file = self.file_dao.get(f[0])
description += self._compile_missing_data_alert(
file_name=file.fileName,
missing_data=f[1]
)
self.controller.create_incident(
source_job_run_id=self.run_id,
source_file_processed_id=file.id,
code=GenomicIncidentCode.MISSING_FILES.name,
message=description,
genomic_set_member_id=f[2].id,
biobank_id=f[2].biobankId,
sample_id=f[2].sampleId if f[2].sampleId else "",
collection_tube_id=f[2].collectionTubeId if f[2].collectionTubeId else "",
slack=True
)
return GenomicSubProcessResult.SUCCESS
@staticmethod
def _compile_missing_data_alert(file_name, missing_data):
"""
Compiles the description to include in a GenomicAlert
:param file_name:
:param missing_data: list of files
:return: summary, description
"""
        file_list = '\n'.join(missing_data)
description = f"\nManifest File: {file_name}"
description += "\nMissing Genotype Data:"
description += f"\n{file_list}"
return description
def generate_cvl_reconciliation_report(self):
"""
The main method for the CVL Reconciliation report,
        outputs the report file to the cvl subfolder and updates
genomic_set_member
:return: result code
"""
members = self.member_dao.get_members_for_cvl_reconciliation()
if members:
cvl_subfolder = getSetting(GENOMIC_CVL_RECONCILIATION_REPORT_SUBFOLDER)
self.cvl_file_name = f"{cvl_subfolder}/cvl_report_{self.run_id}.csv"
self._write_cvl_report_to_file(members)
results = []
for member in members:
results.append(self.member_dao.update_member_job_run_id(
member, job_run_id=self.run_id,
field='reconcileCvlJobRunId')
)
return GenomicSubProcessResult.SUCCESS \
if GenomicSubProcessResult.ERROR not in results \
else GenomicSubProcessResult.ERROR
return GenomicSubProcessResult.NO_FILES
def reconcile_gem_report_states(self, _last_run_time=None):
"""
Scans GEM report states for changes
:param _last_run_time: the time when the current job last ran
"""
# Get unconsented members to update (consent > last run time of job_id)
unconsented_gror_members = self.member_dao.get_unconsented_gror_since_date(_last_run_time)
# update each member with the new state and withdrawal time
for member in unconsented_gror_members:
new_state = GenomicStateHandler.get_new_state(member.genomicWorkflowState,
signal='unconsented')
            if new_state is not None and new_state != member.genomicWorkflowState:
self.member_dao.update_member_state(member, new_state)
# Handle withdrawal (gror/primary consent) for reportConsentRemovalDate
removal_date = self.member_dao.get_gem_consent_removal_date(member)
self.member_dao.update_report_consent_removal_date(member, removal_date)
# Get reconsented members to update (consent > last run time of job_id)
reconsented_gror_members = self.member_dao.get_reconsented_gror_since_date(_last_run_time)
# update each member with the new state
for member in reconsented_gror_members:
new_state = GenomicStateHandler.get_new_state(member.genomicWorkflowState,
signal='reconsented')
            if new_state is not None and new_state != member.genomicWorkflowState:
self.member_dao.update_member_state(member, new_state)
self.member_dao.update_report_consent_removal_date(member, None)
@staticmethod
def _check_genotyping_file_exists(bucket_name, filename):
files = list_blobs('/' + bucket_name)
filenames = [f.name for f in files if f.name.endswith(filename)]
return 1 if len(filenames) > 0 else 0
def _get_full_filename(self, filename):
""" Searches file_list for names ending in filename
:param filename: file name to match
:return: first filename in list
"""
filenames = [name for name in self.file_list if name.lower().endswith(filename.lower())]
return filenames[0] if len(filenames) > 0 else 0
def _get_full_filename_with_expression(self, expression):
""" Searches file_list for names that match the expression
:param expression: pattern to match
:return: file name with highest revision number
"""
filenames = [name for name in self.file_list if re.search(expression, name)]
def sort_filenames(name):
version = name.split('.')[0].split('_')[-1]
if version[0].isalpha():
version = version[1:]
return int(version)
# Naturally sort the list in descending order of revisions
# ex: [name_11.ext, name_10.ext, name_9.ext, name_8.ext, etc.]
filenames.sort(reverse=True, key=sort_filenames)
return filenames[0] if len(filenames) > 0 else 0
def _get_sequence_files(self, bucket_name):
"""
Checks the bucket for sequencing files based on naming convention
:param bucket_name:
:return: file list or result code
"""
try:
files = list_blobs('/' + bucket_name)
# TODO: naming_convention is not yet finalized
naming_convention = r"^gc_sequencing_t\d*\.txt$"
files = [s.name for s in files
if self.archive_folder not in s.name.lower()
if re.search(naming_convention,
s.name.lower())]
if not files:
logging.info(
f'No valid sequencing files in bucket {bucket_name}'
)
return GenomicSubProcessResult.NO_FILES
return files
except FileNotFoundError:
return GenomicSubProcessResult.ERROR
def _parse_seq_filename(self, filename):
"""
Takes a sequencing filename and returns the biobank id.
:param filename:
:return: biobank_id
"""
# TODO: naming_convention is not yet finalized
try:
# pull biobank ID from filename
return filename.lower().split('_')[-1].split('.')[0][1:]
except IndexError:
return GenomicSubProcessResult.INVALID_FILE_NAME
def _update_genomic_set_member_seq_reconciliation(self, member, seq_file_name, job_run_id):
"""
Uses member DAO to update GenomicSetMember object
with sequencing reconciliation data
:param member: the GenomicSetMember to update
:param seq_file_name:
:param job_run_id:
:return: query result
"""
return self.member_dao.update_member_sequencing_file(member,
job_run_id,
seq_file_name)
def _write_cvl_report_to_file(self, members):
"""
        Writes data to a CSV file in the bucket
:param members:
:return: result code
"""
try:
# extract only columns we need
cvl_columns = ('biobank_id', 'sample_id', 'member_id')
report_data = ((m.biobankId, m.sampleId, m.id) for m in members)
# Use SQL exporter
exporter = SqlExporter(self.bucket_name)
with exporter.open_cloud_writer(self.cvl_file_name) as writer:
writer.write_header(cvl_columns)
writer.write_rows(report_data)
return GenomicSubProcessResult.SUCCESS
except RuntimeError:
return GenomicSubProcessResult.ERROR
class GenomicBiobankSamplesCoupler:
"""This component creates the source data for Cohot 3:
new genomic set and members from the biobank samples pipeline.
Class uses the manifest handler to create and upload a manifest"""
_SEX_AT_BIRTH_CODES = {
'male': 'M',
'female': 'F',
'none_intersex': 'NA'
}
_VALIDATION_FLAGS = (GenomicValidationFlag.INVALID_WITHDRAW_STATUS,
GenomicValidationFlag.INVALID_SUSPENSION_STATUS,
GenomicValidationFlag.INVALID_CONSENT,
GenomicValidationFlag.INVALID_AGE,
GenomicValidationFlag.INVALID_AIAN,
GenomicValidationFlag.INVALID_SEX_AT_BIRTH)
_ARRAY_GENOME_TYPE = "aou_array"
_WGS_GENOME_TYPE = "aou_wgs"
_LR_GENOME_TYPE = "long_read"
COHORT_1_ID = "C1"
COHORT_2_ID = "C2"
COHORT_3_ID = "C3"
GenomicSampleMeta = namedtuple("GenomicSampleMeta", ["bids",
"pids",
"order_ids",
"site_ids",
"state_ids",
"sample_ids",
"valid_withdrawal_status",
"valid_suspension_status",
"gen_consents",
"valid_ages",
"sabs",
"gror",
"valid_ai_ans"])
def __init__(self, run_id, controller=None):
self.samples_dao = BiobankStoredSampleDao()
self.set_dao = GenomicSetDao()
self.member_dao = GenomicSetMemberDao()
self.site_dao = SiteDao()
self.ps_dao = ParticipantSummaryDao()
self.code_dao = CodeDao()
self.run_id = run_id
self.controller = controller
self.query = GenomicQueryClass()
def create_new_genomic_participants(self, from_date):
"""
This method determines which samples to enter into the genomic system
from Cohort 3 (New Participants).
Validation is handled in the query that retrieves the newly consented
participants' samples to process.
:param: from_date : the date from which to lookup new biobank_ids
:return: result
"""
samples = self._get_new_biobank_samples(from_date)
if len(samples) > 0:
samples_meta = self.GenomicSampleMeta(*samples)
return self.process_samples_into_manifest(samples_meta, cohort=self.COHORT_3_ID)
else:
logging.info(f'New Participant Workflow: No new samples to process.')
return GenomicSubProcessResult.NO_FILES
def create_saliva_genomic_participants(self, local=False, config=None):
"""
This method determines which samples to enter into
the genomic system that are saliva only, via the
config obj passed in the argument.
:param: config : options for ror consent type and denoting if sample was generated in-home or in-clinic
:return: result
"""
participants = self._get_remaining_saliva_participants(config)
if len(participants) > 0:
return self.create_matrix_and_process_samples(participants, cohort=None, local=local, saliva=True)
else:
logging.info(
f'Saliva Participant Workflow: No participants to process.')
return GenomicSubProcessResult.NO_FILES
def create_c2_genomic_participants(self, from_date, local=False):
"""
Creates Cohort 2 Participants in the genomic system using reconsent.
Validation is handled in the query that retrieves the newly consented
participants. Only valid participants are currently sent.
Refactored to first pull valid participants, then pull their samples,
applying the new business logic of prioritizing
collection date & blood over saliva.
:param: from_date : the date from which to lookup new participants
:return: result
"""
participants = self._get_new_c2_participants(from_date)
if len(participants) > 0:
return self.create_matrix_and_process_samples(participants, cohort=self.COHORT_2_ID, local=local)
else:
logging.info(f'Cohort 2 Participant Workflow: No participants to process.')
return GenomicSubProcessResult.NO_FILES
def create_c1_genomic_participants(self, from_date, local=False):
"""
Creates Cohort 1 Participants in the genomic system using reconsent.
Validation is handled in the query that retrieves the newly consented
participants. Only valid participants are currently sent.
:param: from_date : the date from which to lookup new participants
:return: result
"""
participants = self._get_new_c1_participants(from_date)
if len(participants) > 0:
return self.create_matrix_and_process_samples(participants, cohort=self.COHORT_1_ID, local=local)
else:
logging.info(f'Cohort 1 Participant Workflow: No participants to process.')
return GenomicSubProcessResult.NO_FILES
def create_long_read_genomic_participants(self, limit=None):
"""
Create long_read participants that are already in the genomic system,
based on downstream filters.
:return:
"""
participants = self._get_long_read_participants(limit)
if len(participants) > 0:
return self.process_genomic_members_into_manifest(
participants=participants,
genome_type=self._LR_GENOME_TYPE
)
logging.info(f'Long Read Participant Workflow: No participants to process.')
return GenomicSubProcessResult.NO_FILES
def process_genomic_members_into_manifest(self, *, participants, genome_type):
"""
Compiles AW0 Manifest from already submitted genomic members.
:param participants:
:param genome_type
:return:
"""
new_genomic_set = self._create_new_genomic_set()
processed_members = []
count = 0
# duplicate genomic set members
with self.member_dao.session() as session:
for i, participant in enumerate(participants):
dup_member_obj = GenomicSetMember(
biobankId=participant.biobankId,
genomicSetId=new_genomic_set.id,
participantId=participant.participantId,
nyFlag=participant.nyFlag,
sexAtBirth=participant.sexAtBirth,
collectionTubeId=participant.collectionTubeId,
validationStatus=participant.validationStatus,
validationFlags=participant.validationFlags,
ai_an=participant.ai_an,
genomeType=genome_type,
genomicWorkflowState=GenomicWorkflowState.LR_PENDING,
created=clock.CLOCK.now(),
modified=clock.CLOCK.now(),
)
processed_members.append(dup_member_obj)
count = i + 1
if count % 100 == 0:
self.genomic_members_insert(
members=processed_members,
session=session,
set_id=new_genomic_set.id,
bids=[pm.biobankId for pm in processed_members]
)
processed_members.clear()
if count and processed_members:
self.genomic_members_insert(
members=processed_members,
session=session,
set_id=new_genomic_set.id,
bids=[pm.biobankId for pm in processed_members]
)
return new_genomic_set.id
def process_samples_into_manifest(self, samples_meta, cohort, saliva=False, local=False):
"""
Compiles AW0 Manifest from samples list.
:param samples_meta:
:param cohort:
:param saliva:
:param local: overrides automatic push to bucket
:return: job result code
"""
logging.info(f'{self.__class__.__name__}: Processing new biobank_ids {samples_meta.bids}')
new_genomic_set = self._create_new_genomic_set()
processed_array_wgs = []
count = 0
bids = []
# Create genomic set members
with self.member_dao.session() as session:
for i, bid in enumerate(samples_meta.bids):
# Don't write participant to table if no sample
if samples_meta.sample_ids[i] == 0:
continue
logging.info(f'Validating sample: {samples_meta.sample_ids[i]}')
validation_criteria = (
samples_meta.valid_withdrawal_status[i],
samples_meta.valid_suspension_status[i],
samples_meta.gen_consents[i],
samples_meta.valid_ages[i],
samples_meta.valid_ai_ans[i],
samples_meta.sabs[i] in self._SEX_AT_BIRTH_CODES.values()
)
valid_flags = self._calculate_validation_flags(validation_criteria)
logging.info(f'Creating genomic set members for PID: {samples_meta.pids[i]}')
# Get NY flag for collected-site
if samples_meta.site_ids[i]:
_ny_flag = self._get_new_york_flag_from_site(samples_meta.site_ids[i])
# Get NY flag for mail-kit
elif samples_meta.state_ids[i]:
_ny_flag = self._get_new_york_flag_from_state_id(samples_meta.state_ids[i])
# default ny flag if no state id
elif not samples_meta.state_ids[i]:
_ny_flag = 0
else:
logging.warning(f'No collection site or mail kit state. Skipping biobank_id: {bid}')
continue
new_array_member_obj = GenomicSetMember(
biobankId=bid,
genomicSetId=new_genomic_set.id,
participantId=samples_meta.pids[i],
nyFlag=_ny_flag,
sexAtBirth=samples_meta.sabs[i],
collectionTubeId=samples_meta.sample_ids[i],
validationStatus=(GenomicSetMemberStatus.INVALID if len(valid_flags) > 0
else GenomicSetMemberStatus.VALID),
validationFlags=valid_flags,
ai_an='N' if samples_meta.valid_ai_ans[i] else 'Y',
genomeType=self._ARRAY_GENOME_TYPE,
genomicWorkflowState=GenomicWorkflowState.AW0_READY,
created=clock.CLOCK.now(),
modified=clock.CLOCK.now(),
)
# Also create a WGS member
new_wgs_member_obj = deepcopy(new_array_member_obj)
new_wgs_member_obj.genomeType = self._WGS_GENOME_TYPE
bids.append(bid)
processed_array_wgs.extend([new_array_member_obj, new_wgs_member_obj])
count = i + 1
if count % 1000 == 0:
self.genomic_members_insert(
members=processed_array_wgs,
session=session,
set_id=new_genomic_set.id,
bids=bids
)
processed_array_wgs.clear()
bids.clear()
if count and processed_array_wgs:
self.genomic_members_insert(
members=processed_array_wgs,
session=session,
set_id=new_genomic_set.id,
bids=bids
)
# Create & transfer the Biobank Manifest based on the new genomic set
try:
if local:
return new_genomic_set.id
else:
create_and_upload_genomic_biobank_manifest_file(new_genomic_set.id,
cohort_id=cohort,
saliva=saliva)
# Handle Genomic States for manifests
for member in self.member_dao.get_members_from_set_id(new_genomic_set.id):
new_state = GenomicStateHandler.get_new_state(member.genomicWorkflowState,
signal='manifest-generated')
                    if new_state is not None and new_state != member.genomicWorkflowState:
self.member_dao.update_member_state(member, new_state)
logging.info(f'{self.__class__.__name__}: Genomic set members created ')
return GenomicSubProcessResult.SUCCESS
except RuntimeError:
return GenomicSubProcessResult.ERROR
def create_matrix_and_process_samples(self, participants, cohort, local, saliva=False):
"""
Wrapper method for processing participants for C1 and C2 manifests
:param cohort:
:param participants:
:param local:
:param saliva:
:return:
"""
participant_matrix = self.GenomicSampleMeta(*participants)
for i, _bid in enumerate(participant_matrix.bids):
            logging.info(f'Retrieving samples for PID: {participant_matrix.pids[i]}')
blood_sample_data = None
if not saliva:
blood_sample_data = self._get_usable_blood_sample(pid=participant_matrix.pids[i],
bid=_bid)
saliva_sample_data = self._get_usable_saliva_sample(pid=participant_matrix.pids[i],
bid=_bid)
# Determine which sample ID to use
sample_data = self._determine_best_sample(blood_sample_data, saliva_sample_data)
# update the sample id, collected site, and biobank order
if sample_data is not None:
participant_matrix.sample_ids[i] = sample_data[0]
participant_matrix.site_ids[i] = sample_data[1]
participant_matrix.order_ids[i] = sample_data[2]
else:
logging.info(f'No valid samples for pid {participant_matrix.pids[i]}.')
# insert new members and make the manifest
return self.process_samples_into_manifest(
participant_matrix,
cohort=cohort,
saliva=saliva,
local=local
)
def genomic_members_insert(self, *, members, session, set_id, bids):
"""
        Bulk save of members to genomic_set_member, plus PDR
        batch updates for those members
:param: members
:param: session
:param: set_id
:param: bids
"""
try:
session.bulk_save_objects(members)
session.commit()
members = self.member_dao.get_members_from_set_id(set_id, bids=bids)
member_ids = [m.id for m in members]
bq_genomic_set_member_batch_update(member_ids, project_id=self.controller.bq_project_id)
genomic_set_member_batch_update(member_ids)
except Exception as e:
raise Exception("Error occurred on genomic member insert: {0}".format(e))
def _get_new_biobank_samples(self, from_date):
"""
Retrieves BiobankStoredSample objects with `rdr_created`
after the last run of the new participant workflow job.
The query filters out participants that do not match the
genomic validation requirements.
:param: from_date
:return: list of tuples (bid, pid, biobank_identifier.value, collected_site_id)
"""
_new_samples_sql = self.query.new_biobank_samples()
params = {
"sample_status_param": SampleStatus.RECEIVED.__int__(),
"dob_param": GENOMIC_VALID_AGE,
"general_consent_param": QuestionnaireStatus.SUBMITTED.__int__(),
"ai_param": Race.AMERICAN_INDIAN_OR_ALASKA_NATIVE.__int__(),
"from_date_param": from_date.strftime("%Y-%m-%d"),
"withdrawal_param": WithdrawalStatus.NOT_WITHDRAWN.__int__(),
"suspension_param": SuspensionStatus.NOT_SUSPENDED.__int__(),
"cohort_3_param": ParticipantCohort.COHORT_3.__int__(),
"ignore_param": GenomicWorkflowState.IGNORE.__int__(),
}
with self.samples_dao.session() as session:
result = session.execute(_new_samples_sql, params).fetchall()
result = self._prioritize_samples_by_participant(result)
return list(zip(*result))[:-2] # Slicing to remove the last two columns retrieved for prioritization
def _prioritize_samples_by_participant(self, sample_results):
preferred_samples = {}
for sample in sample_results:
preferred_sample = sample
previously_found_sample = preferred_samples.get(sample.participant_id, None)
if previously_found_sample is not None:
preferred_sample = self._determine_best_sample(previously_found_sample, sample)
preferred_samples[sample.participant_id] = preferred_sample
return list(preferred_samples.values())
@staticmethod
def _determine_best_sample(sample_one, sample_two):
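        # Sketch (statuses hypothetical): a RECEIVED 1SAL2 beats an unreceived
        # 1ED04; if both are usable, the test-code preference below
        # (1ED04 > 1ED10 > 1SAL2) decides; two unusable samples return None.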
if sample_one is None:
return sample_two
if sample_two is None:
return sample_one
# Return the usable sample (status less than NOT_RECEIVED) if one is usable and the other isn't
if sample_one.status < int(SampleStatus.SAMPLE_NOT_RECEIVED) <= sample_two.status:
return sample_one
        elif sample_two.status < int(SampleStatus.SAMPLE_NOT_RECEIVED) <= sample_one.status:
return sample_two
elif sample_one.status >= int(SampleStatus.SAMPLE_NOT_RECEIVED) \
and sample_two.status >= int(SampleStatus.SAMPLE_NOT_RECEIVED):
return None
# Both are usable
# Return the sample by the priority of the code: 1ED04, then 1ED10, and 1SAL2 last
test_codes_by_preference = ['1ED04', '1ED10', '1SAL2'] # most desirable first
samples_by_code = {}
for sample in [sample_one, sample_two]:
samples_by_code[sample.test] = sample
for test_code in test_codes_by_preference:
if samples_by_code.get(test_code):
return samples_by_code[test_code]
logging.error(f'Should have been able to select between '
f'{sample_one.biobank_stored_sample_id} and {sample_two.biobank_stored_sample_id}')
def _get_new_c2_participants(self, from_date):
"""
Retrieves C2 participants and validation data.
Broken out so that DNA samples' business logic is handled separately
:param from_date:
:return:
"""
_c2_participant_sql = self.query.new_c2_participants()
params = {
"sample_status_param": SampleStatus.RECEIVED.__int__(),
"dob_param": GENOMIC_VALID_AGE,
"general_consent_param": QuestionnaireStatus.SUBMITTED.__int__(),
"ai_param": Race.AMERICAN_INDIAN_OR_ALASKA_NATIVE.__int__(),
"from_date_param": from_date.strftime("%Y-%m-%d"),
"withdrawal_param": WithdrawalStatus.NOT_WITHDRAWN.__int__(),
"suspension_param": SuspensionStatus.NOT_SUSPENDED.__int__(),
"cohort_2_param": ParticipantCohort.COHORT_2.__int__(),
"ignore_param": GenomicWorkflowState.IGNORE.__int__(),
}
with self.ps_dao.session() as session:
result = session.execute(_c2_participant_sql, params).fetchall()
return list([list(r) for r in zip(*result)])
def _get_remaining_c2_participants(self):
_c2_participant_sql = self.query.remaining_c2_participants()
params = {
"sample_status_param": SampleStatus.RECEIVED.__int__(),
"dob_param": GENOMIC_VALID_AGE,
"general_consent_param": QuestionnaireStatus.SUBMITTED.__int__(),
"ai_param": Race.AMERICAN_INDIAN_OR_ALASKA_NATIVE.__int__(),
"withdrawal_param": WithdrawalStatus.NOT_WITHDRAWN.__int__(),
"suspension_param": SuspensionStatus.NOT_SUSPENDED.__int__(),
"cohort_2_param": ParticipantCohort.COHORT_2.__int__(),
"ignore_param": GenomicWorkflowState.IGNORE.__int__(),
}
with self.ps_dao.session() as session:
result = session.execute(_c2_participant_sql, params).fetchall()
return list([list(r) for r in zip(*result)])
def _get_new_c1_participants(self, from_date):
"""
Retrieves C1 participants and validation data.
:param from_date:
:return:
"""
_c1_participant_sql = self.query.new_c1_participants()
params = {
"sample_status_param": SampleStatus.RECEIVED.__int__(),
"dob_param": GENOMIC_VALID_AGE,
"general_consent_param": QuestionnaireStatus.SUBMITTED.__int__(),
"ai_param": Race.AMERICAN_INDIAN_OR_ALASKA_NATIVE.__int__(),
"from_date_param": from_date.strftime("%Y-%m-%d"),
"withdrawal_param": WithdrawalStatus.NOT_WITHDRAWN.__int__(),
"suspension_param": SuspensionStatus.NOT_SUSPENDED.__int__(),
"cohort_1_param": ParticipantCohort.COHORT_1.__int__(),
"c1_reconsent_param": COHORT_1_REVIEW_CONSENT_YES_CODE,
"ignore_param": GenomicWorkflowState.IGNORE.__int__(),
}
with self.ps_dao.session() as session:
result = session.execute(_c1_participant_sql, params).fetchall()
return list([list(r) for r in zip(*result)])
def _get_long_read_participants(self, limit=None):
"""
        Retrieves participants based on the filters designated
        for the long-read pilot program
"""
with self.member_dao.session() as session:
gsm_alias = aliased(GenomicSetMember)
result = session.query(GenomicSetMember).join(
ParticipantSummary,
GenomicSetMember.participantId == ParticipantSummary.participantId,
).join(
ParticipantRaceAnswers,
ParticipantRaceAnswers.participantId == ParticipantSummary.participantId,
).join(
Code,
ParticipantRaceAnswers.codeId == Code.codeId,
).join(
GenomicGCValidationMetrics,
GenomicSetMember.id == GenomicGCValidationMetrics.genomicSetMemberId,
).outerjoin(
gsm_alias,
sqlalchemy.and_(
gsm_alias.participantId == ParticipantSummary.participantId,
gsm_alias.genomeType == 'long_read'
)
).filter(
Code.value == 'WhatRaceEthnicity_Black',
GenomicSetMember.genomeType.in_(['aou_wgs']),
GenomicSetMember.genomicWorkflowState != GenomicWorkflowState.IGNORE,
GenomicGCValidationMetrics.ignoreFlag == 0,
GenomicGCValidationMetrics.contamination <= 0.01,
ParticipantSummary.participantOrigin == 'vibrent',
ParticipantSummary.ehrUpdateTime.isnot(None),
gsm_alias.id.is_(None),
).distinct(gsm_alias.biobankId)
if limit:
result = result.limit(limit)
return result.all()
def _get_usable_blood_sample(self, pid, bid):
"""
Select 1ED04 or 1ED10 based on max collected date
:param pid: participant_id
:param bid: biobank_id
:return: tuple(blood_collected date, blood sample, blood site, blood order)
"""
_samples_sql = self.query.usable_blood_sample()
params = {
"pid_param": pid,
"bid_param": bid,
}
with self.samples_dao.session() as session:
result = session.execute(_samples_sql, params).first()
return result
def _get_usable_saliva_sample(self, pid, bid):
"""
Select 1SAL2 based on max collected date
:param pid: participant_id
:param bid: biobank_id
:return: tuple(saliva date, saliva sample, saliva site, saliva order)
"""
_samples_sql = self.query.usable_saliva_sample()
params = {
"pid_param": pid,
"bid_param": bid,
}
with self.samples_dao.session() as session:
result = session.execute(_samples_sql, params).first()
return result
def _get_remaining_saliva_participants(self, config):
_saliva_sql = self.query.remaining_saliva_participants(config)
params = {
"sample_status_param": SampleStatus.RECEIVED.__int__(),
"dob_param": GENOMIC_VALID_AGE,
"general_consent_param": QuestionnaireStatus.SUBMITTED.__int__(),
"ai_param": Race.AMERICAN_INDIAN_OR_ALASKA_NATIVE.__int__(),
"withdrawal_param": WithdrawalStatus.NOT_WITHDRAWN.__int__(),
"suspension_param": SuspensionStatus.NOT_SUSPENDED.__int__(),
"ignore_param": GenomicWorkflowState.IGNORE.__int__(),
}
with self.samples_dao.session() as session:
result = session.execute(_saliva_sql, params).fetchall()
return list([list(r) for r in zip(*result)])
def _create_new_genomic_set(self):
"""Inserts a new genomic set for this run"""
attributes = {
'genomicSetName': f'new_participant_workflow_{self.run_id}',
'genomicSetCriteria': '.',
'genomicSetVersion': 1,
'genomicSetStatus': GenomicSetStatus.VALID,
}
new_set_obj = GenomicSet(**attributes)
inserted_set = self.set_dao.insert(new_set_obj)
# Insert new set for PDR
bq_genomic_set_update(inserted_set.id, project_id=self.controller.bq_project_id)
genomic_set_update(inserted_set.id)
return inserted_set
def _create_new_set_member(self, **kwargs):
"""Inserts new GenomicSetMember object"""
new_member_obj = GenomicSetMember(**kwargs)
return self.member_dao.insert(new_member_obj)
def _get_new_york_flag_from_site(self, collected_site_id):
"""
Looks up whether a collected site's state is NY
:param collected_site_id: the id of the site
:return: int (1 or 0 for NY or Not)
"""
return int(self.site_dao.get(collected_site_id).state == 'NY')
def _get_new_york_flag_from_state_id(self, state_id):
"""
        Looks up whether a mail kit's state code is NY
:param state_id: the code ID for the state
:return: int (1 or 0 for NY or Not)
"""
return int(self.code_dao.get(state_id).value.split('_')[1] == 'NY')
def _calculate_validation_flags(self, validation_criteria):
"""
        Determines validation flags for a genomic sample
:param validation_criteria:
:return: list of validation flags
"""
# Process validation flags for inserting into genomic_set_member
flags = [flag for (passing, flag) in
zip(validation_criteria, self._VALIDATION_FLAGS)
if not passing]
return flags
class ManifestDefinitionProvider:
"""
Helper class to produce the definitions for each manifest
"""
# Metadata for the various manifests
ManifestDef = namedtuple('ManifestDef', ["job_run_field",
"source_data",
"destination_bucket",
"output_filename",
"columns",
"signal"])
def __init__(
self,
job_run_id=None,
bucket_name=None,
**kwargs
):
# Attributes
self.job_run_id = job_run_id
self.bucket_name = bucket_name
self.kwargs = kwargs
self.query = GenomicQueryClass(
input_manifest=self.kwargs['kwargs'].get('input_manifest')
)
self.manifest_columns_config = {
GenomicManifestTypes.CVL_W1: (
"genomic_set_name",
"biobank_id",
"sample_id",
"sex_at_birth",
"ny_flag",
"site_id",
"secondary_validation",
"date_submitted",
"test_name",
),
GenomicManifestTypes.AW3_ARRAY: (
"chipwellbarcode",
"biobank_id",
"sample_id",
"sex_at_birth",
"site_id",
"red_idat_path",
"red_idat_md5_path",
"green_idat_path",
"green_idat_md5_path",
"vcf_path",
"vcf_index_path",
"vcf_md5_path",
"callrate",
"sex_concordance",
"contamination",
"processing_status",
"research_id",
),
GenomicManifestTypes.GEM_A1: (
'biobank_id',
'sample_id',
"sex_at_birth",
"consent_for_ror",
"date_of_consent_for_ror",
"chipwellbarcode",
"genome_center",
),
GenomicManifestTypes.GEM_A3: (
'biobank_id',
'sample_id',
'date_of_consent_removal',
),
GenomicManifestTypes.CVL_W3: (
"value",
"sample_id",
"biobank_id",
"collection_tubeid",
"sex_at_birth",
"genome_type",
"ny_flag",
"request_id",
"package_id",
"ai_an",
"site_id",
),
GenomicManifestTypes.AW3_WGS: (
"biobank_id",
"sample_id",
"biobankidsampleid",
"sex_at_birth",
"site_id",
"vcf_hf_path",
"vcf_hf_index_path",
"vcf_hf_md5_path",
"vcf_raw_path",
"vcf_raw_index_path",
"vcf_raw_md5_path",
"cram_path",
"cram_md5_path",
"crai_path",
"gvcf_path",
"gvcf_md5_path",
"contamination",
"sex_concordance",
"processing_status",
"mean_coverage",
"research_id",
),
GenomicManifestTypes.AW2F: (
"PACKAGE_ID",
"BIOBANKID_SAMPLEID",
"BOX_STORAGEUNIT_ID",
"BOX_ID/PLATE_ID",
"WELL_POSITION",
"SAMPLE_ID",
"PARENT_SAMPLE_ID",
"COLLECTION_TUBE_ID",
"MATRIX_ID",
"COLLECTION_DATE",
"BIOBANK_ID",
"SEX_AT_BIRTH",
"AGE",
"NY_STATE_(Y/N)",
"SAMPLE_TYPE",
"TREATMENTS",
"QUANTITY_(uL)",
"TOTAL_CONCENTRATION_(ng/uL)",
"TOTAL_DNA(ng)",
"VISIT_DESCRIPTION",
"SAMPLE_SOURCE",
"STUDY",
"TRACKING_NUMBER",
"CONTACT",
"EMAIL",
"STUDY_PI",
"TEST_NAME",
"FAILURE_MODE",
"FAILURE_MODE_DESC",
"PROCESSING_STATUS",
"CONTAMINATION",
"CONTAMINATION_CATEGORY",
"CONSENT_FOR_ROR",
),
}
def _get_source_data_query(self, manifest_type):
"""
        Returns the query to use for the manifest's source data
:param manifest_type:
:return: query object
"""
try:
return self.query.genomic_data_config[manifest_type]
except KeyError:
logging.warning(f"Manifest type {manifest_type} does not resolve query")
def get_def(self, manifest_type):
"""
Returns the manifest definition based on manifest_type
:param manifest_type:
:return: ManifestDef()
"""
now_formatted = clock.CLOCK.now().strftime("%Y-%m-%d-%H-%M-%S")
def_config = {
GenomicManifestTypes.CVL_W1: {
'job_run_field': 'cvlW1ManifestJobRunId',
'output_filename': f'{CVL_W1_MANIFEST_SUBFOLDER}/AoU_CVL_Manifest_{now_formatted}.csv',
'signal': 'manifest-generated'
},
GenomicManifestTypes.GEM_A1: {
'job_run_field': 'gemA1ManifestJobRunId',
'output_filename': f'{GENOMIC_GEM_A1_MANIFEST_SUBFOLDER}/AoU_GEM_A1_manifest_{now_formatted}.csv',
'signal': 'manifest-generated'
},
GenomicManifestTypes.GEM_A3: {
'job_run_field': 'gemA3ManifestJobRunId',
'output_filename': f'{GENOMIC_GEM_A3_MANIFEST_SUBFOLDER}/AoU_GEM_A3_manifest_{now_formatted}.csv',
'signal': 'manifest-generated'
},
GenomicManifestTypes.CVL_W3: {
'job_run_field': 'cvlW3ManifestJobRunID',
'output_filename': f'{CVL_W3_MANIFEST_SUBFOLDER}/AoU_CVL_W1_{now_formatted}.csv',
'signal': 'manifest-generated'
},
GenomicManifestTypes.AW3_ARRAY: {
'job_run_field': 'aw3ManifestJobRunID',
'output_filename': f'{GENOMIC_AW3_ARRAY_SUBFOLDER}/AoU_DRCV_GEN_{now_formatted}.csv',
'signal': 'bypass'
},
GenomicManifestTypes.AW3_WGS: {
'job_run_field': 'aw3ManifestJobRunID',
'output_filename': f'{GENOMIC_AW3_WGS_SUBFOLDER}/AoU_DRCV_SEQ_{now_formatted}.csv',
'signal': 'bypass'
},
GenomicManifestTypes.AW2F: {
'job_run_field': 'aw2fManifestJobRunID',
'output_filename': f'{BIOBANK_AW2F_SUBFOLDER}/GC_AoU_DataType_PKG-YYMM-xxxxxx_contamination.csv',
'signal': 'bypass'
}
}
return self.ManifestDef(
job_run_field=def_config[manifest_type]['job_run_field'],
source_data=self._get_source_data_query(manifest_type),
destination_bucket=f'{self.bucket_name}',
output_filename=def_config[manifest_type]['output_filename'],
columns=self.manifest_columns_config[manifest_type],
signal=def_config[manifest_type]['signal'],
)
class ManifestCompiler:
"""
This component compiles Genomic manifests
based on definitions provided by ManifestDefinitionProvider
"""
def __init__(self, run_id, bucket_name=None):
self.run_id = run_id
self.bucket_name = bucket_name
self.output_file_name = None
self.manifest_def = None
self.def_provider = None
# Dao components
self.member_dao = GenomicSetMemberDao()
self.metrics_dao = GenomicGCValidationMetricsDao()
def generate_and_transfer_manifest(self, manifest_type, genome_type, **kwargs):
"""
Main execution method for ManifestCompiler
:return: result dict:
"code": (i.e. SUCCESS)
"feedback_file": None or feedback file record to update,
"record_count": integer
"""
self.def_provider = ManifestDefinitionProvider(
job_run_id=self.run_id,
bucket_name=self.bucket_name,
kwargs=kwargs
)
self.manifest_def = self.def_provider.get_def(manifest_type)
source_data = self._pull_source_data()
if source_data:
self.output_file_name = self.manifest_def.output_filename
# If the new manifest is a feedback manifest,
# it will have an input manifest
if "input_manifest" in kwargs.keys():
                # AW2F manifest file name is based off of the AW1 file name
if manifest_type == GenomicManifestTypes.AW2F:
new_name = kwargs['input_manifest'].filePath.split('/')[-1]
new_name = new_name.replace('.csv', '_contamination.csv')
self.output_file_name = self.manifest_def.output_filename.replace(
"GC_AoU_DataType_PKG-YYMM-xxxxxx_contamination.csv",
f"{new_name}"
)
logging.info(
f'Preparing manifest of type {manifest_type}...'
f'{self.manifest_def.destination_bucket}/{self.output_file_name}'
)
self._write_and_upload_manifest(source_data)
results = []
record_count = len(source_data)
for row in source_data:
member = self.member_dao.get_member_from_sample_id(row.sample_id, genome_type)
if self.manifest_def.job_run_field is not None:
results.append(
self.member_dao.update_member_job_run_id(
member,
job_run_id=self.run_id,
field=self.manifest_def.job_run_field
)
)
# Handle Genomic States for manifests
if self.manifest_def.signal != "bypass":
new_state = GenomicStateHandler.get_new_state(member.genomicWorkflowState,
signal=self.manifest_def.signal)
                        if new_state is not None and new_state != member.genomicWorkflowState:
self.member_dao.update_member_state(member, new_state)
# Assemble result dict
result_code = GenomicSubProcessResult.SUCCESS \
if GenomicSubProcessResult.ERROR not in results \
else GenomicSubProcessResult.ERROR
result = {
"code": result_code,
"record_count": record_count,
}
return result
logging.info(f'No records found for manifest type: {manifest_type}.')
return {
"code": GenomicSubProcessResult.NO_FILES,
"record_count": 0,
}
def _pull_source_data(self):
"""
Runs the source data query
:return: result set
"""
with self.member_dao.session() as session:
return session.execute(self.manifest_def.source_data).fetchall()
def _write_and_upload_manifest(self, source_data):
"""
        Writes data to a CSV file in the bucket
:return: result code
"""
try:
# Use SQL exporter
exporter = SqlExporter(self.bucket_name)
with exporter.open_cloud_writer(self.output_file_name) as writer:
writer.write_header(self.manifest_def.columns)
writer.write_rows(source_data)
return GenomicSubProcessResult.SUCCESS
except RuntimeError:
return GenomicSubProcessResult.ERROR
class GenomicAlertHandler:
"""
    Creates a Jira ROC ticket using Jira utils
"""
ROC_BOARD_ID = "ROC"
def __init__(self):
self._jira_handler = None
self.alert_envs = ["all-of-us-rdr-prod"]
if GAE_PROJECT in self.alert_envs:
self._jira_handler = JiraTicketHandler()
def make_genomic_alert(self, summary: str, description: str):
"""
Wraps create_ticket with genomic specifics
        Gets the board ID and adds the ticket to the sprint
:param summary: the 'title' of the ticket
:param description: the 'body' of the ticket
"""
if self._jira_handler is not None:
ticket = self._jira_handler.create_ticket(summary, description,
board_id=self.ROC_BOARD_ID)
active_sprint = self._jira_handler.get_active_sprint(
self._jira_handler.get_board_by_id(self.ROC_BOARD_ID))
self._jira_handler.add_ticket_to_sprint(ticket, active_sprint)
else:
logging.info('Suppressing alert for missing files')
return
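if __name__ == "__main__":  # pragma: no cover - illustrative sketch only
    # Hedged usage sketch: the run id, bucket name and genome type below are
    # hypothetical placeholders; a live run needs database and GCS access.
    sketch_compiler = ManifestCompiler(run_id=1, bucket_name="example-bucket")
    sketch_result = sketch_compiler.generate_and_transfer_manifest(
        GenomicManifestTypes.AW2F, genome_type="aou_array"
    )
    logging.info("Manifest compile result: %s", sketch_result)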
|
bsd-3-clause
| 7,104,423,912,018,031,000
| 39.924626
| 119
| 0.555604
| false
| 4.065294
| false
| false
| false
|
cherrypy/magicbus
|
magicbus/plugins/servers.py
|
1
|
15085
|
"""
Multiple servers/ports
======================
If you need to start more than one HTTP server (to serve on multiple ports, or
protocols, etc.), you can manually register each one and then start them all
with bus.transition("RUN")::
s1 = ServerPlugin(bus, MyWSGIServer(host='0.0.0.0', port=80))
s2 = ServerPlugin(bus, another.HTTPServer(host='127.0.0.1', SSL=True))
s1.subscribe()
s2.subscribe()
bus.transition("RUN")
.. index:: SCGI
FastCGI/SCGI
============
There are also Flup\ **F**\ CGIServer and Flup\ **S**\ CGIServer classes in
:mod:`magicbus.plugins.servers`. To start an fcgi server, for example,
wrap an instance of it in a ServerPlugin::
addr = ('0.0.0.0', 4000)
f = servers.FlupFCGIServer(application=mywsgiapp, bindAddress=addr)
s = servers.ServerPlugin(bus, httpserver=f, bind_addr=addr)
s.subscribe()
Note that you need to download and install `flup <http://trac.saddi.com/flup>`_
yourself.
.. _fastcgi:
.. index:: FastCGI
FastCGI
-------
A very simple setup lets your server run with FastCGI.
You just need the flup library,
plus a running Apache server (with ``mod_fastcgi``) or lighttpd server.
Apache
^^^^^^
At the top level in httpd.conf::
FastCgiIpcDir /tmp
FastCgiServer /path/to/myapp.fcgi -idle-timeout 120 -processes 4
And inside the relevant VirtualHost section::
# FastCGI config
AddHandler fastcgi-script .fcgi
ScriptAliasMatch (.*$) /path/to/myapp.fcgi$1
Lighttpd
^^^^^^^^
For `Lighttpd <http://www.lighttpd.net/>`_ you can follow these
instructions. Within ``lighttpd.conf`` make sure ``mod_fastcgi`` is
active within ``server.modules``. Then, within your ``$HTTP["host"]``
directive, configure your fastcgi script like the following::
$HTTP["url"] =~ "" {
fastcgi.server = (
"/" => (
"script.fcgi" => (
"bin-path" => "/path/to/your/script.fcgi",
"socket" => "/tmp/script.sock",
"check-local" => "disable",
"disable-time" => 1,
"min-procs" => 1,
"max-procs" => 1, # adjust as needed
),
),
)
} # end of $HTTP["url"] =~ "^/"
Please see `Lighttpd FastCGI Docs
<http://redmine.lighttpd.net/wiki/lighttpd/Docs:ModFastCGI>`_ for an
explanation of the possible configuration options.
"""
import socket
import sys
import threading
import time
import warnings
class ServerPlugin(object):
"""Bus plugin for an HTTP server.
You don't have to use this plugin; you can make your own that listens on
the appropriate bus channels. This one is designed to:
* wrap HTTP servers whose accept loop blocks by running it in a
separate thread; any exceptions in it exit the bus
* wait until the server is truly ready to receive requests before
returning from the bus START listener
    * wait until the server has finished processing requests before
returning from the bus STOP listener
* log server start/stop via the bus
The httpserver argument MUST possess 'start' and 'stop' methods,
and a 'ready' boolean attribute which is True when the HTTP server
is ready to receive requests on its socket.
If you need to start more than one HTTP server (to serve on multiple
ports, or protocols, etc.), you can manually register each one and then
start them all with bus.transition("RUN")::
s1 = ServerPlugin(bus, MyWSGIServer(host='0.0.0.0', port=80))
s2 = ServerPlugin(bus, another.HTTPServer(host='127.0.0.1', SSL=True))
s1.subscribe()
s2.subscribe()
bus.transition("RUN")
"""
def __init__(self, bus, httpserver=None, bind_addr=None):
self.bus = bus
self.httpserver = httpserver
self.bind_addr = bind_addr
self.interrupt = None
self.running = False
def subscribe(self):
self.bus.subscribe('START', self.START)
self.bus.subscribe('STOP', self.STOP)
def unsubscribe(self):
self.bus.unsubscribe('START', self.START)
self.bus.unsubscribe('STOP', self.STOP)
@property
def interface(self):
if self.bind_addr is None:
return 'unknown interface (dynamic?)'
elif isinstance(self.bind_addr, tuple):
host, port = self.bind_addr
return '%s:%s' % (host, port)
else:
return 'socket file: %s' % self.bind_addr
def START(self):
"""Start the HTTP server."""
if self.running:
self.bus.log('Already serving on %s' % self.interface)
return
self.interrupt = None
if not self.httpserver:
raise ValueError('No HTTP server has been created.')
# Start the httpserver in a new thread.
if isinstance(self.bind_addr, tuple):
wait_for_free_port(*self.bind_addr)
t = threading.Thread(target=self._start_http_thread)
t.setName('HTTPServer ' + t.getName())
self.bus.log('Starting on %s' % self.interface)
t.start()
self.wait()
self.running = True
self.bus.log('Serving on %s' % self.interface)
START.priority = 75
def _start_http_thread(self):
"""HTTP servers MUST be running in new threads, so that the
main thread persists to receive KeyboardInterrupt's. If an
exception is raised in the httpserver's thread then it's
trapped here, and the bus (and therefore our httpserver)
are shut down.
"""
try:
self.httpserver.start()
except KeyboardInterrupt:
self.bus.log('<Ctrl-C> hit: shutting down HTTP server')
self.interrupt = sys.exc_info()[1]
self.bus.transition('EXITED')
except SystemExit:
self.bus.log('SystemExit raised: shutting down HTTP server')
self.interrupt = sys.exc_info()[1]
self.bus.transition('EXITED')
raise
except:
self.interrupt = sys.exc_info()[1]
self.bus.log('Error in HTTP server: shutting down',
traceback=True, level=40)
self.bus.transition('EXITED')
raise
def wait(self):
"""Wait until the HTTP server is ready to receive requests."""
while not getattr(self.httpserver, 'ready', False):
if self.interrupt:
raise self.interrupt
time.sleep(.1)
# Wait for port to be occupied
if isinstance(self.bind_addr, tuple):
host, port = self.bind_addr
self.bus.log('Waiting for %s' % self.interface)
wait_for_occupied_port(host, port)
def STOP(self):
"""Stop the HTTP server."""
if self.running:
# stop() MUST block until the server is *truly* stopped.
self.httpserver.stop()
# Wait for the socket to be truly freed.
if isinstance(self.bind_addr, tuple):
wait_for_free_port(*self.bind_addr)
self.running = False
self.bus.log('HTTP Server %s shut down' % self.httpserver)
else:
self.bus.log('HTTP Server %s already shut down' % self.httpserver)
STOP.priority = 25
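# Hedged illustration (not part of the magicbus API): the smallest object
# satisfying ServerPlugin's httpserver contract -- 'start' and 'stop'
# methods plus a 'ready' flag -- with a bare TCP accept loop standing in
# for a real HTTP server.
class _MinimalServerAdapter(object):
    def __init__(self, bind_addr):
        self.bind_addr = bind_addr
        self.ready = False
        self._stopped = False
    def start(self):
        # Bind and listen, then flip 'ready' so ServerPlugin.wait() returns.
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        sock.bind(self.bind_addr)
        sock.listen(5)
        sock.settimeout(0.5)
        self.ready = True
        while not self._stopped:
            try:
                conn, addr = sock.accept()
            except socket.timeout:
                continue
            conn.close()
        sock.close()
    def stop(self):
        # A real adapter must block here until the loop has exited; this
        # sketch merely signals the loop and clears 'ready'.
        self._stopped = True
        self.ready = False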
# ------- Wrappers for various HTTP servers for use with ServerPlugin ------- #
# These are not plugins, so they don't use the bus states as method names.
class FlupCGIServer(object):
"""Adapter for a flup.server.cgi.WSGIServer."""
def __init__(self, *args, **kwargs):
self.args = args
self.kwargs = kwargs
self.ready = False
def start(self):
"""Start the CGI server."""
# We have to instantiate the server class here because its __init__
# starts a threadpool. If we do it too early, daemonize won't work.
from flup.server.cgi import WSGIServer
self.cgiserver = WSGIServer(*self.args, **self.kwargs)
self.ready = True
self.cgiserver.run()
def stop(self):
"""Stop the HTTP server."""
self.ready = False
class FlupFCGIServer(object):
"""Adapter for a flup.server.fcgi.WSGIServer."""
def __init__(self, *args, **kwargs):
if kwargs.get('bindAddress', None) is None:
if not hasattr(socket, 'fromfd'):
raise ValueError(
'Dynamic FCGI server not available on this platform. '
'You must use a static or external one by providing a '
'legal bindAddress.')
self.args = args
self.kwargs = kwargs
self.ready = False
def start(self):
"""Start the FCGI server."""
# We have to instantiate the server class here because its __init__
# starts a threadpool. If we do it too early, daemonize won't work.
from flup.server.fcgi import WSGIServer
self.fcgiserver = WSGIServer(*self.args, **self.kwargs)
# TODO: report this bug upstream to flup.
# If we don't set _oldSIGs on Windows, we get:
# File "C:\Python24\Lib\site-packages\flup\server\threadedserver.py",
# line 108, in run
# self._restoreSignalHandlers()
# File "C:\Python24\Lib\site-packages\flup\server\threadedserver.py",
# line 156, in _restoreSignalHandlers
# for signum,handler in self._oldSIGs:
# AttributeError: 'WSGIServer' object has no attribute '_oldSIGs'
self.fcgiserver._installSignalHandlers = lambda: None
self.fcgiserver._oldSIGs = []
self.ready = True
self.fcgiserver.run()
def stop(self):
"""Stop the HTTP server."""
# Forcibly stop the fcgi server main event loop.
self.fcgiserver._keepGoing = False
# Force all worker threads to die off.
self.fcgiserver._threadPool.maxSpare = (
self.fcgiserver._threadPool._idleCount)
self.ready = False
class FlupSCGIServer(object):
"""Adapter for a flup.server.scgi.WSGIServer."""
def __init__(self, *args, **kwargs):
self.args = args
self.kwargs = kwargs
self.ready = False
def start(self):
"""Start the SCGI server."""
# We have to instantiate the server class here because its __init__
# starts a threadpool. If we do it too early, daemonize won't work.
from flup.server.scgi import WSGIServer
self.scgiserver = WSGIServer(*self.args, **self.kwargs)
# TODO: report this bug upstream to flup.
# If we don't set _oldSIGs on Windows, we get:
# File "C:\Python24\Lib\site-packages\flup\server\threadedserver.py",
# line 108, in run
# self._restoreSignalHandlers()
# File "C:\Python24\Lib\site-packages\flup\server\threadedserver.py",
# line 156, in _restoreSignalHandlers
# for signum,handler in self._oldSIGs:
# AttributeError: 'WSGIServer' object has no attribute '_oldSIGs'
self.scgiserver._installSignalHandlers = lambda: None
self.scgiserver._oldSIGs = []
self.ready = True
self.scgiserver.run()
def stop(self):
"""Stop the HTTP server."""
self.ready = False
# Forcibly stop the scgi server main event loop.
self.scgiserver._keepGoing = False
# Force all worker threads to die off.
self.scgiserver._threadPool.maxSpare = 0
# ---------------------------- Utility functions ---------------------------- #
def client_host(server_host):
"""Return the host on which a client can connect to the given listener."""
if server_host == '0.0.0.0':
# 0.0.0.0 is INADDR_ANY, which should answer on localhost.
return '127.0.0.1'
if server_host in ('::', '::0', '::0.0.0.0'):
# :: is IN6ADDR_ANY, which should answer on localhost.
# ::0 and ::0.0.0.0 are non-canonical but common ways to write
# IN6ADDR_ANY.
return '::1'
return server_host
def check_port(host, port, timeout=1.0):
"""Raise OSError if the given port is not free on the given host."""
if not host:
raise ValueError("Host values of '' or None are not allowed.")
host = client_host(host)
port = int(port)
# AF_INET or AF_INET6 socket
# Get the correct address family for our host (allows IPv6 addresses)
try:
info = socket.getaddrinfo(host, port, socket.AF_UNSPEC,
socket.SOCK_STREAM)
except socket.gaierror:
if ':' in host:
info = [
(socket.AF_INET6, socket.SOCK_STREAM, 0, '',
(host, port, 0, 0))]
else:
info = [(socket.AF_INET, socket.SOCK_STREAM, 0, '', (host, port))]
for res in info:
af, socktype, proto, canonname, sa = res
s = None
try:
s = socket.socket(af, socktype, proto)
# See http://groups.google.com/group/cherrypy-users/
# browse_frm/thread/bbfe5eb39c904fe0
s.settimeout(timeout)
s.connect((host, port))
s.close()
except (IOError, OSError):
if s:
s.close()
else:
raise OSError(
'Port %s is in use on %s; perhaps the previous '
'httpserver did not shut down properly.' %
(repr(port), repr(host))
)
# Feel free to increase these defaults on slow systems:
free_port_timeout = 0.1
occupied_port_timeout = 0.25
def wait_for_free_port(host, port, timeout=None):
"""Wait for the specified port to become free (drop requests)."""
if not host:
raise ValueError("Host values of '' or None are not allowed.")
if timeout is None:
timeout = free_port_timeout
for trial in range(50):
try:
# we are expecting a free port, so reduce the timeout
check_port(host, port, timeout=timeout)
except OSError:
# Give the old server thread time to free the port.
time.sleep(timeout)
else:
return
raise OSError('Port %r not free on %r' % (port, host))
def wait_for_occupied_port(host, port, timeout=None):
"""Wait for the specified port to become active (receive requests)."""
if not host:
raise ValueError("Host values of '' or None are not allowed.")
if timeout is None:
timeout = occupied_port_timeout
for trial in range(50):
try:
check_port(host, port, timeout=timeout)
except OSError:
return
else:
time.sleep(timeout)
if host == client_host(host):
raise OSError('Port %r not bound on %r' % (port, host))
# On systems where a loopback interface is not available and the
# server is bound to all interfaces, it's difficult to determine
# whether the server is in fact occupying the port. In this case,
# just issue a warning and move on. See issue #1100.
msg = 'Unable to verify that the server is bound on %r' % port
warnings.warn(msg)
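if __name__ == '__main__':  # pragma: no cover - illustrative sketch only
    # Hedged example of the port helpers above; 127.0.0.1:8080 is an
    # arbitrary placeholder address, not something magicbus itself uses.
    try:
        check_port('127.0.0.1', 8080)
        print('Port 8080 looks free.')
    except OSError as exc:
        print('Port 8080 is busy: %s' % exc)
        wait_for_free_port('127.0.0.1', 8080)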
|
bsd-3-clause
| 5,599,271,003,952,700,000
| 33.838337
| 79
| 0.6
| false
| 3.910057
| false
| false
| false
|
rjw57/foldbeam
|
foldbeam/rendering/renderer/tile_fetcher.py
|
1
|
8728
|
import math
import logging
import StringIO
import sys
import cairo
import numpy as np
from osgeo.osr import SpatialReference
from PIL import Image
import httplib2
from foldbeam.rendering.renderer.base import RendererBase, set_geo_transform
from foldbeam.rendering.renderer.decorator import reproject_from_native_spatial_reference
log = logging.getLogger()
class URLFetchError(Exception):
"""An error raised by a custom URL fetchber for TileFetcher if the URL could not be fetchbed."""
pass
class TileFetcher(RendererBase):
"""Render from slippy map tile URLs.
This is somewhat incomplete at the moment. Given a Google/Bing/OSM-style slippy map tile URL pattern of the form
``http://server/path/to/tiles/{zoom}/{x}/{y}.format``, this renderer can render the tiles to a Cairo context.
In addition to ``{x}`` and ``{y}``, ``{quadkey}`` can be used to support Bing-style quad keys. See
http://msdn.microsoft.com/en-us/library/bb259689.aspx.
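    For example, a Bing-style pattern might look like the following (hedged:
    the host, subdomain and ``g`` query parameter here are illustrative
    placeholders rather than a verified endpoint)::
        http://ecn.t0.tiles.virtualearth.net/tiles/r{quadkey}.png?g=1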
.. note::
If no spatial reference is specified, it will default to EPSG:3857. Similarly, if no bounds are specified, the
default is to assume the bounds of this projection (x and y being +/- 20037508.34 metres).
The default URL pattern is ``http://otile1.mqcdn.com/tiles/1.0.0/osm/{zoom}/{x}/{y}.jpg`` which will load tiles
from the MapQuest servers.
If the *url_fetcher* parameter is specified, it is a callable which takes a single string giving a URL as the first
argument and returns a sequence of bytes for the URL contents. It can raise URLFetchError if the resource is not
available. If no fetcher is provided, :py:func:`default_url_fetcher` is used. The fetcher callable must be
thread-safe.
:param url_pattern: default is to use MapQuest, a pattern for calculating the URL to load tiles from
:type url_pattern: string
:param spatial_reference: default EPSG:3857, the native spatial reference for the tiles
:type spatial_reference: osgeo.osr.SpatialReference or None
:param tile_size: default (256, 256), the width and height of one tile in pixels
:type tile_size: tuple of integer or None
:param bounds: default as noted above, the left, right, top and bottom boundary of the projection
:type bounds: tuple of float or None
:param url_fetcher: which callable to use for URL fetching
:type url_fetcher: callable or None
"""
def __init__(self, url_pattern=None, spatial_reference=None, tile_size=None, bounds=None, url_fetcher=None):
super(TileFetcher, self).__init__()
self.url_pattern = url_pattern or 'http://otile1.mqcdn.com/tiles/1.0.0/osm/{zoom}/{x}/{y}.jpg'
self.tile_size = tile_size or (256, 256)
self.bounds = bounds or (-20037508.34, 20037508.34, 20037508.34, -20037508.34)
self.bounds_size = (abs(self.bounds[1] - self.bounds[0]), abs(self.bounds[3] - self.bounds[2]))
if spatial_reference is not None:
self.native_spatial_reference = spatial_reference
else:
self.native_spatial_reference = SpatialReference()
self.native_spatial_reference.ImportFromEPSG(3857)
self._fetch_url = url_fetcher or default_url_fetcher
@reproject_from_native_spatial_reference
def render_callable(self, context, spatial_reference=None):
if spatial_reference is not None and not spatial_reference.IsSame(self.native_spatial_reference):
raise ValueError('TileFetcher asked to render tile from incompatible spatial reference.')
# Calculate the distance in projection co-ordinates of one device pixel
pixel_size = context.device_to_user_distance(1,1)
# And hence the size in projection co-ordinates of one tile
ideal_tile_size = tuple([abs(x[0] * x[1]) for x in zip(pixel_size, self.tile_size)])
        # How many powers of two smaller than the bounds is this?
ideal_zoom = tuple([math.log(x[0],2) - math.log(x[1],2) for x in zip(self.bounds_size, ideal_tile_size)])
# What zoom will we *actually* use
zoom = min(18, max(0, int(round(max(*ideal_zoom)))))
# How many tiles at this zoom level?
n_tiles = 1<<zoom
# Calculate the tile co-ordinates for the clip area extent
min_px, min_py, max_px, max_py = context.clip_extents()
        # This gives tile co-ordinates for the extremal tiles
tl = [int(math.floor(x)) for x in self._projection_to_tile(min_px, max_py, zoom)]
br = [int(math.floor(x)) for x in self._projection_to_tile(max_px, min_py, zoom)]
# extract the minimum/maximum x/y co-ordinate for the tiles
min_x, min_y = tl
max_x, max_y = br
tiles_to_fetch = []
for x in range(min_x, max_x+1):
# wrap the x co-ordinate in the number of tiles
wrapped_x = x % n_tiles
if wrapped_x < 0:
wrapped_x += n_tiles
for y in range(min_y, max_y+1):
# skip out of range y-tiles
if y < 0 or y >= n_tiles:
continue
# Calculate quadkey
quadkey = ''
for bit in xrange(zoom):
                    v = ((wrapped_x>>bit)&0x1) + (((y>>bit)&0x1)<<1)
quadkey = str(v) + quadkey
url = self.url_pattern.format(x=wrapped_x, y=y, zoom=zoom, quadkey=quadkey)
tiles_to_fetch.append((x,y,url,self._fetch_url(url)))
def f():
# render the tiles as they come in
for x, y, url, data in tiles_to_fetch:
# load the tile into a cairo surface
surface = _cairo_surface_from_data(data)
# what extents should this tile have?
tile_x, tile_y, tile_w, tile_h = self._tile_extents(x, y, zoom)
tile_x_scale = surface.get_width() / tile_w
tile_y_scale = -surface.get_height() / tile_h
# set up the tile as a source
context.set_source_surface(surface)
context.get_source().set_matrix(cairo.Matrix(
xx = tile_x_scale,
yy = tile_y_scale,
x0 = -tile_x * tile_x_scale,
y0 = -tile_y * tile_y_scale + surface.get_height()
))
# we need to set the extend options to avoid interpolating towards zero-alpha at the edges
context.get_source().set_extend(cairo.EXTEND_PAD)
# draw the tile itself. We disable antialiasing because if the tile slightly overlaps an output
# pixel we want the interpolation of the tile to do the smoothing, not the rasteriser
context.save()
context.set_antialias(cairo.ANTIALIAS_NONE)
context.rectangle(tile_x, tile_y, tile_w, tile_h)
context.fill()
context.restore()
return f
def _tile_extents(self, tx, ty, zoom):
"""Return a tuple (minx, miny, width, height) giving the extents of a tile in projection co-ords."""
# Calculate size of one tile in projection co-ordinates
tile_size = tuple([math.pow(2.0, math.log(x,2) - zoom) for x in self.bounds_size])
left = tx * tile_size[0] + self.bounds[0]
top = self.bounds[2] - ty * tile_size[1]
return (left, top-tile_size[1], tile_size[0], tile_size[1])
def _projection_to_tile(self, px, py, zoom):
"""Convert from a projection co-ordinate to a tile co-ordinate. The tile co-ordinate system has an origin in the
top-left hand corner.
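        For example, at zoom level 1 with the default bounds the projection
        origin (0, 0) maps to tile co-ordinate (1.0, 1.0).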
"""
# Calculate size of one tile in projection co-ordinates
tile_size = tuple([x / math.pow(2.0, zoom) for x in self.bounds_size])
# Map projection co-ords into tile co-ords
return tuple([x[0] / x[1] for x in zip((px-self.bounds[0], self.bounds[2]-py), tile_size)])
def default_url_fetcher(url):
"""The default URL fetcher to use in :py:class:`TileFetcher`. If there is an error fetching the URL a URLFetchError
is raised.
"""
http = httplib2.Http()
rep, content = http.request(url, 'GET')
if rep.status != 200:
raise URLFetchError(str(rep.status) + ' ' + rep.reason)
return content
def _cairo_surface_from_data(data):
# load via the PIL
image = Image.open(StringIO.StringIO(data)).convert('RGBA')
imw, imh = image.size
# swizzle RGBA -> BGRA
image = Image.frombuffer('RGBA', (imw, imh), image.tostring(), 'raw', 'BGRA', 0, 1)
# write into a Cairo surface
surface = cairo.ImageSurface.create_for_data(np.array(image), cairo.FORMAT_ARGB32, imw, imh)
return surface
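if __name__ == '__main__':  # pragma: no cover - illustrative sketch only
    # Hedged usage sketch: render a whole-world 512x512 view of the default
    # MapQuest layer.  The manual scale/translate below maps user space to
    # EPSG:3857 metres by hand, and the call assumes the reprojection
    # decorator passes straight through when no spatial reference is given
    # (behaviour inferred from the code above, not verified).
    half_world = 20037508.34
    surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, 512, 512)
    ctx = cairo.Context(surface)
    ctx.scale(512.0 / (2 * half_world), -512.0 / (2 * half_world))
    ctx.translate(half_world, -half_world)
    TileFetcher().render_callable(ctx)()
    surface.write_to_png('world.png')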
|
apache-2.0
| -7,803,865,678,951,885,000
| 42.64
| 120
| 0.626604
| false
| 3.636667
| false
| false
| false
|
davy39/eric
|
Helpviewer/HelpBrowserWV.py
|
1
|
92524
|
# -*- coding: utf-8 -*-
# Copyright (c) 2008 - 2014 Detlev Offenbach <detlev@die-offenbachs.de>
#
"""
Module implementing the helpbrowser using QWebView.
"""
from __future__ import unicode_literals
try:
str = unicode
except NameError:
pass
from PyQt5.QtCore import pyqtSlot, pyqtSignal, QObject, QT_TRANSLATE_NOOP, \
QUrl, QBuffer, QIODevice, QFileInfo, Qt, QTimer, QEvent, \
QRect, QFile, QPoint, QByteArray, qVersion
from PyQt5.QtGui import QDesktopServices, QClipboard, QMouseEvent, QColor, \
QPalette
from PyQt5.QtWidgets import qApp, QStyle, QMenu, QApplication, QInputDialog, \
QLineEdit, QLabel, QToolTip, QFrame, QDialog
from PyQt5.QtPrintSupport import QPrinter, QPrintDialog
from PyQt5.QtWebKit import QWebSettings
from PyQt5.QtWebKitWidgets import QWebView, QWebPage
try:
from PyQt5.QtWebKit import QWebElement
except ImportError:
pass
from PyQt5.QtNetwork import QNetworkReply, QNetworkRequest
import sip
from E5Gui import E5MessageBox, E5FileDialog
import Preferences
import UI.PixmapCache
try:
from PyQt5.QtNetwork import QSslCertificate
SSL_AVAILABLE = True
except ImportError:
SSL_AVAILABLE = False
###############################################################################
class JavaScriptExternalObject(QObject):
"""
Class implementing an external javascript object to add search providers.
"""
def __init__(self, mw, parent=None):
"""
Constructor
        @param mw reference to the main window (HelpWindow)
@param parent reference to the parent object (QObject)
"""
super(JavaScriptExternalObject, self).__init__(parent)
self.__mw = mw
@pyqtSlot(str)
def AddSearchProvider(self, url):
"""
Public slot to add a search provider.
@param url url of the XML file defining the search provider (string)
"""
self.__mw.openSearchManager().addEngine(QUrl(url))
class LinkedResource(object):
"""
Class defining a data structure for linked resources.
"""
def __init__(self):
"""
Constructor
"""
self.rel = ""
self.type_ = ""
self.href = ""
self.title = ""
###############################################################################
class JavaScriptEricObject(QObject):
"""
Class implementing an external javascript object to search via the
startpage.
"""
# these must be in line with the strings used by the javascript part of
# the start page
translations = [
QT_TRANSLATE_NOOP("JavaScriptEricObject",
"Welcome to eric6 Web Browser!"),
QT_TRANSLATE_NOOP("JavaScriptEricObject", "eric6 Web Browser"),
QT_TRANSLATE_NOOP("JavaScriptEricObject", "Search!"),
QT_TRANSLATE_NOOP("JavaScriptEricObject", "About eric6"),
]
def __init__(self, mw, parent=None):
"""
Constructor
        @param mw reference to the main window (HelpWindow)
@param parent reference to the parent object (QObject)
"""
super(JavaScriptEricObject, self).__init__(parent)
self.__mw = mw
@pyqtSlot(str, result=str)
def translate(self, trans):
"""
Public method to translate the given string.
@param trans string to be translated (string)
@return translation (string)
"""
if trans == "QT_LAYOUT_DIRECTION":
# special handling to detect layout direction
if qApp.isLeftToRight():
return "LTR"
else:
return "RTL"
return self.tr(trans)
@pyqtSlot(result=str)
def providerString(self):
"""
Public method to get a string for the search provider.
@return string for the search provider (string)
"""
return self.tr("Search results provided by {0}")\
.format(self.__mw.openSearchManager().currentEngineName())
@pyqtSlot(str, result=str)
def searchUrl(self, searchStr):
"""
Public method to get the search URL for the given search term.
@param searchStr search term (string)
@return search URL (string)
"""
return bytes(
self.__mw.openSearchManager().currentEngine()
.searchUrl(searchStr).toEncoded()).decode()
###############################################################################
class HelpWebPage(QWebPage):
"""
Class implementing an enhanced web page.
"""
_webPluginFactory = None
def __init__(self, parent=None):
"""
Constructor
@param parent parent widget of this window (QWidget)
"""
super(HelpWebPage, self).__init__(parent)
self.setPluginFactory(self.webPluginFactory())
self.__lastRequest = None
self.__lastRequestType = QWebPage.NavigationTypeOther
import Helpviewer.HelpWindow
from .Network.NetworkAccessManagerProxy import \
NetworkAccessManagerProxy
self.__proxy = NetworkAccessManagerProxy(self)
self.__proxy.setWebPage(self)
self.__proxy.setPrimaryNetworkAccessManager(
Helpviewer.HelpWindow.HelpWindow.networkAccessManager())
self.setNetworkAccessManager(self.__proxy)
self.__sslConfiguration = None
self.__proxy.finished.connect(self.__managerFinished)
self.__adBlockedEntries = []
self.loadStarted.connect(self.__loadStarted)
def acceptNavigationRequest(self, frame, request, type_):
"""
Public method to determine, if a request may be accepted.
@param frame reference to the frame sending the request (QWebFrame)
@param request reference to the request object (QNetworkRequest)
@param type_ type of the navigation request (QWebPage.NavigationType)
@return flag indicating acceptance (boolean)
"""
self.__lastRequest = request
if self.__lastRequest.url() != request.url() or \
type_ != QWebPage.NavigationTypeOther:
self.__lastRequestType = type_
scheme = request.url().scheme()
if scheme == "mailto":
QDesktopServices.openUrl(request.url())
return False
if type_ == QWebPage.NavigationTypeFormResubmitted:
res = E5MessageBox.yesNo(
self.view(),
self.tr("Resending POST request"),
self.tr(
"""In order to display the site, the request along with"""
""" all the data must be sent once again, which may lead"""
""" to some unexpected behaviour of the site e.g. the"""
""" same action might be performed once again. Do you"""
""" want to continue anyway?"""),
icon=E5MessageBox.Warning)
if not res:
return False
return QWebPage.acceptNavigationRequest(self, frame, request, type_)
def populateNetworkRequest(self, request):
"""
Public method to add data to a network request.
@param request reference to the network request object
(QNetworkRequest)
"""
try:
request.setAttribute(QNetworkRequest.User + 100, self)
if self.__lastRequest.url() == request.url():
request.setAttribute(QNetworkRequest.User + 101,
self.__lastRequestType)
if self.__lastRequestType == \
QWebPage.NavigationTypeLinkClicked:
request.setRawHeader("X-Eric6-UserLoadAction",
QByteArray("1"))
except TypeError:
pass
def pageAttributeId(self):
"""
Public method to get the attribute id of the page attribute.
@return attribute id of the page attribute (integer)
"""
return QNetworkRequest.User + 100
def supportsExtension(self, extension):
"""
Public method to check the support for an extension.
@param extension extension to test for (QWebPage.Extension)
@return flag indicating the support of extension (boolean)
"""
try:
if extension in [QWebPage.ErrorPageExtension,
QWebPage.ChooseMultipleFilesExtension]:
return True
except AttributeError:
pass
return QWebPage.supportsExtension(self, extension)
def extension(self, extension, option, output):
"""
Public method to implement a specific extension.
@param extension extension to be executed (QWebPage.Extension)
@param option provides input to the extension
(QWebPage.ExtensionOption)
@param output stores the output results (QWebPage.ExtensionReturn)
@return flag indicating a successful call of the extension (boolean)
"""
if extension == QWebPage.ChooseMultipleFilesExtension:
info = sip.cast(option,
QWebPage.ChooseMultipleFilesExtensionOption)
files = sip.cast(output,
QWebPage.ChooseMultipleFilesExtensionReturn)
if info is None or files is None:
return super(HelpWebPage, self).extension(
extension, option, output)
suggestedFileName = ""
if info.suggestedFileNames:
suggestedFileName = info.suggestedFileNames[0]
files.fileNames = E5FileDialog.getOpenFileNames(
None,
self.tr("Select files to upload..."),
suggestedFileName)
return True
if extension == QWebPage.ErrorPageExtension:
info = sip.cast(option, QWebPage.ErrorPageExtensionOption)
errorPage = sip.cast(output, QWebPage.ErrorPageExtensionReturn)
urlString = bytes(info.url.toEncoded()).decode()
errorPage.baseUrl = info.url
if info.domain == QWebPage.QtNetwork and \
info.error == QNetworkReply.ContentAccessDenied and \
info.errorString.startswith("AdBlockRule:"):
if info.frame != info.frame.page().mainFrame():
# content in <iframe>
docElement = info.frame.page().mainFrame()\
.documentElement()
for element in docElement.findAll("iframe"):
src = element.attribute("src")
if src in info.url.toString():
element.setAttribute("style", "display:none;")
return False
else:
# the whole page is blocked
rule = info.errorString.replace("AdBlockRule:", "")
title = self.tr("Content blocked by AdBlock Plus")
message = self.tr(
"Blocked by rule: <i>{0}</i>").format(rule)
htmlFile = QFile(":/html/adblockPage.html")
htmlFile.open(QFile.ReadOnly)
html = htmlFile.readAll()
html = html.replace(
"@FAVICON@", "qrc:icons/adBlockPlus16.png")
html = html.replace(
"@IMAGE@", "qrc:icons/adBlockPlus64.png")
html = html.replace("@TITLE@", title.encode("utf8"))
html = html.replace("@MESSAGE@", message.encode("utf8"))
errorPage.content = html
return True
if info.domain == QWebPage.QtNetwork and \
info.error == QNetworkReply.OperationCanceledError and \
info.errorString == "eric6:No Error":
return False
if info.domain == QWebPage.WebKit and info.error == 203:
# "Loading is handled by the media engine"
return False
title = self.tr("Error loading page: {0}").format(urlString)
htmlFile = QFile(":/html/notFoundPage.html")
htmlFile.open(QFile.ReadOnly)
html = htmlFile.readAll()
pixmap = qApp.style()\
.standardIcon(QStyle.SP_MessageBoxWarning).pixmap(48, 48)
imageBuffer = QBuffer()
imageBuffer.open(QIODevice.ReadWrite)
if pixmap.save(imageBuffer, "PNG"):
html = html.replace("@IMAGE@", imageBuffer.buffer().toBase64())
pixmap = qApp.style()\
.standardIcon(QStyle.SP_MessageBoxWarning).pixmap(16, 16)
imageBuffer = QBuffer()
imageBuffer.open(QIODevice.ReadWrite)
if pixmap.save(imageBuffer, "PNG"):
html = html.replace(
"@FAVICON@", imageBuffer.buffer().toBase64())
html = html.replace("@TITLE@", title.encode("utf8"))
html = html.replace("@H1@", info.errorString.encode("utf8"))
html = html.replace(
"@H2@", self.tr("When connecting to: {0}.")
.format(urlString).encode("utf8"))
html = html.replace(
"@LI-1@",
self.tr("Check the address for errors such as "
"<b>ww</b>.example.org instead of "
"<b>www</b>.example.org").encode("utf8"))
html = html.replace(
"@LI-2@",
self.tr(
"If the address is correct, try checking the network "
"connection.").encode("utf8"))
html = html.replace(
"@LI-3@",
self.tr(
"If your computer or network is protected by a firewall "
"or proxy, make sure that the browser is permitted to "
"access the network.").encode("utf8"))
html = html.replace(
"@LI-4@",
self.tr("If your cache policy is set to offline browsing,"
"only pages in the local cache are available.")
.encode("utf8"))
html = html.replace(
"@BUTTON@", self.tr("Try Again").encode("utf8"))
errorPage.content = html
return True
return QWebPage.extension(self, extension, option, output)
def __loadStarted(self):
"""
Private method to handle the loadStarted signal.
"""
self.__adBlockedEntries = []
def addAdBlockRule(self, rule, url):
"""
Public slot to add an AdBlock rule to the page.
@param rule AdBlock rule to add (AdBlockRule)
@param url URL that matched the rule (QUrl)
"""
from .AdBlock.AdBlockPage import AdBlockedPageEntry
entry = AdBlockedPageEntry(rule, url)
if entry not in self.__adBlockedEntries:
self.__adBlockedEntries.append(entry)
def getAdBlockedPageEntries(self):
"""
Public method to get the list of AdBlock page entries.
@return list of AdBlock page entries (list of AdBlockedPageEntry)
"""
return self.__adBlockedEntries
def url(self):
"""
Public method to get the URL of the page.
@return URL of the page (QUrl)
"""
return self.mainFrame().url()
def userAgent(self, resolveEmpty=False):
"""
Public method to get the global user agent setting.
@param resolveEmpty flag indicating to resolve an empty
user agent (boolean)
@return user agent string (string)
"""
agent = Preferences.getHelp("UserAgent")
if agent == "" and resolveEmpty:
agent = self.userAgentForUrl(QUrl())
return agent
def setUserAgent(self, agent):
"""
Public method to set the global user agent string.
@param agent new current user agent string (string)
"""
Preferences.setHelp("UserAgent", agent)
def userAgentForUrl(self, url):
"""
Public method to determine the user agent for the given URL.
@param url URL to determine user agent for (QUrl)
@return user agent string (string)
"""
import Helpviewer.HelpWindow
agent = Helpviewer.HelpWindow.HelpWindow.userAgentsManager()\
.userAgentForUrl(url)
if agent == "":
# no agent string specified for the given host -> use global one
agent = Preferences.getHelp("UserAgent")
if agent == "":
# no global agent string specified -> use default one
agent = QWebPage.userAgentForUrl(self, url)
return agent
def __managerFinished(self, reply):
"""
Private slot to handle a finished reply.
This slot is used to get SSL related information for a reply.
@param reply reference to the finished reply (QNetworkReply)
"""
try:
frame = reply.request().originatingObject()
except AttributeError:
frame = None
mainFrameRequest = frame == self.mainFrame()
if mainFrameRequest and \
self.__sslConfiguration is not None and \
reply.url() == self.mainFrame().url():
self.__sslConfiguration = None
if reply.error() == QNetworkReply.NoError and \
mainFrameRequest and \
self.__sslConfiguration is None and \
reply.url().scheme().lower() == "https" and \
reply.url() == self.mainFrame().url():
self.__sslConfiguration = reply.sslConfiguration()
self.__sslConfiguration.url = QUrl(reply.url())
if reply.error() == QNetworkReply.NoError and \
mainFrameRequest and \
reply.url() == self.mainFrame().url():
modified = reply.header(QNetworkRequest.LastModifiedHeader)
if modified and modified.isValid():
import Helpviewer.HelpWindow
manager = Helpviewer.HelpWindow.HelpWindow.bookmarksManager()
from .Bookmarks.BookmarkNode import BookmarkNode
for bookmark in manager.bookmarksForUrl(reply.url()):
manager.setTimestamp(bookmark, BookmarkNode.TsModified,
modified)
def getSslCertificate(self):
"""
Public method to get a reference to the SSL certificate.
@return amended SSL certificate (QSslCertificate)
"""
if self.__sslConfiguration is None:
return None
sslInfo = self.__sslConfiguration.peerCertificate()
sslInfo.url = QUrl(self.__sslConfiguration.url)
return sslInfo
def getSslCertificateChain(self):
"""
Public method to get a reference to the SSL certificate chain.
@return SSL certificate chain (list of QSslCertificate)
"""
if self.__sslConfiguration is None:
return []
chain = self.__sslConfiguration.peerCertificateChain()
return chain
def getSslConfiguration(self):
"""
Public method to return a reference to the current SSL configuration.
@return reference to the SSL configuration in use (QSslConfiguration)
"""
return self.__sslConfiguration
def showSslInfo(self, pos):
"""
Public slot to show some SSL information for the loaded page.
@param pos position to show the info at (QPoint)
"""
if SSL_AVAILABLE and self.__sslConfiguration is not None:
from E5Network.E5SslInfoWidget import E5SslInfoWidget
widget = E5SslInfoWidget(
self.mainFrame().url(), self.__sslConfiguration, self.view())
widget.showAt(pos)
else:
E5MessageBox.warning(
self.view(),
self.tr("SSL Info"),
self.tr("""This site does not contain SSL information."""))
def hasValidSslInfo(self):
"""
Public method to check, if the page has a valid SSL certificate.
@return flag indicating a valid SSL certificate (boolean)
"""
if self.__sslConfiguration is None:
return False
certList = self.__sslConfiguration.peerCertificateChain()
if not certList:
return False
certificateDict = Preferences.toDict(
Preferences.Prefs.settings.value("Ssl/CaCertificatesDict"))
for server in certificateDict:
localCAList = QSslCertificate.fromData(certificateDict[server])
for cert in certList:
if cert in localCAList:
return True
if qVersion() >= "5.0.0":
for cert in certList:
if cert.isBlacklisted():
return False
else:
for cert in certList:
if not cert.isValid():
return False
return True
@classmethod
def webPluginFactory(cls):
"""
Class method to get a reference to the web plug-in factory
instance.
        @return reference to the web plug-in factory instance (WebPluginFactory)
"""
if cls._webPluginFactory is None:
from .WebPlugins.WebPluginFactory import WebPluginFactory
cls._webPluginFactory = WebPluginFactory()
return cls._webPluginFactory
def event(self, evt):
"""
Public method implementing the event handler.
@param evt reference to the event (QEvent)
@return flag indicating that the event was handled (boolean)
"""
if evt.type() == QEvent.Leave:
# Fake a mouse move event just outside of the widget to trigger
# the WebKit event handler's mouseMoved function. This implements
# the interesting mouse-out behavior like invalidating scrollbars.
fakeEvent = QMouseEvent(QEvent.MouseMove, QPoint(0, -1),
Qt.NoButton, Qt.NoButton, Qt.NoModifier)
return super(HelpWebPage, self).event(fakeEvent)
return super(HelpWebPage, self).event(evt)
###############################################################################
class HelpBrowser(QWebView):
"""
Class implementing the helpbrowser widget.
This is a subclass of the Qt QWebView to implement an
interface compatible with the QTextBrowser based variant.
@signal sourceChanged(QUrl) emitted after the current URL has changed
@signal forwardAvailable(bool) emitted after the current URL has changed
@signal backwardAvailable(bool) emitted after the current URL has changed
@signal highlighted(str) emitted, when the mouse hovers over a link
@signal search(QUrl) emitted, when a search is requested
@signal zoomValueChanged(int) emitted to signal a change of the zoom value
"""
sourceChanged = pyqtSignal(QUrl)
forwardAvailable = pyqtSignal(bool)
backwardAvailable = pyqtSignal(bool)
highlighted = pyqtSignal(str)
search = pyqtSignal(QUrl)
zoomValueChanged = pyqtSignal(int)
ZoomLevels = [
30, 50, 67, 80, 90,
100,
110, 120, 133, 150, 170, 200, 240, 300,
]
    ZoomLevelDefault = ZoomLevels.index(100)
def __init__(self, mainWindow, parent=None, name=""):
"""
Constructor
@param mainWindow reference to the main window (HelpWindow)
@param parent parent widget of this window (QWidget)
@param name name of this window (string)
"""
super(HelpBrowser, self).__init__(parent)
self.setObjectName(name)
self.setWhatsThis(self.tr(
"""<b>Help Window</b>"""
"""<p>This window displays the selected help information.</p>"""
))
import Helpviewer.HelpWindow
self.__speedDial = Helpviewer.HelpWindow.HelpWindow.speedDial()
self.__page = HelpWebPage(self)
self.setPage(self.__page)
self.mw = mainWindow
self.ctrlPressed = False
self.__isLoading = False
self.__progress = 0
self.__currentZoom = 100
self.__zoomLevels = HelpBrowser.ZoomLevels[:]
self.__javaScriptBinding = None
self.__javaScriptEricObject = None
self.mw.zoomTextOnlyChanged.connect(self.__applyZoom)
self.page().setLinkDelegationPolicy(QWebPage.DelegateAllLinks)
self.linkClicked.connect(self.setSource)
self.urlChanged.connect(self.__urlChanged)
self.statusBarMessage.connect(self.__statusBarMessage)
self.page().linkHovered.connect(self.__linkHovered)
self.loadStarted.connect(self.__loadStarted)
self.loadProgress.connect(self.__loadProgress)
self.loadFinished.connect(self.__loadFinished)
self.page().setForwardUnsupportedContent(True)
self.page().unsupportedContent.connect(self.__unsupportedContent)
self.page().downloadRequested.connect(self.__downloadRequested)
self.page().frameCreated.connect(self.__addExternalBinding)
self.__addExternalBinding(self.page().mainFrame())
self.page().databaseQuotaExceeded.connect(self.__databaseQuotaExceeded)
self.mw.openSearchManager().currentEngineChanged.connect(
self.__currentEngineChanged)
self.setAcceptDrops(True)
self.__enableAccessKeys = Preferences.getHelp("AccessKeysEnabled")
self.__accessKeysPressed = False
self.__accessKeyLabels = []
self.__accessKeyNodes = {}
self.page().loadStarted.connect(self.__hideAccessKeys)
self.page().scrollRequested.connect(self.__hideAccessKeys)
self.__rss = []
self.__clickedFrame = None
self.mw.personalInformationManager().connectPage(self.page())
self.mw.greaseMonkeyManager().connectPage(self.page())
self.grabGesture(Qt.PinchGesture)
def __addExternalBinding(self, frame=None):
"""
Private slot to add javascript bindings for adding search providers.
@param frame reference to the web frame (QWebFrame)
"""
self.page().settings().setAttribute(QWebSettings.JavascriptEnabled,
True)
if self.__javaScriptBinding is None:
self.__javaScriptBinding = JavaScriptExternalObject(self.mw, self)
if frame is None:
# called from QWebFrame.javaScriptWindowObjectCleared
frame = self.sender()
if isinstance(frame, HelpWebPage):
frame = frame.mainFrame()
if frame.url().scheme() == "eric" and frame.url().path() == "home":
if self.__javaScriptEricObject is None:
self.__javaScriptEricObject = JavaScriptEricObject(
self.mw, self)
frame.addToJavaScriptWindowObject(
"eric", self.__javaScriptEricObject)
elif frame.url().scheme() == "eric" and \
frame.url().path() == "speeddial":
frame.addToJavaScriptWindowObject(
"speeddial", self.__speedDial)
self.__speedDial.addWebFrame(frame)
else:
# called from QWebPage.frameCreated
frame.javaScriptWindowObjectCleared.connect(
self.__addExternalBinding)
frame.addToJavaScriptWindowObject("external", self.__javaScriptBinding)
def linkedResources(self, relation=""):
"""
Public method to extract linked resources.
@param relation relation to extract (string)
@return list of linked resources (list of LinkedResource)
"""
resources = []
baseUrl = self.page().mainFrame().baseUrl()
linkElements = self.page().mainFrame().findAllElements(
"html > head > link")
for linkElement in linkElements.toList():
rel = linkElement.attribute("rel")
href = linkElement.attribute("href")
type_ = linkElement.attribute("type")
title = linkElement.attribute("title")
if href == "" or type_ == "":
continue
if relation and rel != relation:
continue
resource = LinkedResource()
resource.rel = rel
resource.type_ = type_
resource.href = baseUrl.resolved(QUrl.fromEncoded(href))
resource.title = title
resources.append(resource)
return resources
def __currentEngineChanged(self):
"""
Private slot to track a change of the current search engine.
"""
if self.url().toString() == "eric:home":
self.reload()
def setSource(self, name, requestData=None):
"""
Public method used to set the source to be displayed.
@param name filename to be shown (QUrl)
@param requestData tuple containing the request data (QNetworkRequest,
QNetworkAccessManager.Operation, QByteArray)
"""
if (name is None or not name.isValid()) and requestData is None:
return
if name is None and requestData is not None:
name = requestData[0].url()
if self.ctrlPressed:
# open in a new window
self.mw.newTab(name)
self.ctrlPressed = False
return
if not name.scheme():
name.setUrl(Preferences.getHelp("DefaultScheme") + name.toString())
if len(name.scheme()) == 1 or \
name.scheme() == "file":
# name is a local file
if name.scheme() and len(name.scheme()) == 1:
# it is a local path on win os
name = QUrl.fromLocalFile(name.toString())
if not QFileInfo(name.toLocalFile()).exists():
E5MessageBox.critical(
self,
self.tr("eric6 Web Browser"),
self.tr(
"""<p>The file <b>{0}</b> does not exist.</p>""")
.format(name.toLocalFile()))
return
if name.toLocalFile().endswith(".pdf") or \
name.toLocalFile().endswith(".PDF") or \
name.toLocalFile().endswith(".chm") or \
name.toLocalFile().endswith(".CHM"):
started = QDesktopServices.openUrl(name)
if not started:
E5MessageBox.critical(
self,
self.tr("eric6 Web Browser"),
self.tr(
"""<p>Could not start a viewer"""
""" for file <b>{0}</b>.</p>""")
.format(name.path()))
return
elif name.scheme() in ["mailto"]:
started = QDesktopServices.openUrl(name)
if not started:
E5MessageBox.critical(
self,
self.tr("eric6 Web Browser"),
self.tr(
"""<p>Could not start an application"""
""" for URL <b>{0}</b>.</p>""")
.format(name.toString()))
return
elif name.scheme() == "javascript":
scriptSource = QUrl.fromPercentEncoding(name.toString(
QUrl.FormattingOptions(QUrl.TolerantMode | QUrl.RemoveScheme)))
self.page().mainFrame().evaluateJavaScript(scriptSource)
return
else:
if name.toString().endswith(".pdf") or \
name.toString().endswith(".PDF") or \
name.toString().endswith(".chm") or \
name.toString().endswith(".CHM"):
started = QDesktopServices.openUrl(name)
if not started:
E5MessageBox.critical(
self,
self.tr("eric6 Web Browser"),
self.tr(
"""<p>Could not start a viewer"""
""" for file <b>{0}</b>.</p>""")
.format(name.path()))
return
if requestData is not None:
self.load(*requestData)
else:
self.load(name)
def source(self):
"""
Public method to return the URL of the loaded page.
@return URL loaded in the help browser (QUrl)
"""
return self.url()
def documentTitle(self):
"""
Public method to return the title of the loaded page.
@return title (string)
"""
return self.title()
def backward(self):
"""
Public slot to move backwards in history.
"""
self.triggerPageAction(QWebPage.Back)
self.__urlChanged(self.history().currentItem().url())
def forward(self):
"""
Public slot to move forward in history.
"""
self.triggerPageAction(QWebPage.Forward)
self.__urlChanged(self.history().currentItem().url())
def home(self):
"""
Public slot to move to the first page loaded.
"""
homeUrl = QUrl(Preferences.getHelp("HomePage"))
self.setSource(homeUrl)
self.__urlChanged(self.history().currentItem().url())
def reload(self):
"""
Public slot to reload the current page.
"""
self.triggerPageAction(QWebPage.Reload)
def copy(self):
"""
Public slot to copy the selected text.
"""
self.triggerPageAction(QWebPage.Copy)
def isForwardAvailable(self):
"""
Public method to determine, if a forward move in history is possible.
@return flag indicating move forward is possible (boolean)
"""
return self.history().canGoForward()
def isBackwardAvailable(self):
"""
Public method to determine, if a backwards move in history is possible.
@return flag indicating move backwards is possible (boolean)
"""
return self.history().canGoBack()
def __levelForZoom(self, zoom):
"""
Private method determining the zoom level index given a zoom factor.
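        For illustration (derived from the default ZoomLevels table):
        __levelForZoom(100) returns 5, and __levelForZoom(125) returns 8,
        the index of the first level not smaller than 125 (i.e. 133).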
@param zoom zoom factor (integer)
@return index of zoom factor (integer)
"""
try:
index = self.__zoomLevels.index(zoom)
except ValueError:
for index in range(len(self.__zoomLevels)):
if zoom <= self.__zoomLevels[index]:
break
return index
def __applyZoom(self):
"""
Private slot to apply the current zoom factor.
"""
self.setZoomValue(self.__currentZoom)
def setZoomValue(self, value):
"""
Public method to set the zoom value.
@param value zoom value (integer)
"""
if value != self.zoomValue():
try:
self.setZoomFactor(value / 100.0)
except AttributeError:
self.setTextSizeMultiplier(value / 100.0)
self.zoomValueChanged.emit(value)
def zoomValue(self):
"""
Public method to get the current zoom value.
@return zoom value (integer)
"""
try:
val = self.zoomFactor() * 100
except AttributeError:
val = self.textSizeMultiplier() * 100
return int(val)
def zoomIn(self):
"""
Public slot to zoom into the page.
"""
index = self.__levelForZoom(self.__currentZoom)
if index < len(self.__zoomLevels) - 1:
self.__currentZoom = self.__zoomLevels[index + 1]
self.__applyZoom()
def zoomOut(self):
"""
Public slot to zoom out of the page.
"""
index = self.__levelForZoom(self.__currentZoom)
if index > 0:
self.__currentZoom = self.__zoomLevels[index - 1]
self.__applyZoom()
def zoomReset(self):
"""
Public method to reset the zoom factor.
"""
self.__currentZoom = self.__zoomLevels[HelpBrowser.ZoomLevelDefault]
self.__applyZoom()
def hasSelection(self):
"""
Public method to determine, if there is some text selected.
@return flag indicating text has been selected (boolean)
"""
return self.selectedText() != ""
def findNextPrev(self, txt, case, backwards, wrap, highlightAll):
"""
Public slot to find the next occurrence of a text.
@param txt text to search for (string)
@param case flag indicating a case sensitive search (boolean)
@param backwards flag indicating a backwards search (boolean)
@param wrap flag indicating to wrap around (boolean)
@param highlightAll flag indicating to highlight all occurrences
(boolean)
@return flag indicating that a match was found (boolean)
"""
findFlags = QWebPage.FindFlags()
if case:
findFlags |= QWebPage.FindCaseSensitively
if backwards:
findFlags |= QWebPage.FindBackward
if wrap:
findFlags |= QWebPage.FindWrapsAroundDocument
try:
if highlightAll:
findFlags |= QWebPage.HighlightAllOccurrences
except AttributeError:
pass
return self.findText(txt, findFlags)
def __isMediaElement(self, element):
"""
Private method to check, if the given element is a media element.
@param element element to be checked (QWebElement)
@return flag indicating a media element (boolean)
"""
return element.tagName().lower() in ["video", "audio"]
def contextMenuEvent(self, evt):
"""
Protected method called to create a context menu.
This method is overridden from QWebView.
@param evt reference to the context menu event object
(QContextMenuEvent)
"""
from .UserAgent.UserAgentMenu import UserAgentMenu
menu = QMenu(self)
frameAtPos = self.page().frameAt(evt.pos())
hit = self.page().mainFrame().hitTestContent(evt.pos())
if not hit.linkUrl().isEmpty():
menu.addAction(
UI.PixmapCache.getIcon("openNewTab.png"),
self.tr("Open Link in New Tab\tCtrl+LMB"),
self.__openLinkInNewTab).setData(hit.linkUrl())
menu.addSeparator()
menu.addAction(
UI.PixmapCache.getIcon("download.png"),
self.tr("Save Lin&k"), self.__downloadLink)
menu.addAction(
UI.PixmapCache.getIcon("bookmark22.png"),
self.tr("Bookmark this Link"), self.__bookmarkLink)\
.setData(hit.linkUrl())
menu.addSeparator()
menu.addAction(
UI.PixmapCache.getIcon("editCopy.png"),
self.tr("Copy Link to Clipboard"), self.__copyLink)
menu.addAction(
UI.PixmapCache.getIcon("mailSend.png"),
self.tr("Send Link"),
self.__sendLink).setData(hit.linkUrl())
if Preferences.getHelp("VirusTotalEnabled") and \
Preferences.getHelp("VirusTotalServiceKey") != "":
menu.addAction(
UI.PixmapCache.getIcon("virustotal.png"),
self.tr("Scan Link with VirusTotal"),
self.__virusTotal).setData(hit.linkUrl())
if not hit.imageUrl().isEmpty():
if not menu.isEmpty():
menu.addSeparator()
menu.addAction(
UI.PixmapCache.getIcon("openNewTab.png"),
self.tr("Open Image in New Tab"),
self.__openLinkInNewTab).setData(hit.imageUrl())
menu.addSeparator()
menu.addAction(
UI.PixmapCache.getIcon("download.png"),
self.tr("Save Image"), self.__downloadImage)
menu.addAction(
self.tr("Copy Image to Clipboard"), self.__copyImage)
menu.addAction(
UI.PixmapCache.getIcon("editCopy.png"),
self.tr("Copy Image Location to Clipboard"),
self.__copyLocation).setData(hit.imageUrl().toString())
menu.addAction(
UI.PixmapCache.getIcon("mailSend.png"),
self.tr("Send Image Link"),
self.__sendLink).setData(hit.imageUrl())
menu.addSeparator()
menu.addAction(
UI.PixmapCache.getIcon("adBlockPlus.png"),
self.tr("Block Image"), self.__blockImage)\
.setData(hit.imageUrl().toString())
if Preferences.getHelp("VirusTotalEnabled") and \
Preferences.getHelp("VirusTotalServiceKey") != "":
menu.addAction(
UI.PixmapCache.getIcon("virustotal.png"),
self.tr("Scan Image with VirusTotal"),
self.__virusTotal).setData(hit.imageUrl())
element = hit.element()
if not element.isNull():
if self.__isMediaElement(element):
if not menu.isEmpty():
menu.addSeparator()
self.__clickedMediaElement = element
paused = element.evaluateJavaScript("this.paused")
muted = element.evaluateJavaScript("this.muted")
videoUrl = QUrl(element.evaluateJavaScript("this.currentSrc"))
if paused:
menu.addAction(
UI.PixmapCache.getIcon("mediaPlaybackStart.png"),
self.tr("Play"), self.__pauseMedia)
else:
menu.addAction(
UI.PixmapCache.getIcon("mediaPlaybackPause.png"),
self.tr("Pause"), self.__pauseMedia)
if muted:
menu.addAction(
UI.PixmapCache.getIcon("audioVolumeHigh.png"),
self.tr("Unmute"), self.__muteMedia)
else:
menu.addAction(
UI.PixmapCache.getIcon("audioVolumeMuted.png"),
self.tr("Mute"), self.__muteMedia)
menu.addSeparator()
menu.addAction(
UI.PixmapCache.getIcon("editCopy.png"),
self.tr("Copy Media Address to Clipboard"),
self.__copyLocation).setData(videoUrl.toString())
menu.addAction(
UI.PixmapCache.getIcon("mailSend.png"),
self.tr("Send Media Address"), self.__sendLink)\
.setData(videoUrl)
menu.addAction(
UI.PixmapCache.getIcon("download.png"),
self.tr("Save Media"), self.__downloadMedia)\
.setData(videoUrl)
if element.tagName().lower() in ["input", "textarea"]:
if menu.isEmpty():
pageMenu = self.page().createStandardContextMenu()
directionFound = False
# used to detect double direction entry
for act in pageMenu.actions():
if act.isSeparator():
menu.addSeparator()
continue
if act.menu():
if self.pageAction(
QWebPage.SetTextDirectionDefault) in \
act.menu().actions():
if directionFound:
act.setVisible(False)
directionFound = True
elif self.pageAction(QWebPage.ToggleBold) in \
act.menu().actions():
act.setVisible(False)
elif act == self.pageAction(QWebPage.InspectElement):
# we have our own inspect entry
act.setVisible(False)
menu.addAction(act)
pageMenu = None
if not menu.isEmpty():
menu.addSeparator()
self.mw.personalInformationManager().createSubMenu(menu, self, hit)
menu.addAction(self.mw.newTabAct)
menu.addAction(self.mw.newAct)
menu.addSeparator()
menu.addAction(self.mw.saveAsAct)
menu.addSeparator()
if frameAtPos and self.page().mainFrame() != frameAtPos:
self.__clickedFrame = frameAtPos
fmenu = QMenu(self.tr("This Frame"))
frameUrl = self.__clickedFrame.url()
if frameUrl.isValid():
fmenu.addAction(
self.tr("Show &only this frame"),
self.__loadClickedFrame)
fmenu.addAction(
UI.PixmapCache.getIcon("openNewTab.png"),
self.tr("Show in new &tab"),
self.__openLinkInNewTab).setData(self.__clickedFrame.url())
fmenu.addSeparator()
fmenu.addAction(
UI.PixmapCache.getIcon("print.png"),
self.tr("&Print"), self.__printClickedFrame)
fmenu.addAction(
UI.PixmapCache.getIcon("printPreview.png"),
self.tr("Print Preview"), self.__printPreviewClickedFrame)
fmenu.addAction(
UI.PixmapCache.getIcon("printPdf.png"),
self.tr("Print as PDF"), self.__printPdfClickedFrame)
fmenu.addSeparator()
fmenu.addAction(
UI.PixmapCache.getIcon("zoomIn.png"),
self.tr("Zoom &in"), self.__zoomInClickedFrame)
fmenu.addAction(
UI.PixmapCache.getIcon("zoomReset.png"),
self.tr("Zoom &reset"), self.__zoomResetClickedFrame)
fmenu.addAction(
UI.PixmapCache.getIcon("zoomOut.png"),
self.tr("Zoom &out"), self.__zoomOutClickedFrame)
fmenu.addSeparator()
fmenu.addAction(
self.tr("Show frame so&urce"),
self.__showClickedFrameSource)
menu.addMenu(fmenu)
menu.addSeparator()
menu.addAction(
UI.PixmapCache.getIcon("bookmark22.png"),
self.tr("Bookmark this Page"), self.addBookmark)
menu.addAction(
UI.PixmapCache.getIcon("mailSend.png"),
self.tr("Send Page Link"), self.__sendLink).setData(self.url())
menu.addSeparator()
self.__userAgentMenu = UserAgentMenu(self.tr("User Agent"),
url=self.url())
menu.addMenu(self.__userAgentMenu)
menu.addSeparator()
menu.addAction(self.mw.backAct)
menu.addAction(self.mw.forwardAct)
menu.addAction(self.mw.homeAct)
menu.addSeparator()
menu.addAction(self.mw.zoomInAct)
menu.addAction(self.mw.zoomResetAct)
menu.addAction(self.mw.zoomOutAct)
menu.addSeparator()
if self.selectedText():
menu.addAction(self.mw.copyAct)
menu.addAction(
UI.PixmapCache.getIcon("mailSend.png"),
self.tr("Send Text"),
self.__sendLink).setData(self.selectedText())
menu.addAction(self.mw.findAct)
menu.addSeparator()
if self.selectedText():
self.__searchMenu = menu.addMenu(self.tr("Search with..."))
from .OpenSearch.OpenSearchEngineAction import \
OpenSearchEngineAction
engineNames = self.mw.openSearchManager().allEnginesNames()
for engineName in engineNames:
engine = self.mw.openSearchManager().engine(engineName)
act = OpenSearchEngineAction(engine, self.__searchMenu)
act.setData(engineName)
self.__searchMenu.addAction(act)
self.__searchMenu.triggered.connect(self.__searchRequested)
menu.addSeparator()
from .HelpLanguagesDialog import HelpLanguagesDialog
languages = Preferences.toList(
Preferences.Prefs.settings.value(
"Help/AcceptLanguages",
HelpLanguagesDialog.defaultAcceptLanguages()))
if languages:
language = languages[0]
langCode = language.split("[")[1][:2]
googleTranslatorUrl = QUrl(
"http://translate.google.com/#auto|{0}|{1}".format(
langCode, self.selectedText()))
menu.addAction(
UI.PixmapCache.getIcon("translate.png"),
self.tr("Google Translate"), self.__openLinkInNewTab)\
.setData(googleTranslatorUrl)
wiktionaryUrl = QUrl(
"http://{0}.wiktionary.org/wiki/Special:Search?search={1}"
.format(langCode, self.selectedText()))
menu.addAction(
UI.PixmapCache.getIcon("wikipedia.png"),
self.tr("Dictionary"), self.__openLinkInNewTab)\
.setData(wiktionaryUrl)
menu.addSeparator()
guessedUrl = QUrl.fromUserInput(self.selectedText().strip())
if self.__isUrlValid(guessedUrl):
menu.addAction(
self.tr("Go to web address"),
self.__openLinkInNewTab).setData(guessedUrl)
menu.addSeparator()
element = hit.element()
if not element.isNull() and \
element.tagName().lower() == "input" and \
element.attribute("type", "text") == "text":
menu.addAction(self.tr("Add to web search toolbar"),
self.__addSearchEngine).setData(element)
menu.addSeparator()
menu.addAction(
UI.PixmapCache.getIcon("webInspector.png"),
self.tr("Web Inspector..."), self.__webInspector)
menu.exec_(evt.globalPos())
def __isUrlValid(self, url):
"""
Private method to check a URL for validity.
@param url URL to be checked (QUrl)
@return flag indicating a valid URL (boolean)
"""
return url.isValid() and \
bool(url.host()) and \
bool(url.scheme()) and \
"." in url.host()
def __openLinkInNewTab(self):
"""
        Private method called by the context menu to open a link in a new
        tab.
"""
act = self.sender()
url = act.data()
if url.isEmpty():
return
self.ctrlPressed = True
self.setSource(url)
self.ctrlPressed = False
def __bookmarkLink(self):
"""
Private slot to bookmark a link via the context menu.
"""
act = self.sender()
url = act.data()
if url.isEmpty():
return
from .Bookmarks.AddBookmarkDialog import AddBookmarkDialog
dlg = AddBookmarkDialog()
dlg.setUrl(bytes(url.toEncoded()).decode())
dlg.exec_()
def __sendLink(self):
"""
Private slot to send a link via email.
"""
act = self.sender()
data = act.data()
if isinstance(data, QUrl) and data.isEmpty():
return
if isinstance(data, QUrl):
data = data.toString()
QDesktopServices.openUrl(QUrl("mailto:?body=" + data))
def __downloadLink(self):
"""
Private slot to download a link and save it to disk.
"""
self.pageAction(QWebPage.DownloadLinkToDisk).trigger()
def __copyLink(self):
"""
Private slot to copy a link to the clipboard.
"""
self.pageAction(QWebPage.CopyLinkToClipboard).trigger()
def __downloadImage(self):
"""
Private slot to download an image and save it to disk.
"""
self.pageAction(QWebPage.DownloadImageToDisk).trigger()
def __copyImage(self):
"""
Private slot to copy an image to the clipboard.
"""
self.pageAction(QWebPage.CopyImageToClipboard).trigger()
def __copyLocation(self):
"""
Private slot to copy an image or media location to the clipboard.
"""
act = self.sender()
url = act.data()
QApplication.clipboard().setText(url)
def __blockImage(self):
"""
Private slot to add a block rule for an image URL.
"""
import Helpviewer.HelpWindow
act = self.sender()
url = act.data()
dlg = Helpviewer.HelpWindow.HelpWindow.adBlockManager().showDialog()
dlg.addCustomRule(url)
def __downloadMedia(self):
"""
Private slot to download a media and save it to disk.
"""
act = self.sender()
url = act.data()
self.mw.downloadManager().download(url, True, mainWindow=self.mw)
def __pauseMedia(self):
"""
Private slot to pause or play the selected media.
"""
paused = self.__clickedMediaElement.evaluateJavaScript("this.paused")
if paused:
self.__clickedMediaElement.evaluateJavaScript("this.play()")
else:
self.__clickedMediaElement.evaluateJavaScript("this.pause()")
def __muteMedia(self):
"""
Private slot to (un)mute the selected media.
"""
muted = self.__clickedMediaElement.evaluateJavaScript("this.muted")
if muted:
self.__clickedMediaElement.evaluateJavaScript("this.muted = false")
else:
self.__clickedMediaElement.evaluateJavaScript("this.muted = true")
def __virusTotal(self):
"""
Private slot to scan the selected URL with VirusTotal.
"""
act = self.sender()
url = act.data()
self.mw.requestVirusTotalScan(url)
def __searchRequested(self, act):
"""
Private slot to search for some text with a selected search engine.
@param act reference to the action that triggered this slot (QAction)
"""
searchText = self.selectedText()
if not searchText:
return
engineName = act.data()
if engineName:
engine = self.mw.openSearchManager().engine(engineName)
self.search.emit(engine.searchUrl(searchText))
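    # __addSearchEngine() below turns the form around a clicked text field
    # into an OpenSearch engine: it walks up to the enclosing <form>,
    # rejects non-GET forms, serializes every input/select value into the
    # form's action URL and substitutes the "{searchTerms}" placeholder for
    # the clicked field's value.  The qVersion() checks keep Qt 4
    # compatibility, since Qt 5 moved query handling from QUrl to QUrlQuery.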
def __addSearchEngine(self):
"""
Private slot to add a new search engine.
"""
act = self.sender()
if act is None:
return
element = act.data()
elementName = element.attribute("name")
formElement = QWebElement(element)
        # Guard against elements that are not inside a form; parent() of a
        # null QWebElement stays null, which would otherwise loop forever.
        while not formElement.isNull() and \
                formElement.tagName().lower() != "form":
            formElement = formElement.parent()
if formElement.isNull() or \
formElement.attribute("action") == "":
return
method = formElement.attribute("method", "get").lower()
if method != "get":
E5MessageBox.warning(
self,
self.tr("Method not supported"),
self.tr(
"""{0} method is not supported.""").format(method.upper()))
return
searchUrl = QUrl(self.page().mainFrame().baseUrl().resolved(
QUrl(formElement.attribute("action"))))
if searchUrl.scheme() != "http":
return
if qVersion() >= "5.0.0":
from PyQt5.QtCore import QUrlQuery
searchUrlQuery = QUrlQuery(searchUrl)
searchEngines = {}
inputFields = formElement.findAll("input")
for inputField in inputFields.toList():
type_ = inputField.attribute("type", "text")
name = inputField.attribute("name")
value = inputField.evaluateJavaScript("this.value")
if type_ == "submit":
searchEngines[value] = name
elif type_ == "text":
if inputField == element:
value = "{searchTerms}"
if qVersion() >= "5.0.0":
searchUrlQuery.addQueryItem(name, value)
else:
searchUrl.addQueryItem(name, value)
elif type_ == "checkbox" or type_ == "radio":
if inputField.evaluateJavaScript("this.checked"):
if qVersion() >= "5.0.0":
searchUrlQuery.addQueryItem(name, value)
else:
searchUrl.addQueryItem(name, value)
elif type_ == "hidden":
if qVersion() >= "5.0.0":
searchUrlQuery.addQueryItem(name, value)
else:
searchUrl.addQueryItem(name, value)
selectFields = formElement.findAll("select")
for selectField in selectFields.toList():
name = selectField.attribute("name")
selectedIndex = selectField.evaluateJavaScript(
"this.selectedIndex")
if selectedIndex == -1:
continue
options = selectField.findAll("option")
value = options.at(selectedIndex).toPlainText()
if qVersion() >= "5.0.0":
searchUrlQuery.addQueryItem(name, value)
else:
searchUrl.addQueryItem(name, value)
ok = True
if len(searchEngines) > 1:
searchEngine, ok = QInputDialog.getItem(
self,
self.tr("Search engine"),
self.tr("Choose the desired search engine"),
sorted(searchEngines.keys()), 0, False)
if not ok:
return
if searchEngines[searchEngine] != "":
if qVersion() >= "5.0.0":
searchUrlQuery.addQueryItem(
searchEngines[searchEngine], searchEngine)
else:
searchUrl.addQueryItem(
searchEngines[searchEngine], searchEngine)
engineName = ""
labels = formElement.findAll('label[for="{0}"]'.format(elementName))
if labels.count() > 0:
engineName = labels.at(0).toPlainText()
engineName, ok = QInputDialog.getText(
self,
self.tr("Engine name"),
self.tr("Enter a name for the engine"),
QLineEdit.Normal,
engineName)
if not ok:
return
if qVersion() >= "5.0.0":
searchUrl.setQuery(searchUrlQuery)
from .OpenSearch.OpenSearchEngine import OpenSearchEngine
engine = OpenSearchEngine()
engine.setName(engineName)
engine.setDescription(engineName)
engine.setSearchUrlTemplate(searchUrl.toString())
engine.setImage(self.icon().pixmap(16, 16).toImage())
self.mw.openSearchManager().addEngine(engine)
def __webInspector(self):
"""
Private slot to show the web inspector window.
"""
self.triggerPageAction(QWebPage.InspectElement)
def addBookmark(self):
"""
Public slot to bookmark the current page.
"""
from .Bookmarks.AddBookmarkDialog import AddBookmarkDialog
dlg = AddBookmarkDialog()
dlg.setUrl(bytes(self.url().toEncoded()).decode())
dlg.setTitle(self.title())
meta = self.page().mainFrame().metaData()
if "description" in meta:
dlg.setDescription(meta["description"][0])
dlg.exec_()
def dragEnterEvent(self, evt):
"""
Protected method called by a drag enter event.
@param evt reference to the drag enter event (QDragEnterEvent)
"""
evt.acceptProposedAction()
def dragMoveEvent(self, evt):
"""
Protected method called by a drag move event.
@param evt reference to the drag move event (QDragMoveEvent)
"""
evt.ignore()
if evt.source() != self:
if len(evt.mimeData().urls()) > 0:
evt.acceptProposedAction()
else:
url = QUrl(evt.mimeData().text())
if url.isValid():
evt.acceptProposedAction()
if not evt.isAccepted():
super(HelpBrowser, self).dragMoveEvent(evt)
def dropEvent(self, evt):
"""
Protected method called by a drop event.
@param evt reference to the drop event (QDropEvent)
"""
super(HelpBrowser, self).dropEvent(evt)
if not evt.isAccepted() and \
evt.source() != self and \
evt.possibleActions() & Qt.CopyAction:
url = QUrl()
if len(evt.mimeData().urls()) > 0:
url = evt.mimeData().urls()[0]
if not url.isValid():
url = QUrl(evt.mimeData().text())
if url.isValid():
self.setSource(url)
evt.acceptProposedAction()
def mousePressEvent(self, evt):
"""
Protected method called by a mouse press event.
@param evt reference to the mouse event (QMouseEvent)
"""
self.mw.setEventMouseButtons(evt.buttons())
self.mw.setEventKeyboardModifiers(evt.modifiers())
if evt.button() == Qt.XButton1:
self.pageAction(QWebPage.Back).trigger()
elif evt.button() == Qt.XButton2:
self.pageAction(QWebPage.Forward).trigger()
else:
super(HelpBrowser, self).mousePressEvent(evt)
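    # Releasing the middle button performs a "paste and go": if the page
    # does not accept the event itself, the text in the selection clipboard
    # is interpreted as a URL and loaded.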
def mouseReleaseEvent(self, evt):
"""
Protected method called by a mouse release event.
@param evt reference to the mouse event (QMouseEvent)
"""
accepted = evt.isAccepted()
self.__page.event(evt)
if not evt.isAccepted() and \
self.mw.eventMouseButtons() & Qt.MidButton:
url = QUrl(QApplication.clipboard().text(QClipboard.Selection))
if not url.isEmpty() and \
url.isValid() and \
url.scheme() != "":
self.mw.setEventMouseButtons(Qt.NoButton)
self.mw.setEventKeyboardModifiers(Qt.NoModifier)
self.setSource(url)
evt.setAccepted(accepted)
def wheelEvent(self, evt):
"""
Protected method to handle wheel events.
@param evt reference to the wheel event (QWheelEvent)
"""
if qVersion() >= "5.0.0":
delta = evt.angleDelta().y()
else:
delta = evt.delta()
if evt.modifiers() & Qt.ControlModifier:
if delta < 0:
self.zoomOut()
else:
self.zoomIn()
evt.accept()
return
if evt.modifiers() & Qt.ShiftModifier:
if delta < 0:
self.backward()
else:
self.forward()
evt.accept()
return
super(HelpBrowser, self).wheelEvent(evt)
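    # Access key handling: pressing and releasing Ctrl on its own toggles
    # the access key labels after a short delay; while the labels are
    # visible, typing a labelled character activates the matching element
    # (see __checkForAccessKey() below).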
def keyPressEvent(self, evt):
"""
Protected method called by a key press.
@param evt reference to the key event (QKeyEvent)
"""
if self.mw.personalInformationManager().viewKeyPressEvent(self, evt):
return
if self.__enableAccessKeys:
self.__accessKeysPressed = (
evt.modifiers() == Qt.ControlModifier and
evt.key() == Qt.Key_Control)
if not self.__accessKeysPressed:
if self.__checkForAccessKey(evt):
self.__hideAccessKeys()
evt.accept()
return
self.__hideAccessKeys()
else:
QTimer.singleShot(300, self.__accessKeyShortcut)
self.ctrlPressed = (evt.key() == Qt.Key_Control)
super(HelpBrowser, self).keyPressEvent(evt)
def keyReleaseEvent(self, evt):
"""
Protected method called by a key release.
@param evt reference to the key event (QKeyEvent)
"""
if self.__enableAccessKeys:
self.__accessKeysPressed = evt.key() == Qt.Key_Control
self.ctrlPressed = False
super(HelpBrowser, self).keyReleaseEvent(evt)
def focusOutEvent(self, evt):
"""
Protected method called by a focus out event.
@param evt reference to the focus event (QFocusEvent)
"""
if self.__accessKeysPressed:
self.__hideAccessKeys()
self.__accessKeysPressed = False
super(HelpBrowser, self).focusOutEvent(evt)
def event(self, evt):
"""
Public method handling events.
@param evt reference to the event (QEvent)
        @return flag indicating if the event was handled (boolean)
"""
if evt.type() == QEvent.Gesture:
self.gestureEvent(evt)
return True
return super(HelpBrowser, self).event(evt)
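    # Pinch gestures drive the zoom directly: when the gesture starts, the
    # pinch scale factor is seeded from the current zoom percentage;
    # afterwards the reported scale factor is mapped back onto
    # __currentZoom and applied.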
def gestureEvent(self, evt):
"""
Protected method handling gesture events.
        @param evt reference to the gesture event (QGestureEvent)
"""
pinch = evt.gesture(Qt.PinchGesture)
if pinch:
if pinch.state() == Qt.GestureStarted:
pinch.setScaleFactor(self.__currentZoom / 100.0)
else:
scaleFactor = pinch.scaleFactor()
self.__currentZoom = int(scaleFactor * 100)
self.__applyZoom()
evt.accept()
def clearHistory(self):
"""
Public slot to clear the history.
"""
self.history().clear()
self.__urlChanged(self.history().currentItem().url())
###########################################################################
## Signal converters below
###########################################################################
def __urlChanged(self, url):
"""
Private slot to handle the urlChanged signal.
@param url the new url (QUrl)
"""
self.sourceChanged.emit(url)
self.forwardAvailable.emit(self.isForwardAvailable())
self.backwardAvailable.emit(self.isBackwardAvailable())
def __statusBarMessage(self, text):
"""
Private slot to handle the statusBarMessage signal.
@param text text to be shown in the status bar (string)
"""
self.mw.statusBar().showMessage(text)
def __linkHovered(self, link, title, textContent):
"""
Private slot to handle the linkHovered signal.
@param link the URL of the link (string)
@param title the link title (string)
@param textContent text content of the link (string)
"""
self.highlighted.emit(link)
###########################################################################
## Signal handlers below
###########################################################################
def __loadStarted(self):
"""
Private method to handle the loadStarted signal.
"""
self.__isLoading = True
self.__progress = 0
def __loadProgress(self, progress):
"""
Private method to handle the loadProgress signal.
@param progress progress value (integer)
"""
self.__progress = progress
def __loadFinished(self, ok):
"""
Private method to handle the loadFinished signal.
@param ok flag indicating the result (boolean)
"""
self.__isLoading = False
self.__progress = 0
if Preferences.getHelp("ClickToFlashEnabled"):
# this is a hack to make the ClickToFlash button appear
self.zoomIn()
self.zoomOut()
if ok:
self.mw.adBlockManager().page().hideBlockedPageEntries(self.page())
self.mw.passwordManager().fill(self.page())
def isLoading(self):
"""
Public method to get the loading state.
@return flag indicating the loading state (boolean)
"""
return self.__isLoading
def progress(self):
"""
Public method to get the load progress.
@return load progress (integer)
"""
return self.__progress
def saveAs(self):
"""
Public method to save the current page to a file.
"""
url = self.url()
if url.isEmpty():
return
self.mw.downloadManager().download(url, True, mainWindow=self.mw)
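    # __unsupportedContent() below serves two purposes: replies carrying a
    # Content-Type header are handed to the download manager, while failed
    # loads are rendered through the notFoundPage.html template by filling
    # in its @IMAGE@, @FAVICON@, @TITLE@, @H1@, @H2@, @LI-n@ and @BUTTON@
    # placeholders.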
def __unsupportedContent(self, reply, requestFilename=None,
download=False):
"""
Private slot to handle the unsupportedContent signal.
@param reply reference to the reply object (QNetworkReply)
        @keyparam requestFilename flag indicating whether to ask for a
            filename
(boolean or None). If it is None, the behavior is determined
by a configuration option.
@keyparam download flag indicating a download operation (boolean)
"""
if reply is None:
return
replyUrl = reply.url()
if replyUrl.scheme() == "abp":
return
if reply.error() == QNetworkReply.NoError:
if reply.header(QNetworkRequest.ContentTypeHeader):
self.mw.downloadManager().handleUnsupportedContent(
reply, webPage=self.page(), mainWindow=self.mw)
return
replyUrl = reply.url()
if replyUrl.isEmpty():
return
notFoundFrame = self.page().mainFrame()
if notFoundFrame is None:
return
if reply.header(QNetworkRequest.ContentTypeHeader):
data = reply.readAll()
if contentSniff(data):
notFoundFrame.setHtml(str(data, encoding="utf-8"), replyUrl)
return
urlString = bytes(replyUrl.toEncoded()).decode()
title = self.tr("Error loading page: {0}").format(urlString)
htmlFile = QFile(":/html/notFoundPage.html")
htmlFile.open(QFile.ReadOnly)
html = htmlFile.readAll()
pixmap = qApp.style()\
.standardIcon(QStyle.SP_MessageBoxWarning).pixmap(48, 48)
imageBuffer = QBuffer()
imageBuffer.open(QIODevice.ReadWrite)
if pixmap.save(imageBuffer, "PNG"):
html = html.replace("@IMAGE@", imageBuffer.buffer().toBase64())
pixmap = qApp.style()\
.standardIcon(QStyle.SP_MessageBoxWarning).pixmap(16, 16)
imageBuffer = QBuffer()
imageBuffer.open(QIODevice.ReadWrite)
if pixmap.save(imageBuffer, "PNG"):
html = html.replace("@FAVICON@", imageBuffer.buffer().toBase64())
html = html.replace("@TITLE@", title.encode("utf8"))
html = html.replace("@H1@", reply.errorString().encode("utf8"))
html = html.replace(
"@H2@", self.tr("When connecting to: {0}.")
.format(urlString).encode("utf8"))
html = html.replace(
"@LI-1@",
self.tr("Check the address for errors such as "
"<b>ww</b>.example.org instead of "
"<b>www</b>.example.org").encode("utf8"))
html = html.replace(
"@LI-2@",
self.tr("If the address is correct, try checking the network "
"connection.").encode("utf8"))
html = html.replace(
"@LI-3@",
self.tr(
"If your computer or network is protected by a firewall "
"or proxy, make sure that the browser is permitted to "
"access the network.").encode("utf8"))
html = html.replace(
"@LI-4@",
self.tr("If your cache policy is set to offline browsing,"
"only pages in the local cache are available.")
.encode("utf8"))
html = html.replace(
"@BUTTON@", self.tr("Try Again").encode("utf8"))
notFoundFrame.setHtml(bytes(html).decode("utf8"), replyUrl)
self.mw.historyManager().removeHistoryEntry(replyUrl, self.title())
self.loadFinished.emit(False)
def __downloadRequested(self, request):
"""
Private slot to handle a download request.
@param request reference to the request object (QNetworkRequest)
"""
self.mw.downloadManager().download(request, mainWindow=self.mw)
def __databaseQuotaExceeded(self, frame, databaseName):
"""
        Private slot to handle the case where the database quota is exceeded.
@param frame reference to the frame (QWebFrame)
@param databaseName name of the web database (string)
"""
securityOrigin = frame.securityOrigin()
if securityOrigin.databaseQuota() > 0 and \
securityOrigin.databaseUsage() == 0:
# cope with a strange behavior of Qt 4.6, if a database is
# accessed for the first time
return
res = E5MessageBox.yesNo(
self,
self.tr("Web Database Quota"),
self.tr(
"""<p>The database quota of <strong>{0}</strong> has"""
""" been exceeded while accessing database <strong>{1}"""
"""</strong>.</p><p>Shall it be changed?</p>""")
.format(self.__dataString(securityOrigin.databaseQuota()),
databaseName),
yesDefault=True)
if res:
newQuota, ok = QInputDialog.getInt(
self,
self.tr("New Web Database Quota"),
                self.tr(
                    "Enter the new quota in MB (current = {0}, used = {1}; "
                    "step size = 5 MB):").format(
                        self.__dataString(securityOrigin.databaseQuota()),
                        self.__dataString(securityOrigin.databaseUsage())),
securityOrigin.databaseQuota() // (1024 * 1024),
0, 2147483647, 5)
if ok:
securityOrigin.setDatabaseQuota(newQuota * 1024 * 1024)
def __dataString(self, size):
"""
Private method to generate a formatted data string.
@param size size to be formatted (integer)
@return formatted data string (string)
"""
unit = ""
if size < 1024:
unit = self.tr("bytes")
elif size < 1024 * 1024:
size /= 1024
unit = self.tr("kB")
else:
size /= 1024 * 1024
unit = self.tr("MB")
return "{0:.1f} {1}".format(size, unit)
###########################################################################
## Access key related methods below
###########################################################################
def __accessKeyShortcut(self):
"""
Private slot to switch the display of access keys.
"""
if not self.hasFocus() or \
not self.__accessKeysPressed or \
not self.__enableAccessKeys:
return
if self.__accessKeyLabels:
self.__hideAccessKeys()
else:
self.__showAccessKeys()
self.__accessKeysPressed = False
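    # Activating an access key synthesizes a left button press/release pair
    # at the centre of the target element; the element's document position
    # is translated into view coordinates by subtracting the scroll offsets
    # of its own frame and of every enclosing parent frame.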
def __checkForAccessKey(self, evt):
"""
Private method to check the existence of an access key and activate the
corresponding link.
@param evt reference to the key event (QKeyEvent)
        @return flag indicating if the event was handled (boolean)
"""
if not self.__accessKeyLabels:
return False
text = evt.text()
if not text:
return False
key = text[0].upper()
handled = False
if key in self.__accessKeyNodes:
element = self.__accessKeyNodes[key]
p = element.geometry().center()
frame = element.webFrame()
p -= frame.scrollPosition()
frame = frame.parentFrame()
while frame and frame != self.page().mainFrame():
p -= frame.scrollPosition()
frame = frame.parentFrame()
pevent = QMouseEvent(
QEvent.MouseButtonPress, p, Qt.LeftButton,
Qt.MouseButtons(Qt.NoButton),
Qt.KeyboardModifiers(Qt.NoModifier))
qApp.sendEvent(self, pevent)
revent = QMouseEvent(
QEvent.MouseButtonRelease, p, Qt.LeftButton,
Qt.MouseButtons(Qt.NoButton),
Qt.KeyboardModifiers(Qt.NoModifier))
qApp.sendEvent(self, revent)
handled = True
return handled
def __hideAccessKeys(self):
"""
Private slot to hide the access key labels.
"""
if self.__accessKeyLabels:
for label in self.__accessKeyLabels:
label.hide()
label.deleteLater()
self.__accessKeyLabels = []
self.__accessKeyNodes = {}
self.update()
def __showAccessKeys(self):
"""
Private method to show the access key labels.
"""
supportedElements = [
"input", "a", "area", "button", "label", "legend", "textarea",
]
unusedKeys = "A B C D E F G H I J K L M N O P Q R S T U V W X Y Z" \
" 0 1 2 3 4 5 6 7 8 9".split()
viewport = QRect(self.__page.mainFrame().scrollPosition(),
self.__page.viewportSize())
# Priority first goes to elements with accesskey attributes
alreadyLabeled = []
for elementType in supportedElements:
result = self.page().mainFrame().findAllElements(elementType)\
.toList()
for element in result:
geometry = element.geometry()
if geometry.size().isEmpty() or \
not viewport.contains(geometry.topLeft()):
continue
accessKeyAttribute = element.attribute("accesskey").upper()
if not accessKeyAttribute:
continue
accessKey = ""
i = 0
while i < len(accessKeyAttribute):
if accessKeyAttribute[i] in unusedKeys:
accessKey = accessKeyAttribute[i]
break
i += 2
if accessKey == "":
continue
unusedKeys.remove(accessKey)
self.__makeAccessLabel(accessKey, element)
alreadyLabeled.append(element)
# Pick an access key first from the letters in the text and then
# from the list of unused access keys
for elementType in supportedElements:
result = self.page().mainFrame().findAllElements(elementType)\
.toList()
for element in result:
geometry = element.geometry()
if not unusedKeys or \
element in alreadyLabeled or \
geometry.size().isEmpty() or \
not viewport.contains(geometry.topLeft()):
continue
accessKey = ""
text = element.toPlainText().upper()
for c in text:
if c in unusedKeys:
accessKey = c
break
if accessKey == "":
accessKey = unusedKeys[0]
unusedKeys.remove(accessKey)
self.__makeAccessLabel(accessKey, element)
def __makeAccessLabel(self, accessKey, element):
"""
Private method to generate the access label for an element.
@param accessKey access key to generate the label for (str)
@param element reference to the web element to create the label for
(QWebElement)
"""
label = QLabel(self)
label.setText("<qt><b>{0}</b></qt>".format(accessKey))
p = QToolTip.palette()
color = QColor(Qt.yellow).lighter(150)
color.setAlpha(175)
p.setColor(QPalette.Window, color)
label.setPalette(p)
label.setAutoFillBackground(True)
label.setFrameStyle(QFrame.Box | QFrame.Plain)
point = element.geometry().center()
point -= self.__page.mainFrame().scrollPosition()
label.move(point)
label.show()
point.setX(point.x() - label.width() // 2)
label.move(point)
self.__accessKeyLabels.append(label)
self.__accessKeyNodes[accessKey] = element
###########################################################################
## Miscellaneous methods below
###########################################################################
def createWindow(self, windowType):
"""
        Public method called when a new window should be created.
@param windowType type of the requested window (QWebPage.WebWindowType)
@return reference to the created browser window (HelpBrowser)
"""
self.mw.newTab(addNextTo=self)
return self.mw.currentBrowser()
def preferencesChanged(self):
"""
Public method to indicate a change of the settings.
"""
self.__enableAccessKeys = Preferences.getHelp("AccessKeysEnabled")
if not self.__enableAccessKeys:
self.__hideAccessKeys()
self.reload()
###########################################################################
## RSS related methods below
###########################################################################
def checkRSS(self):
"""
        Public method to check if the loaded page contains feed links.
@return flag indicating the existence of feed links (boolean)
"""
self.__rss = []
frame = self.page().mainFrame()
linkElementsList = frame.findAllElements("link").toList()
for linkElement in linkElementsList:
# only atom+xml and rss+xml will be processed
if linkElement.attribute("rel") != "alternate" or \
(linkElement.attribute("type") != "application/rss+xml" and
linkElement.attribute("type") != "application/atom+xml"):
continue
title = linkElement.attribute("title")
href = linkElement.attribute("href")
if href == "" or title == "":
continue
self.__rss.append((title, href))
return len(self.__rss) > 0
def getRSS(self):
"""
Public method to get the extracted RSS feeds.
@return list of RSS feeds (list of tuples of two strings)
"""
return self.__rss
def hasRSS(self):
"""
        Public method to check if the loaded page has RSS links.
@return flag indicating the presence of RSS links (boolean)
"""
return len(self.__rss) > 0
###########################################################################
## Clicked Frame slots
###########################################################################
def __loadClickedFrame(self):
"""
Private slot to load the selected frame only.
"""
self.setSource(self.__clickedFrame.url())
def __printClickedFrame(self):
"""
Private slot to print the selected frame.
"""
printer = QPrinter(mode=QPrinter.HighResolution)
if Preferences.getPrinter("ColorMode"):
printer.setColorMode(QPrinter.Color)
else:
printer.setColorMode(QPrinter.GrayScale)
if Preferences.getPrinter("FirstPageFirst"):
printer.setPageOrder(QPrinter.FirstPageFirst)
else:
printer.setPageOrder(QPrinter.LastPageFirst)
printer.setPageMargins(
Preferences.getPrinter("LeftMargin") * 10,
Preferences.getPrinter("TopMargin") * 10,
Preferences.getPrinter("RightMargin") * 10,
Preferences.getPrinter("BottomMargin") * 10,
QPrinter.Millimeter
)
printerName = Preferences.getPrinter("PrinterName")
if printerName:
printer.setPrinterName(printerName)
printDialog = QPrintDialog(printer, self)
if printDialog.exec_() == QDialog.Accepted:
try:
self.__clickedFrame.print_(printer)
except AttributeError:
E5MessageBox.critical(
self,
self.tr("eric6 Web Browser"),
self.tr(
"""<p>Printing is not available due to a bug in"""
""" PyQt5. Please upgrade.</p>"""))
def __printPreviewClickedFrame(self):
"""
Private slot to show a print preview of the clicked frame.
"""
from PyQt5.QtPrintSupport import QPrintPreviewDialog
printer = QPrinter(mode=QPrinter.HighResolution)
if Preferences.getPrinter("ColorMode"):
printer.setColorMode(QPrinter.Color)
else:
printer.setColorMode(QPrinter.GrayScale)
if Preferences.getPrinter("FirstPageFirst"):
printer.setPageOrder(QPrinter.FirstPageFirst)
else:
printer.setPageOrder(QPrinter.LastPageFirst)
printer.setPageMargins(
Preferences.getPrinter("LeftMargin") * 10,
Preferences.getPrinter("TopMargin") * 10,
Preferences.getPrinter("RightMargin") * 10,
Preferences.getPrinter("BottomMargin") * 10,
QPrinter.Millimeter
)
printerName = Preferences.getPrinter("PrinterName")
if printerName:
printer.setPrinterName(printerName)
preview = QPrintPreviewDialog(printer, self)
preview.paintRequested.connect(self.__generatePrintPreviewClickedFrame)
preview.exec_()
def __generatePrintPreviewClickedFrame(self, printer):
"""
Private slot to generate a print preview of the clicked frame.
@param printer reference to the printer object (QPrinter)
"""
try:
self.__clickedFrame.print_(printer)
except AttributeError:
E5MessageBox.critical(
self,
self.tr("eric6 Web Browser"),
self.tr(
"""<p>Printing is not available due to a bug in PyQt5."""
"""Please upgrade.</p>"""))
return
def __printPdfClickedFrame(self):
"""
Private slot to print the selected frame to PDF.
"""
printer = QPrinter(mode=QPrinter.HighResolution)
if Preferences.getPrinter("ColorMode"):
printer.setColorMode(QPrinter.Color)
else:
printer.setColorMode(QPrinter.GrayScale)
printerName = Preferences.getPrinter("PrinterName")
if printerName:
printer.setPrinterName(printerName)
printer.setOutputFormat(QPrinter.PdfFormat)
name = self.__clickedFrame.url().path().rsplit('/', 1)[-1]
if name:
name = name.rsplit('.', 1)[0]
name += '.pdf'
printer.setOutputFileName(name)
printDialog = QPrintDialog(printer, self)
if printDialog.exec_() == QDialog.Accepted:
try:
self.__clickedFrame.print_(printer)
except AttributeError:
E5MessageBox.critical(
self,
self.tr("eric6 Web Browser"),
self.tr(
"""<p>Printing is not available due to a bug in"""
""" PyQt5. Please upgrade.</p>"""))
return
def __zoomInClickedFrame(self):
"""
Private slot to zoom into the clicked frame.
"""
index = self.__levelForZoom(
int(self.__clickedFrame.zoomFactor() * 100))
if index < len(self.__zoomLevels) - 1:
self.__clickedFrame.setZoomFactor(
self.__zoomLevels[index + 1] / 100)
def __zoomResetClickedFrame(self):
"""
Private slot to reset the zoom factor of the clicked frame.
"""
self.__clickedFrame.setZoomFactor(self.__currentZoom / 100)
def __zoomOutClickedFrame(self):
"""
Private slot to zoom out of the clicked frame.
"""
index = self.__levelForZoom(
int(self.__clickedFrame.zoomFactor() * 100))
if index > 0:
self.__clickedFrame.setZoomFactor(
self.__zoomLevels[index - 1] / 100)
def __showClickedFrameSource(self):
"""
Private slot to show the source of the clicked frame.
"""
from QScintilla.MiniEditor import MiniEditor
src = self.__clickedFrame.toHtml()
editor = MiniEditor(parent=self)
editor.setText(src, "Html")
editor.setLanguage("dummy.html")
editor.show()
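# contentSniff() below is a deliberately naive heuristic: it scans the reply
# data for a handful of common HTML tag fragments and reports a hit as HTML
# content (data is assumed to be a QByteArray, whose contains() performs a
# plain substring search).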
def contentSniff(data):
"""
    Module function to do some content sniffing to check if the data is HTML.
@param data data block to sniff at (string)
@return flag indicating HTML content (boolean)
"""
if data.contains("<!doctype") or \
data.contains("<script") or \
data.contains("<html") or \
data.contains("<!--") or \
data.contains("<head") or \
data.contains("<iframe") or \
data.contains("<h1") or \
data.contains("<div") or \
data.contains("<font") or \
data.contains("<table") or \
data.contains("<a") or \
data.contains("<style") or \
data.contains("<title") or \
data.contains("<b") or \
data.contains("<body") or \
data.contains("<br") or \
data.contains("<p"):
return True
return False
|
gpl-3.0
| -5,708,538,440,104,066,000
| 35.92099
| 79
| 0.535364
| false
| 4.741902
| false
| false
| false
|
souravbadami/oppia
|
core/domain/event_services.py
|
1
|
10774
|
# coding: utf-8
#
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Classes for handling events."""
import inspect
from core import jobs_registry
from core.domain import exp_domain
from core.domain import exp_fetchers
from core.domain import stats_domain
from core.domain import stats_services
from core.platform import models
from core.platform.taskqueue import gae_taskqueue_services as taskqueue_services
import feconf
(stats_models, feedback_models) = models.Registry.import_models([
models.NAMES.statistics, models.NAMES.feedback])
taskqueue_services = models.Registry.import_taskqueue_services()
class BaseEventHandler(object):
"""Base class for event dispatchers."""
# A string denoting the type of the event. Should be specified by
# subclasses and considered immutable.
EVENT_TYPE = None
@classmethod
def _notify_continuous_computation_listeners_async(cls, *args, **kwargs):
"""Dispatch events asynchronously to continuous computation realtime
layers that are listening for them.
"""
taskqueue_services.defer(
jobs_registry.ContinuousComputationEventDispatcher.dispatch_event,
taskqueue_services.QUEUE_NAME_EVENTS, cls.EVENT_TYPE, *args,
**kwargs)
@classmethod
def _handle_event(cls, *args, **kwargs):
"""Perform in-request processing of an incoming event."""
raise NotImplementedError(
'Subclasses of BaseEventHandler should implement the '
'_handle_event() method, using explicit arguments '
'(no *args or **kwargs).')
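    # record() below fans each event out twice: asynchronously to any
    # continuous computation realtime layers listening on the events task
    # queue, and synchronously to the subclass's _handle_event() within the
    # current request.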
@classmethod
def record(cls, *args, **kwargs):
"""Process incoming events.
Callers of event handlers should call this method, not _handle_event().
"""
cls._notify_continuous_computation_listeners_async(*args, **kwargs)
cls._handle_event(*args, **kwargs)
class StatsEventsHandler(BaseEventHandler):
"""Event handler for incremental update of analytics model using aggregated
stats data.
"""
EVENT_TYPE = feconf.EVENT_TYPE_ALL_STATS
@classmethod
def _is_latest_version(cls, exp_id, exp_version):
"""Verifies whether the exploration version for the stats to be stored
corresponds to the latest version of the exploration.
"""
exploration = exp_fetchers.get_exploration_by_id(exp_id)
return exploration.version == exp_version
@classmethod
def _handle_event(cls, exploration_id, exp_version, aggregated_stats):
if cls._is_latest_version(exploration_id, exp_version):
taskqueue_services.defer(
stats_services.update_stats,
taskqueue_services.QUEUE_NAME_STATS, exploration_id,
exp_version, aggregated_stats)
class AnswerSubmissionEventHandler(BaseEventHandler):
"""Event handler for recording answer submissions."""
EVENT_TYPE = feconf.EVENT_TYPE_ANSWER_SUBMITTED
@classmethod
def _notify_continuous_computation_listeners_async(cls, *args, **kwargs):
# Disable this method until we can deal with large answers, otherwise
# the data that is being placed on the task queue is too large.
pass
@classmethod
def _handle_event(
cls, exploration_id, exploration_version, state_name,
interaction_id, answer_group_index, rule_spec_index,
classification_categorization, session_id, time_spent_in_secs,
params, normalized_answer):
"""Records an event when an answer triggers a rule. The answer recorded
here is a Python-representation of the actual answer submitted by the
user.
"""
# TODO(sll): Escape these args?
stats_services.record_answer(
exploration_id, exploration_version, state_name, interaction_id,
stats_domain.SubmittedAnswer(
normalized_answer, interaction_id, answer_group_index,
rule_spec_index, classification_categorization, params,
session_id, time_spent_in_secs))
feedback_is_useful = (
classification_categorization != (
exp_domain.DEFAULT_OUTCOME_CLASSIFICATION))
stats_models.AnswerSubmittedEventLogEntryModel.create(
exploration_id, exploration_version, state_name, session_id,
time_spent_in_secs, feedback_is_useful)
class ExplorationActualStartEventHandler(BaseEventHandler):
"""Event handler for recording exploration actual start events."""
EVENT_TYPE = feconf.EVENT_TYPE_ACTUAL_START_EXPLORATION
@classmethod
def _handle_event(
cls, exp_id, exp_version, state_name, session_id):
stats_models.ExplorationActualStartEventLogEntryModel.create(
exp_id, exp_version, state_name, session_id)
class SolutionHitEventHandler(BaseEventHandler):
"""Event handler for recording solution hit events."""
EVENT_TYPE = feconf.EVENT_TYPE_SOLUTION_HIT
@classmethod
def _handle_event(
cls, exp_id, exp_version, state_name, session_id,
time_spent_in_state_secs):
stats_models.SolutionHitEventLogEntryModel.create(
exp_id, exp_version, state_name, session_id,
time_spent_in_state_secs)
class StartExplorationEventHandler(BaseEventHandler):
"""Event handler for recording exploration start events."""
EVENT_TYPE = feconf.EVENT_TYPE_START_EXPLORATION
@classmethod
def _handle_event(
cls, exp_id, exp_version, state_name, session_id, params,
play_type):
stats_models.StartExplorationEventLogEntryModel.create(
exp_id, exp_version, state_name, session_id, params,
play_type)
class MaybeLeaveExplorationEventHandler(BaseEventHandler):
"""Event handler for recording exploration leave events."""
EVENT_TYPE = feconf.EVENT_TYPE_MAYBE_LEAVE_EXPLORATION
@classmethod
def _handle_event(
cls, exp_id, exp_version, state_name, session_id, time_spent,
params, play_type):
stats_models.MaybeLeaveExplorationEventLogEntryModel.create(
exp_id, exp_version, state_name, session_id, time_spent,
params, play_type)
class CompleteExplorationEventHandler(BaseEventHandler):
"""Event handler for recording exploration completion events."""
EVENT_TYPE = feconf.EVENT_TYPE_COMPLETE_EXPLORATION
@classmethod
def _handle_event(
cls, exp_id, exp_version, state_name, session_id, time_spent,
params, play_type):
stats_models.CompleteExplorationEventLogEntryModel.create(
exp_id, exp_version, state_name, session_id, time_spent,
params, play_type)
class RateExplorationEventHandler(BaseEventHandler):
"""Event handler for recording exploration rating events."""
EVENT_TYPE = feconf.EVENT_TYPE_RATE_EXPLORATION
@classmethod
def _handle_event(cls, exploration_id, user_id, rating, old_rating):
stats_models.RateExplorationEventLogEntryModel.create(
exploration_id, user_id, rating, old_rating)
class StateHitEventHandler(BaseEventHandler):
"""Event handler for recording state hit events."""
EVENT_TYPE = feconf.EVENT_TYPE_STATE_HIT
# TODO(sll): remove params before sending this event to the jobs taskqueue.
@classmethod
def _handle_event(
cls, exp_id, exp_version, state_name, session_id,
params, play_type):
stats_models.StateHitEventLogEntryModel.create(
exp_id, exp_version, state_name, session_id,
params, play_type)
class StateCompleteEventHandler(BaseEventHandler):
"""Event handler for recording state complete events."""
EVENT_TYPE = feconf.EVENT_TYPE_STATE_COMPLETED
@classmethod
def _handle_event(
cls, exp_id, exp_version, state_name, session_id,
time_spent_in_state_secs):
stats_models.StateCompleteEventLogEntryModel.create(
exp_id, exp_version, state_name, session_id,
time_spent_in_state_secs)
class LeaveForRefresherExpEventHandler(BaseEventHandler):
"""Event handler for recording "leave for refresher exploration" events."""
EVENT_TYPE = feconf.EVENT_TYPE_LEAVE_FOR_REFRESHER_EXP
@classmethod
def _handle_event(
cls, exp_id, refresher_exp_id, exp_version, state_name, session_id,
time_spent_in_state_secs):
stats_models.LeaveForRefresherExplorationEventLogEntryModel.create(
exp_id, refresher_exp_id, exp_version, state_name, session_id,
time_spent_in_state_secs)
class FeedbackThreadCreatedEventHandler(BaseEventHandler):
"""Event handler for recording new feedback thread creation events."""
EVENT_TYPE = feconf.EVENT_TYPE_NEW_THREAD_CREATED
@classmethod
def _handle_event(cls, exp_id):
pass
class FeedbackThreadStatusChangedEventHandler(BaseEventHandler):
"""Event handler for recording reopening feedback thread events."""
EVENT_TYPE = feconf.EVENT_TYPE_THREAD_STATUS_CHANGED
@classmethod
def _handle_event(cls, exp_id, old_status, new_status):
pass
class Registry(object):
"""Registry of event handlers."""
# Dict mapping event types to their classes.
_event_types_to_classes = {}
@classmethod
def _refresh_registry(cls):
"""Regenerates the event handler registry."""
cls._event_types_to_classes.clear()
# Find all subclasses of BaseEventHandler in the current module.
for obj_name, obj in globals().iteritems():
if inspect.isclass(obj) and issubclass(obj, BaseEventHandler):
if obj_name == 'BaseEventHandler':
continue
cls._event_types_to_classes[obj.EVENT_TYPE] = obj
@classmethod
def get_event_class_by_type(cls, event_type):
"""Gets an event handler class by its type.
        If the event type is not found, the registry is refreshed once;
        a KeyError is raised if the event type is still missing afterwards.
"""
if event_type not in cls._event_types_to_classes:
cls._refresh_registry()
return cls._event_types_to_classes[event_type]
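# A minimal dispatch sketch (the call site shown here is illustrative and
# not part of this module):
#     handler = Registry.get_event_class_by_type(
#         feconf.EVENT_TYPE_START_EXPLORATION)
#     handler.record(exp_id, exp_version, state_name, session_id, params,
#                    play_type)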
|
apache-2.0
| -1,731,946,290,718,996,500
| 34.675497
| 80
| 0.679785
| false
| 4.131135
| false
| false
| false
|