code
string | signature
string | docstring
string | loss_without_docstring
float64 | loss_with_docstring
float64 | factor
float64 |
|---|---|---|---|---|---|
if len(args) <= 0:
raise ValueError("you didn't pass any arguments to print out")
with Reflect.context(args, **kwargs) as r:
instance = R_CLASS(r, stream, **kwargs)
instance()
|
def r(*args, **kwargs)
|
Similar to pout.v() but gets rid of name and file information so it can be used
in loops and stuff, it will print out where the calls came from at the end of
execution
this just makes it nicer when you're printing a bunch of stuff each iteration
:Example:
for x in range(x):
pout.r(x)
| 13.654149
| 12.522874
| 1.090337
|
'''
same as sys.exit(1) but prints out where it was called from before exiting
I just find this really handy for debugging sometimes
since -- 2013-5-9
exit_code -- int -- if you want it something other than 1
'''
with Reflect.context(args, **kwargs) as r:
instance = V_CLASS(r, stream, **kwargs)
if args:
instance()
else:
instance.writelines([
'exit at line {}\n'.format(instance.reflect.info["line"]),
instance.path_value()
])
exit_code = 1
sys.exit(exit_code)
|
def x(*args, **kwargs)
|
same as sys.exit(1) but prints out where it was called from before exiting
I just find this really handy for debugging sometimes
since -- 2013-5-9
exit_code -- int -- if you want it something other than 1
| 16.196436
| 5.561992
| 2.911985
|
'''
prints "here count"
example --
h(1) # here 1 (/file:line)
h() # here line (/file:line)
count -- integer -- the number you want to put after "here"
'''
with Reflect.context(**kwargs) as r:
kwargs["count"] = count
instance = H_CLASS(r, stream, **kwargs)
instance()
|
def h(count=0, **kwargs)
|
prints "here count"
example --
h(1) # here 1 (/file:line)
h() # here line (/file:line)
count -- integer -- the number you want to put after "here"
| 17.08647
| 4.574818
| 3.734896
|
'''
create a big text break, you just kind of have to run it and see
since -- 2013-5-9
*args -- 1 arg = title if string, rows if int
2 args = title, int
3 args = title, int, sep
'''
with Reflect.context(**kwargs) as r:
kwargs["args"] = args
instance = B_CLASS(r, stream, **kwargs)
instance()
|
def b(*args, **kwargs)
|
create a big text break, you just kind of have to run it and see
since -- 2013-5-9
*args -- 1 arg = title if string, rows if int
2 args = title, int
3 args = title, int, sep
| 28.52482
| 4.055213
| 7.034111
|
'''
kind of like od -c on the command line, basically it dumps each character and info
about that char
since -- 2013-5-9
*args -- tuple -- one or more strings to dump
'''
with Reflect.context(**kwargs) as r:
kwargs["args"] = args
instance = C_CLASS(r, stream, **kwargs)
instance()
|
def c(*args, **kwargs)
|
kind of like od -c on the command line, basically it dumps each character and info
about that char
since -- 2013-5-9
*args -- tuple -- one or more strings to dump
| 28.894947
| 4.662245
| 6.197646
|
if not args:
raise ValueError("you didn't pass any arguments to print out")
with Reflect.context(args, **kwargs) as r:
instance = J_CLASS(r, stream, **kwargs)
instance()
|
def j(*args, **kwargs)
|
dump json
since -- 2013-9-10
*args -- tuple -- one or more json strings to dump
| 16.826601
| 18.345678
| 0.917197
|
with Reflect.context(**kwargs) as r:
kwargs["name"] = name
instance = M_CLASS(r, stream, **kwargs)
instance()
|
def m(name='', **kwargs)
|
Print out memory usage at this point in time
http://docs.python.org/2/library/resource.html
http://stackoverflow.com/a/15448600/5006
http://stackoverflow.com/questions/110259/which-python-memory-profiler-is-recommended
| 20.384535
| 27.864939
| 0.731548
|
'''
really quick and dirty profiling
you start a profile by passing in name, you stop the top profiling by not
passing in a name. You can also call this method using a with statement
This is for when you just want to get a really back of envelope view of
how your fast your code is, super handy, not super accurate
since -- 2013-5-9
example --
p("starting profile")
time.sleep(1)
p() # stop the "starting profile" session
# you can go N levels deep
p("one")
p("two")
time.sleep(0.5)
p() # stop profiling of "two"
time.sleep(0.5)
p() # stop profiling of "one"
with pout.p("three"):
time.sleep(0.5)
name -- string -- pass this in to start a profiling session
return -- context manager
'''
with Reflect.context(**kwargs) as r:
if name:
instance = P_CLASS(r, stream, name, **kwargs)
else:
instance = P_CLASS.pop(r)
instance()
return instance
|
def p(name="", **kwargs)
|
really quick and dirty profiling
you start a profile by passing in name, you stop the top profiling by not
passing in a name. You can also call this method using a with statement
This is for when you just want to get a really back of envelope view of
how your fast your code is, super handy, not super accurate
since -- 2013-5-9
example --
p("starting profile")
time.sleep(1)
p() # stop the "starting profile" session
# you can go N levels deep
p("one")
p("two")
time.sleep(0.5)
p() # stop profiling of "two"
time.sleep(0.5)
p() # stop profiling of "one"
with pout.p("three"):
time.sleep(0.5)
name -- string -- pass this in to start a profiling session
return -- context manager
| 12.637074
| 1.792399
| 7.050369
|
'''
same as time.sleep(seconds) but prints out where it was called before sleeping
and then again after finishing sleeping
I just find this really handy for debugging sometimes
since -- 2017-4-27
:param seconds: float|int, how many seconds to sleep
'''
if seconds <= 0.0:
raise ValueError("Invalid seconds {}".format(seconds))
with Reflect.context(**kwargs) as r:
instance = V_CLASS(r, stream, **kwargs)
instance.writeline("Sleeping {} second{} at {}".format(
seconds,
"s" if seconds != 1.0 else "",
instance.path_value()
))
time.sleep(seconds)
instance.writelines(["...Done Sleeping\n", instance.path_value()])
|
def sleep(seconds, **kwargs)
|
same as time.sleep(seconds) but prints out where it was called before sleeping
and then again after finishing sleeping
I just find this really handy for debugging sometimes
since -- 2017-4-27
:param seconds: float|int, how many seconds to sleep
| 8.636835
| 3.644359
| 2.369919
|
'''
print a backtrace
since -- 7-6-12
inpsect_packages -- boolean -- by default, this only prints code of packages that are not
in the pythonN directories, that cuts out a lot of the noise, set this to True if you
want a full stacktrace
depth -- integer -- how deep you want the stack trace to print (ie, if you only care about
the last three calls, pass in depth=3 so you only get the last 3 rows of the stack)
'''
#frame = inspect.currentframe()
try:
frames = inspect.stack()
kwargs["frames"] = frames
kwargs["inspect_packages"] = inspect_packages
kwargs["depth"] = depth
with Reflect.context(**kwargs) as r:
instance = T_CLASS(r, stream, **kwargs)
instance()
finally:
del frames
|
def t(inspect_packages=False, depth=0, **kwargs)
|
print a backtrace
since -- 7-6-12
inpsect_packages -- boolean -- by default, this only prints code of packages that are not
in the pythonN directories, that cuts out a lot of the noise, set this to True if you
want a full stacktrace
depth -- integer -- how deep you want the stack trace to print (ie, if you only care about
the last three calls, pass in depth=3 so you only get the last 3 rows of the stack)
| 12.188622
| 2.631366
| 4.63205
|
try:
from .compat import builtins
module = sys.modules[__name__]
setattr(builtins, __name__, module)
#builtins.pout = pout
except ImportError:
pass
|
def inject()
|
Injects pout into the builtins module so it can be called from anywhere without
having to be explicitely imported, this is really just for convenience when
debugging
https://stackoverflow.com/questions/142545/python-how-to-make-a-cross-module-variable
| 7.841098
| 5.351947
| 1.465092
|
import glob
from subprocess import Popen, PIPE
cams=[]
for device in glob.glob("/sys/class/video4linux/*"):
devname=device.split("/")[-1]
devfile=os.path.join("/dev",devname)
lis=("v4l2-ctl --list-formats -d "+devfile).split()
p = Popen(lis, stdout=PIPE, stderr=PIPE)
# p.communicate()
# print(dir(p))
# print(p.returncode)
# print(p.stderr.read().decode("utf-8"))
st = p.stdout.read().decode("utf-8")
# print(st)
if (st.lower().find("h264")>-1):
namefile=os.path.join(device, "name")
# print(namefile)
f=open(namefile, "r"); name=f.read(); f.close()
fullname = name.strip() + " ("+devname+")"
cams.append((devfile, fullname))
if (verbose):
for cam in cams:
print(cam)
return cams
|
def getH264V4l2(verbose=False)
|
Find all V4l2 cameras with H264 encoding, and returns a list of tuples with ..
(device file, device name), e.g. ("/dev/video2", "HD Pro Webcam C920 (/dev/video2)")
| 2.932793
| 2.657199
| 1.103716
|
app_ = CustomFlask(
__name__,
instance_path=instance_path or os.environ.get(
'VERIPRESS_INSTANCE_PATH') or os.getcwd(),
instance_relative_config=True
)
app_.config.update(dict(STORAGE_TYPE='file',
THEME='default',
CACHE_TYPE='simple',
MODE='view-only',
ENTRIES_PER_PAGE=5,
FEED_COUNT=10,
SHOW_TOC=True,
TOC_DEPTH=3,
TOC_LOWEST_LEVEL=3,
ALLOW_SEARCH_PAGES=True,
PAGE_SOURCE_ACCESSIBLE=False))
app_.config.from_pyfile(config_filename, silent=True)
theme_folder = os.path.join(app_.instance_path,
'themes', app_.config['THEME'])
# use templates in the selected theme's folder
app_.template_folder = os.path.join(theme_folder, 'templates')
# use static files in the selected theme's folder
app_.theme_static_folder = os.path.join(theme_folder, 'static')
# global static folder
app_.static_folder = os.path.join(app_.instance_path, 'static')
return app_
|
def create_app(config_filename, instance_path=None)
|
Factory function to create Flask application object.
:param config_filename: absolute or relative filename of the config file
:param instance_path: instance path to initialize or run a VeriPress app
:return: a Flask app object
| 3.453675
| 3.38164
| 1.021302
|
if self.config['MODE'] == 'api-only':
# if 'api-only' mode is set, we should not send static files
abort(404)
theme_static_folder = getattr(self, 'theme_static_folder', None)
if theme_static_folder:
try:
return send_from_directory(theme_static_folder, filename)
except NotFound:
pass
return super(CustomFlask, self).send_static_file(filename)
|
def send_static_file(self, filename)
|
Send static files from the static folder
in the current selected theme prior to the global static folder.
:param filename: static filename
:return: response object
| 3.664768
| 3.48513
| 1.051544
|
if self.device == device:
print(self.pre, "setDevice : same device")
return
if self.filterchain: # there's video already
self.clearDevice()
self.device = device
self.video.setDevice(self.device) # inform the video widget so it can start drags
# ManagedFilterChain.addViewPort accepts ViewPort instance
self.filterchain = self.filterchain_group.get(_id = self.device._id)
if self.filterchain:
self.viewport.setXScreenNum(self.n_xscreen)
self.viewport.setWindowId (int(self.video.winId()))
self.filterchain.addViewPort(self.viewport)
|
def setDevice(self, device):
print(self.pre, "setDevice :", device)
if (not device and not self.device): # None can be passed as an argument when the device has not been set yet
return
if (self.device)
|
Sets the video stream
:param device: A rather generic device class. In this case DataModel.RTSPCameraDevice.
| 10.199016
| 10.13269
| 1.006546
|
print(self.pre, "clearDevice")
if not self.device:
return
self.filterchain.delViewPort(self.viewport)
self.filterchain = None
self.device = None
self.video.update()
|
def clearDevice(self)
|
Remove the current stream
| 9.8578
| 9.246626
| 1.066097
|
print(self.pre, ": mouseGestureHandler: ")
# *** single click events ***
if (info.fsingle):
print(self.pre, ": mouseGestureHandler: single click")
if (info.button == QtCore.Qt.LeftButton):
print(self.pre, ": mouseGestureHandler: Left button clicked")
elif (info.button == QtCore.Qt.RightButton):
print(self.pre, ": mouseGestureHandler: Right button clicked")
self.handle_right_single_click(info)
# *** double click events ***
elif (info.fdouble):
if (info.button == QtCore.Qt.LeftButton):
print(
self.pre,
": mouseGestureHandler: Left button double-clicked")
self.handle_left_double_click(info)
elif (info.button == QtCore.Qt.RightButton):
print(
self.pre,
": mouseGestureHandler: Right button double-clicked")
|
def mouseGestureHandler(self, info)
|
This is the callback for MouseClickContext. Passed to VideoWidget as a parameter
| 2.326824
| 2.273804
| 1.023318
|
if (self.double_click_focus == False): # turn focus on
print(self.pre, "handle_left_double_click: focus on")
self.cb_focus()
else: # turn focus off
print(self.pre, "handle_left_double_click: focus off")
self.cb_unfocus()
self.double_click_focus = not(
self.double_click_focus)
|
def handle_left_double_click(self, info)
|
Whatever we want to do, when the VideoWidget has been double-clicked with the left button
| 3.601012
| 3.458732
| 1.041136
|
storage_ = getattr(g, '_storage', None)
if storage_ is None:
storage_type = current_app.config['STORAGE_TYPE']
if storage_type == 'file':
storage_ = g._storage = storages.FileStorage()
else:
raise ConfigurationError(
'Storage type "{}" is not supported.'.format(storage_type))
return storage_
|
def get_storage()
|
Get storage object of current app context,
will create a new one if not exists.
:return: a storage object
:raise: ConfigurationError: storage type in config is not supported
| 2.719813
| 2.760179
| 0.985376
|
self.current_slot = None
if (verbose):
# enable this if you're unsure what's coming here..
print(self.pre, "chooseForm_slot :", element)
if (isinstance(element, type(None))):
self.current_row = None
self.element = None
else:
# print(self.pre,"chooseForm_slot :",element)
assert(hasattr(element, "_id"))
assert(hasattr(element, "classname"))
try:
self.current_row = self.row_instance_by_name[element.classname]
except KeyError:
print(
self.pre,
"chooseForm_slot : no such classname for this FormSet : ",
element.classname)
self.current_row = None
self.element = None
else:
self.resetForm()
self.current_row.get(self.collection, element._id)
self.element = element
self.current_slot = self.current_row.get_column_value("slot")
self.showCurrent()
|
def chooseForm_slot(self, element, element_old)
|
Calling this slot chooses the form to be shown
:param element: an object that has *_id* and *classname* attributes
:param element_old: an object that has *_id* and *classname* attributes
This slot is typically connected to List classes, widget attribute's, currentItemChanged method (List.widget is QListWidget that has currentItemChanged slot), so the element and element_old parameters are QListWidgetItem instances with extra attributes "_id" and "_classname" attached.
Queries the database for element._id
| 4.560148
| 4.598658
| 0.991626
|
self.dropdown_widget.clear() # this will trigger dropdown_changed_slot
self.row_instance_by_index = []
for i, key in enumerate(self.row_instance_by_name.keys()):
row_instance = self.row_instance_by_name[key]
if (row_instance.isActive()):
self.row_instance_by_index.append(row_instance)
display_name = row_instance.getName()
self.dropdown_widget.insertItem(i, display_name)
row_instance.updateWidget()
|
def update_dropdown_list_slot(self)
|
Keep updating the dropdown list. Say, don't let the user choose USB devices if none is available
| 3.493355
| 3.458874
| 1.009969
|
self.reset()
for dic in self.datamodel.camera_collection.get(): # TODO: search directly for RTSPCameraRow
if (self.verbose): print(self.pre, "read : dic", dic)
if (dic["classname"] == DataModel.RTSPCameraRow.__name__):
affinity = -1
if self.cpu_scheme:
affinity = self.cpu_scheme.getAV()
dic.pop("classname")
device = DataModel.RTSPCameraDevice(**dic) # a neat object with useful methods
print("FilterChainGroup : read : slot =", device.getLiveMainSlot())
print("FilterChainGroup : read : address =", device.getMainAddress())
print("FilterChainGroup : read : _id =", device._id)
# chain = ManagedFilterchain( # decoding and branching the stream happens here
# chain = ManagedFilterchain2( # decoding and branching the stream happens here
chain = LiveManagedFilterchain( # decoding and branching the stream happens here
livethread = self.livethread,
openglthreads
= self.gpu_handler.openglthreads,
address = device.getMainAddress(),
slot = device.getLiveMainSlot(),
_id = device._id,
affinity = affinity,
msreconnect = 10000,
# verbose = True,
verbose =False,
shmem_image_dimensions = constant.shmem_image_dimensions,
shmem_n_buffer = constant.shmem_n_buffer,
shmem_image_interval = constant.shmem_image_interval
)
self.chains.append(chain) # important .. otherwise chain will go out of context and get garbage collected
elif (dic["classname"] == DataModel.USBCameraRow.__name__):
affinity = -1
if self.cpu_scheme:
affinity = self.cpu_scheme.getAV()
dic.pop("classname")
device = DataModel.USBCameraDevice(**dic) # a neat object with useful methods
print("FilterChainGroup : read : slot =", device.getLiveMainSlot())
print("FilterChainGroup : read : address =", device.getMainAddress())
print("FilterChainGroup : read : _id =", device._id)
chain = USBManagedFilterchain( # decoding and branching the stream happens here
usbthread = self.usbthread,
openglthreads
= self.gpu_handler.openglthreads,
address = device.getMainAddress(),
slot = device.getLiveMainSlot(),
_id = device._id,
affinity = affinity,
msreconnect = 10000,
# verbose = True,
verbose =False,
shmem_image_dimensions = constant.shmem_image_dimensions,
shmem_n_buffer = constant.shmem_n_buffer,
shmem_image_interval = constant.shmem_image_interval
)
self.chains.append(chain)
|
def read(self)
|
Reads all devices from the database and creates filterchains
TODO: we can, of course, just modify the added / removed cameras
| 3.647189
| 3.480046
| 1.048029
|
raise(AssertionError("out of date"))
new_ids = []
old_ids = []
# collect old ip addresses
for chain in self.chains:
if (self.verbose): print(self.pre, "old :", chain, chain.get__id(), chain.get_address(), chain._id)
old_ids.append(chain.get__id())
# collect devices
devices = []
for dic in self.datamodel.camera_collection.get():
if (self.verbose): print(self.pre, "update : dic", dic)
if (dic["classname"] == DataModel.RTSPCameraRow.__name__):
devices.append(dic)
devices_by_id={}
for dic in devices: # DataModel.RTSPCameraRow instances
_id = dic["_id"]
new_ids.append(_id)
devices_by_id[_id] = dic
if (self.verbose):
print(self.pre, "update : new_ids =", new_ids)
print(self.pre, "update : old_ids =", old_ids)
add_list = list(set(new_ids).difference(set(old_ids))) # cams to be added
rem_list = list(set(old_ids).difference(set(new_ids))) # cams to be removed
if (self.verbose):
print(self.pre, "update : add_list =", add_list)
print(self.pre, "update : rem_list =", rem_list)
# purge removed chains
for i, chain in enumerate(self.chains):
if (chain.get__id() in rem_list):
chain_ = self.chains.pop(i)
if (self.verbose): print(self.pre, "closing chain", chain_)
chain_.close()
# add new chains
for new_address in add_list:
dic = devices_by_id[new_address]
chain = ManagedFilterchain( # decoding and branching the stream happens here
livethread = self.livethread,
openglthreads
= self.gpu_handler.openglthreads,
address = DataModel.RTSPCameraRow.getMainAddressFromDict(dic),
slot = dic["slot"],
_id = dic["_id"],
# affinity = a,
msreconnect = 10000,
verbose = True
)
if (self.verbose): print(self.pre, "adding chain", chain)
self.chains.append(chain)
|
def update(self)
|
Reads all devices from the database. Creates new filterchains and removes old ones
TODO: currently this is broken: if user changes any other field than the ip address, the cameras don't get updated
| 4.133329
| 3.874654
| 1.066761
|
for chain in self.chains:
for key in kwargs:
getter_name = "get_"+key
# scan all possible getters
if (hasattr(chain, getter_name)):
getter = getattr(chain, getter_name) # e.g. "get_address"
if (getter() == kwargs[key]):
return chain
return None
|
def get(self, **kwargs)
|
Find correct filterchain based on generic variables
| 4.994916
| 4.189115
| 1.192356
|
return None
# get filterchain init parameters that are compatible with RTSPCameraDevice input parameters
pars = filterchain.getParDic(DataModel.RTSPCameraDevice.parameter_defs)
# .. and instantiate an RTSPCameraDevice with those parameters
device = DataModel.RTSPCameraDevice(**pars)
print(self.pre, "getDevice :", pars, device)
return device
|
def getDevice(self, **kwargs):
filterchain = self.get(**kwargs)
if not filterchain
|
Like get, but returns a Device instance (RTSPCameraDevice, etc.)
| 13.489644
| 10.274257
| 1.312956
|
return {k: getattr(self, k) for k in filter(
lambda k: not k.startswith('_') and k != 'to_dict', dir(self))}
|
def to_dict(self)
|
Convert attributes and properties to a dict,
so that it can be serialized.
| 3.428604
| 3.007451
| 1.140037
|
return {
'level': self.level,
'id': self.id,
'text': self.text,
'inner_html': self.inner_html,
'children': [child.to_dict() for child in self.children]
}
|
def to_dict(self)
|
Convert self to a dict object for serialization.
| 2.627521
| 2.311705
| 1.136616
|
depth = min(max(depth, 0), 6)
depth = 6 if depth == 0 else depth
lowest_level = min(max(lowest_level, 1), 6)
toc = self._root.to_dict()['children']
def traverse(curr_toc, dep, lowest_lvl, curr_depth=1):
if curr_depth > dep:
# clear all items of this depth and exit the recursion
curr_toc.clear()
return
items_to_remove = []
for item in curr_toc:
if item['level'] > lowest_lvl:
# record item with low header level, for removing it later
items_to_remove.append(item)
else:
traverse(item['children'], dep, lowest_lvl, curr_depth + 1)
[curr_toc.remove(item) for item in items_to_remove]
traverse(toc, depth, lowest_level)
return toc
|
def toc(self, depth=6, lowest_level=6)
|
Get table of content of currently fed HTML string.
:param depth: the depth of TOC
:param lowest_level: the allowed lowest level of header tag
:return: a list representing the TOC
| 3.11719
| 3.227699
| 0.965762
|
toc = self.toc(depth=depth, lowest_level=lowest_level)
if not toc:
return ''
def map_toc_list(toc_list):
result = ''
if toc_list:
result += '<ul>\n'
result += ''.join(
map(lambda x: '<li>'
'<a href="#{}">{}</a>{}'
'</li>\n'.format(
x['id'], x['inner_html'],
map_toc_list(x['children'])),
toc_list)
)
result += '</ul>'
return result
return map_toc_list(toc)
|
def toc_html(self, depth=6, lowest_level=6)
|
Get TOC of currently fed HTML string in form of HTML string.
:param depth: the depth of TOC
:param lowest_level: the allowed lowest level of header tag
:return: an HTML string
| 2.694659
| 3.032434
| 0.888613
|
m = re.match(r'^h([123456])$', tag, flags=re.IGNORECASE)
if not m:
return None
return int(m.group(1))
|
def _get_level(tag)
|
Match the header level in the given tag name,
or None if it's not a header tag.
| 3.090827
| 2.431822
| 1.270992
|
from darknet.core import darknet_with_cuda
if (darknet_with_cuda()): # its using cuda
free = getFreeGPU_MB()
print("Yolo: requiredGPU_MB: required, free", n, free)
if (free == -1): # could not detect ..
return True
return (free>=n)
else:
return True
|
def requiredGPU_MB(self, n)
|
Required GPU memory in MBytes
| 11.272901
| 11.22172
| 1.004561
|
if (self.requiredGPU_MB(self.required_mb)):
self.analyzer = YoloV3Analyzer(verbose = self.verbose)
else:
self.warning_message = "WARNING: not enough GPU memory!"
self.analyzer = None
|
def postActivate_(self)
|
Whatever you need to do after creating the shmem client
| 13.64822
| 13.122101
| 1.040094
|
self.widget = QtWidgets.QTextEdit()
self.widget.setStyleSheet(style.detector_test)
self.widget.setReadOnly(True)
self.signals.objects.connect(self.objects_slot)
return self.widget
|
def getWidget(self)
|
Some ideas for your widget:
- Textual information (alert, license place number)
- Check boxes : if checked, send e-mail to your mom when the analyzer spots something
- .. or send an sms to yourself
- You can include the cv2.imshow window to the widget to see how the analyzer proceeds
| 7.092404
| 6.418005
| 1.105079
|
'''
get the type of val
there are multiple places where we want to know if val is an object, or a string, or whatever,
this method allows us to find out that information
since -- 7-10-12
val -- mixed -- the value to check
return -- string -- the type
'''
t = 'DEFAULT'
# http://docs.python.org/2/library/types.html
# func_types = (
# types.FunctionType,
# types.BuiltinFunctionType,
# types.MethodType,
# types.UnboundMethodType,
# types.BuiltinFunctionType,
# types.BuiltinMethodType,
# classmethod
# )
if self.is_primitive():
t = 'DEFAULT'
elif self.is_dict():
t = 'DICT'
elif self.is_list():
t = 'LIST'
elif self.is_array():
t = 'ARRAY'
elif self.is_tuple():
t = 'TUPLE'
elif self.is_type():
t = 'TYPE'
elif self.is_binary():
t = 'BINARY'
elif self.is_str():
t = 'STRING'
elif self.is_exception():
t = 'EXCEPTION'
elif self.is_module():
# this has to go before the object check since a module will pass the object tests
t = 'MODULE'
elif self.is_callable():
t = 'FUNCTION'
# not doing this one since it can cause the class instance to do unexpected
# things just to print it out
#elif isinstance(val, property):
# uses the @property decorator and the like
#t = 'PROPERTY'
elif self.is_dict_proxy():
# maybe we have a dict proxy?
t = 'DICT_PROXY'
elif self.is_generator():
t = 'GENERATOR'
elif self.is_set():
t = 'SET'
elif self.is_object():
t = 'OBJECT'
# elif isinstance(val, func_types) and hasattr(val, '__call__'):
# # this has to go after object because lots of times objects can be classified as functions
# # http://stackoverflow.com/questions/624926/
# t = 'FUNCTION'
elif self.is_regex():
t = 'REGEX'
else:
t = 'DEFAULT'
return t
|
def typename(self)
|
get the type of val
there are multiple places where we want to know if val is an object, or a string, or whatever,
this method allows us to find out that information
since -- 7-10-12
val -- mixed -- the value to check
return -- string -- the type
| 3.720349
| 2.779642
| 1.338427
|
if is_py2:
return isinstance(
self.val,
(
types.NoneType,
types.BooleanType,
types.IntType,
types.LongType,
types.FloatType
)
)
else:
return isinstance(
self.val,
(
type(None),
bool,
int,
float
)
)
|
def is_primitive(self)
|
is the value a built-in type?
| 2.659363
| 2.398672
| 1.108681
|
'''
turn an iteratable value into a string representation
iterator -- iterator -- the value to be iterated through
name_callback -- callback -- if not None, a function that will take the key of each iteration
prefix -- string -- what will be prepended to the generated value
left_paren -- string -- what will open the generated value
right_paren -- string -- what will close the generated value
depth -- integer -- how deep into recursion we are
return -- string
'''
indent = 1 if depth > 0 else 0
s = []
s.append('{}{}'.format(prefix, self._add_indent(left_paren, indent)))
s_body = []
for k, v in iterator:
k = k if name_callback is None else name_callback(k)
v = Value(v, depth+1)
try:
# TODO -- right here we should check some flag or something to
# see if lists should render objects
if k is None:
s_body.append("{}".format(v))
else:
s_body.append("{}: {}".format(k, v))
except RuntimeError as e:
# I've never gotten this to work
s_body.append("{}: ... Recursion error ...".format(k))
except UnicodeError as e:
print(v.val)
print(type(v.val))
s_body = ",\n".join(s_body)
s_body = self._add_indent(s_body, indent + 1)
s.append(s_body)
s.append("{}".format(self._add_indent(right_paren, indent)))
return "\n".join(s)
|
def _str_iterator(self, iterator, name_callback=None, prefix="\n", left_paren='[', right_paren=']', depth=0)
|
turn an iteratable value into a string representation
iterator -- iterator -- the value to be iterated through
name_callback -- callback -- if not None, a function that will take the key of each iteration
prefix -- string -- what will be prepended to the generated value
left_paren -- string -- what will open the generated value
right_paren -- string -- what will close the generated value
depth -- integer -- how deep into recursion we are
return -- string
| 3.748026
| 2.72983
| 1.372989
|
'''
add whitespace to the beginning of each line of val
link -- http://code.activestate.com/recipes/66055-changing-the-indentation-of-a-multi-line-string/
val -- string
indent -- integer -- how much whitespace we want in front of each line of val
return -- string -- val with more whitespace
'''
if isinstance(val, Value):
val = val.string_value()
return String(val).indent(indent_count)
|
def _add_indent(self, val, indent_count)
|
add whitespace to the beginning of each line of val
link -- http://code.activestate.com/recipes/66055-changing-the-indentation-of-a-multi-line-string/
val -- string
indent -- integer -- how much whitespace we want in front of each line of val
return -- string -- val with more whitespace
| 6.146233
| 2.077879
| 2.957935
|
try:
ret = getattr(val, key, default_val)
except Exception as e:
logger.exception(e)
ret = default_val
return ret
|
def _getattr(self, val, key, default_val)
|
wrapper around global getattr(...) method that suppresses any exception raised
| 3.323394
| 2.857077
| 1.163215
|
'''
get the full namespaced (module + class) name of the val object
since -- 6-28-12
val -- mixed -- the value (everything is an object) object
default -- string -- the default name if a decent name can't be found programmatically
return -- string -- the full.module.Name
'''
module_name = ''
if src_file:
module_name = '{}.'.format(self._getattr(val, '__module__', default)).lstrip('.')
class_name = self._getattr(val, '__name__', None)
if not class_name:
class_name = default
cls = self._getattr(val, '__class__', None)
if cls:
class_name = self._getattr(cls, '__name__', default)
full_name = "{}{}".format(module_name, class_name)
return full_name
|
def _get_name(self, val, src_file, default='Unknown')
|
get the full namespaced (module + class) name of the val object
since -- 6-28-12
val -- mixed -- the value (everything is an object) object
default -- string -- the default name if a decent name can't be found programmatically
return -- string -- the full.module.Name
| 5.106729
| 2.032238
| 2.512859
|
'''
return the source file path
since -- 7-19-12
val -- mixed -- the value whose path you want
return -- string -- the path, or something like 'Unknown' if you can't find the path
'''
path = default
try:
# http://stackoverflow.com/questions/6761337/inspect-getfile-vs-inspect-getsourcefile
# first try and get the actual source file
source_file = inspect.getsourcefile(val)
if not source_file:
# get the raw file since val doesn't have a source file (could be a .pyc or .so file)
source_file = inspect.getfile(val)
if source_file:
path = os.path.realpath(source_file)
except TypeError as e:
path = default
return path
|
def _get_src_file(self, val, default='Unknown')
|
return the source file path
since -- 7-19-12
val -- mixed -- the value whose path you want
return -- string -- the path, or something like 'Unknown' if you can't find the path
| 5.769792
| 2.983042
| 1.934197
|
count = e.end - e.start
#return "." * count, e.end
global ENCODING_REPLACE_CHAR
return ENCODING_REPLACE_CHAR * count, e.end
|
def handle_decode_replace(e)
|
this handles replacing bad characters when printing out
http://www.programcreek.com/python/example/3643/codecs.register_error
http://bioportal.weizmann.ac.il/course/python/PyMOTW/PyMOTW/docs/codecs/index.html
https://pymotw.com/2/codecs/
| 8.202489
| 8.482296
| 0.967013
|
# (re)create the widget, do the same for children
# how children are placed on the parent widget, depends on the subclass
self.window = self.ContainerWindow(
    self.signals, self.title, self.parent)
# send to correct x-screen
# NOTE: show() must precede setScreen() so a native windowHandle exists.
self.window.show()
self.window.windowHandle().setScreen(qscreen)
self.n_xscreen = self.gpu_handler.getXScreenNum(qscreen)  # the correct x-screen number must be passed upstream, to the VideoContainer
# continue window / widget construction in the correct x screen
self.main_widget = self.ContainerWidget(self.window)
self.main_layout = QtWidgets.QVBoxLayout(self.main_widget)
self.window.setCentralWidget(self.main_widget)
# add here any extra turf to the widget you want in addition to the
# grid
# create the grid
self.grid_widget = self.GridWidget(self.main_widget)
self.main_layout.addWidget(self.grid_widget)
self.grid_layout = QtWidgets.QGridLayout(self.grid_widget)
self.grid_layout.setHorizontalSpacing(2)
self.grid_layout.setVerticalSpacing(2)
# ( int left, int top, int right, int bottom )
self.grid_layout.setContentsMargins(0, 0, 0, 0)

# Small popup menu for moving this container between x-screens.
class ScreenMenu(QuickMenu):
    title = "Change Screen"
    elements = [
        QuickMenuElement(title="Screen 1"),
        QuickMenuElement(title="Screen 2")
    ]

if (len(self.gpu_handler.true_screens) > 1):
    # so, there's more than a single x screen: create a button for
    # changing x-screens
    self.button = QtWidgets.QPushButton(
        "Change Screen", self.main_widget)
    self.main_layout.addWidget(self.button)
    self.button.setSizePolicy(
        QtWidgets.QSizePolicy.Minimum,
        QtWidgets.QSizePolicy.Minimum)
    self.button.clicked.connect(self.change_xscreen_slot)
# place child widgets into the freshly created grid
self.placeChildren()
|
def makeWidget(self, qscreen: QtGui.QScreen)
|
TODO: activate after gpu-hopping has been debugged
self.screenmenu = ScreenMenu(self.window)
self.screenmenu.screen_1.triggered.connect(self.test_slot)
self.screenmenu.screen_2.triggered.connect(self.test_slot)
| 4.928752
| 4.927983
| 1.000156
|
# Idempotent: a second call is a no-op.
if (self.closed):
    return
print(self.pre, "close")
for child in self.children:
    child.close()
# Drop references to the shared resources; they are owned elsewhere.
self.openglthread = None
self.gpu_handler = None
self.closed = True
# Detach signal propagation BEFORE closing the window, otherwise
# window.close() would emit close and re-enter this method.
self.window.unSetPropagate()  # we don't want the window to send the close signal .. which would call this *again* (through close_slot)
self.window.close()
|
def close(self)
|
Called by the main gui to close the containers. Called also when the container widget is closed
Closed by clicking the window: goes through self.close_slot
Closed programmatically: use this method directly
| 11.542007
| 10.025857
| 1.151224
|
ids = []
for child in self.children:
device = child.getDevice()
if device:
ids.append(device._id) # e.g. DataModel.RTSPCameraDevice._id
else:
ids.append(None)
# gather all information to re-construct this RootVideoContainer
dic = { # these are used when re-instantiating the view
"classname": self.__class__.__name__,
"kwargs": { # parameters that we're used to instantiate this class
"title" : self.title,
"n_xscreen" : self.n_xscreen,
"child_class" : self.child_class,
"child_class_pars" : self.get_child_class_pars() # serialize only relevant child class pars
},
# these parameters are used by deserialize
"x": self.window.x(),
"y": self.window.y(),
"width": self.window.width(),
"height": self.window.height(),
"ids": ids
}
return dic
|
def serialize(self)
|
Serialize information about the widget: coordinates, size, which cameras are selected.
| 6.206206
| 5.807245
| 1.068701
|
# Forget the cached frame and all movement bookkeeping so the next
# analysis pass starts from a clean state.
self.prevframe = None
self.t0 = 0
self.ismoving = False
self.wasmoving = False
|
def reset(self)
|
Reset analyzer state
| 8.9214
| 8.599535
| 1.037428
|
widget = QtWidgets.QLabel("NO MOVEMENT YET")
widget.setStyleSheet(style.detector_test)
self.signals.start_move.connect(lambda : widget.setText("MOVEMENT START"))
self.signals.stop_move. connect(lambda : widget.setText("MOVEMENT STOP"))
return widget
|
def getWidget(self)
|
Some ideas for your widget:
- Textual information (alert, license place number)
- Check boxes : if checked, send e-mail to your mom when the analyzer spots something
- .. or send an sms to yourself
- You can include the cv2.imshow window to the widget to see how the analyzer proceeds
| 6.77112
| 6.865405
| 0.986267
|
return url_rule(api_blueprint, rules, strict_slashes=strict_slashes,
view_func=json_api(api_func) if api_func else None,
*args, **kwargs)
|
def rule(rules, strict_slashes=False, api_func=None, *args, **kwargs)
|
Add a API route to the 'api' blueprint.
:param rules: rule string or string list
:param strict_slashes: same to Blueprint.route, but default value is False
:param api_func: a function that returns a JSON serializable object
or a Flask Response, or raises ApiException
:param args: other args that should be passed to Blueprint.route
:param kwargs: other kwargs that should be passed to Blueprint.route
:return:
| 4.321701
| 5.200716
| 0.830982
|
qapp = QtCore.QCoreApplication.instance()
if not qapp:  # QApplication has not been started
    return
screens = qapp.screens()
# Screens sharing a virtual desktop report each other as virtualSiblings;
# everything except one representative per sibling-group is "virtual".
virtual_screens = set()
for screen in screens:
    # if screen has been deemed as "virtual", don't check its siblings
    if (screen not in virtual_screens):
        siblings = screen.virtualSiblings()
        # remove the current screen under scrutiny from the siblings
        # list
        virtual_screens.update(set(siblings).difference(set([screen])))
        # .. the ones left over are virtual
# print("GPUHandler: findXScreens: virtual screens",virtual_screens)
true_screens = list(set(screens) - virtual_screens)
# sort'em: preserve the original qapp.screens() ordering
for screen in true_screens:
    self.true_screens.insert(screens.index(screen), screen)
print("GPUHandler: findXScreens: true screens:", self.true_screens)
|
def findXScreens(self)
|
let's find out which screens are virtual
screen, siblings:
One big virtual desktop:
A [A, B, C]
B [A, B, C]
C [A, B, C]
A & B in one xscreen, C in another:
A [A, B]
B [A, B]
C [C]
| 5.840959
| 5.544174
| 1.053531
|
# Load the datamodel (config.dat, devices.dat) from the config dir.
self.dm = DataModel(directory = tools.getConfigDir())
if self.first_start:
    # Fresh install: start from empty, persisted collections.
    print(pre, "readDB : first start")
    self.dm.clearAll()
    self.dm.saveAll()
# Recover from a corrupt camera collection by wiping it.
if not self.dm.checkCameraCollection():
    self.dm.clearCameraCollection()
|
def readDB(self)
|
Datamodel includes the following files: config.dat, devices.dat
| 10.303978
| 9.83156
| 1.048051
|
# Autogenerate grid_1x1_slot .. grid_4x4_slot member functions,
# plus one slot per machine-vision class.
for dim in range(1, 5):
    self.make_grid_slot(dim, dim)
for mvision_class in self.mvision_classes:
    self.make_mvision_slot(mvision_class)
|
def generateMethods(self)
|
Generate some member functions
| 12.492975
| 11.327808
| 1.102859
|
class QuickWindow(QtWidgets.QMainWindow):
    # Signals need a QObject host; close/show events are re-emitted
    # through this helper so callers can connect to them.
    class Signals(QtCore.QObject):
        close = QtCore.Signal()
        show = QtCore.Signal()

    def __init__(self, blocking = False, parent = None, nude = False):
        super().__init__(parent)
        self.propagate = True  # send signals or not
        self.setStyleSheet(style.main_gui)
        if (blocking):
            self.setWindowModality(QtCore.Qt.ApplicationModal)
        if (nude):
            # http://doc.qt.io/qt-5/qt.html#WindowType-enum
            # TODO: create a widget for a proper splashscreen (omitting X11 and centering manually)
            # self.setWindowFlags(QtCore.Qt.Popup) # Qt 5.9+ : setFlags()
            # self.setWindowFlags(QtCore.Qt.SplashScreen | QtCore.Qt.WindowStaysOnTopHint)
            self.setWindowFlags(QtCore.Qt.Dialog)
        self.signals = self.Signals()

    def closeEvent(self, e):
        if (self.propagate):
            self.signals.close.emit()
        e.accept()

    def showEvent(self, e):
        if (self.propagate):
            self.signals.show.emit()
        e.accept()

    def setPropagate(self):
        self.propagate = True

    def unSetPropagate(self):
        self.propagate = False

# Wrap the given widget as the central widget of the new window.
win = QuickWindow(blocking = blocking, nude = nude)
win.setCentralWidget(widget)
win.setLayout(QtWidgets.QHBoxLayout())
win.setWindowTitle(name)
return win
|
def QCapsulate(self, widget, name, blocking = False, nude = False)
|
Helper function that encapsulates QWidget into a QMainWindow
| 3.309016
| 3.29774
| 1.003419
|
class QuickWindow(QtWidgets.QMainWindow):
    # Re-emits close/show events as Qt signals (signals require a QObject).
    class Signals(QtCore.QObject):
        close = QtCore.Signal()
        show = QtCore.Signal()

    def __init__(self, blocking = False, parent = None):
        super().__init__(parent)
        self.propagate = True  # send signals or not
        self.setStyleSheet(style.main_gui)
        if (blocking):
            self.setWindowModality(QtCore.Qt.ApplicationModal)
        self.signals = self.Signals()
        # The tab widget is the central widget; pages are added below.
        self.tab = QtWidgets.QTabWidget()
        self.setCentralWidget(self.tab)
        self.setLayout(QtWidgets.QHBoxLayout())

    def closeEvent(self, e):
        if (self.propagate):
            self.signals.close.emit()
        e.accept()

    def showEvent(self, e):
        if (self.propagate):
            self.signals.show.emit()
        e.accept()

    def setPropagate(self):
        self.propagate = True

    def unSetPropagate(self):
        self.propagate = False

win = QuickWindow(blocking = blocking)
win.setWindowTitle(name)
# widget_list entries are (widget, "tab name") tuples
for w in widget_list:
    win.tab.addTab(w[0], w[1])
return win
|
def QTabCapsulate(self, name, widget_list, blocking = False)
|
Helper function that encapsulates QWidget into a QMainWindow
:param widget_list: List of tuples : [(widget,"name"), (widget,"name"), ..]
| 2.61023
| 2.657547
| 0.982195
|
# Rebuild the camera tree from the datamodel's camera collection.
self.treelist.reset_()
self.server = ServerListItem(
    name = "Localhost", ip = "127.0.0.1", parent = self.root)
# Map a row's classname to the (device, tree item) classes used for it.
handlers = {
    DataModel.RTSPCameraRow.__name__:
        (DataModel.RTSPCameraDevice, RTSPCameraListItem),
    DataModel.USBCameraRow.__name__:
        (DataModel.USBCameraDevice, USBCameraListItem),
}
devices = []
for row in self.dm.camera_collection.get():
    # print(pre, "updateCameraTree : row", row)
    entry = handlers.get(row["classname"])
    if entry is None:
        continue
    device_cls, item_cls = entry
    row.pop("classname")
    devices.append(item_cls(camera=device_cls(**row), parent=self.server))
self.treelist.update()
self.treelist.expandAll()
|
def updateCameraTree(self)
|
self.server1 = ServerListItem(
name="First Server", ip="192.168.1.20", parent=self.root)
| 5.021948
| 4.17897
| 1.201719
|
# Wire menu actions (several of them autogenerated) to their slots.
# *** When camera list has been closed, re-create the cameralist tree and update filterchains ***
# self.manage_cameras_win.signals.close.connect(self.updateCameraTree) # now put into save_camera_config_slot
# self.manage_cameras_win.signals.close.connect(self.filterchain_group.update) # TODO: use this once fixed
# self.manage_cameras_win.signals.close.connect(self.filterchain_group.read) # TODO: eh.. lets be sure of this .. (are we releasing slots in the LiveThread etc.)
# self.manage_cameras_win.signals.close.connect(self.save_camera_config_slot)
# self.manage_memory_container.signals.save.connect(self.save_memory_conf_slot)
# *** Menu bar connections ***
# the self.filemenu.exit attribute was autogenerated
self.filemenu.exit. triggered.connect(self.exit_slot)
self.filemenu.save_window_layout. triggered.connect(
    self.save_window_layout_slot)
self.filemenu.load_window_layout. triggered.connect(
    self.load_window_layout_slot)
self.configmenu.configuration_dialog.triggered.connect(self.config_dialog_slot)
self.viewmenu.camera_list. triggered.connect(self.camera_list_slot)
self.aboutmenu.about_valkka_live. triggered.connect(self.about_slot)
# *** Connect autogenerated menu calls into autogenerated slot functions ***
for i in range(1, 5):
    # gets member function grid_ixi_slot
    slot_func = getattr(self, "grid_%ix%i_slot" % (i, i))
    # gets member function grid_ixi from self.viewmenu.video_grid
    menu_func = getattr(self.viewmenu.video_grid,
                        "grid_%ix%i" % (i, i))
    menu_func.triggered.connect(slot_func)
    # i.e., like this : self.viewmenu.video_grid.grid_1x1.triggered.connect(slot_func)
# *** autogenerated machine vision menu and slots ***
for cl in self.mvision_classes:
    getattr(self.mvisionmenu,cl.name).triggered.connect(getattr(self,cl.name+"_slot"))
|
def makeLogic(self)
|
self.configmenu.manage_cameras. triggered.connect(
self.manage_cameras_slot)
self.configmenu.memory_usage. triggered.connect(
self.memory_usage_slot)
| 6.016417
| 5.379118
| 1.118476
|
container_list = []
mvision_container_list = []
for container in self.containers:
print("gui: serialize containers : container=", container)
container_list.append(container.serialize())
for container in self.mvision_containers:
mvision_container_list.append(container.serialize())
return {"container_list" : container_list, "mvision_container_list" : mvision_container_list}
|
def serializeContainers(self)
|
Serializes the current view of open video grids (i.e. the view)
| 3.415047
| 3.391465
| 1.006953
|
# Fork max_instances multiprocesses for every machine-vision class,
# grouped by class tag.  (Fix: removed the unused `name` local and the
# stale process_avail bookkeeping left as commented-out code.)
self.process_map = {}  # tag -> list of started multiprocesses
for mvision_class in self.mvision_classes:
    procs = self.process_map.setdefault(mvision_class.tag, [])
    for _ in range(mvision_class.max_instances):
        p = mvision_class()
        p.start()
        procs.append(p)
|
def startProcesses(self)
|
Create and start python multiprocesses
Starting a multiprocess creates a process fork.
In theory, there should be no problem in first starting the multithreading environment and after that perform forks (only the thread requestin the fork is copied), but in practice, all kinds of weird behaviour arises.
Read all about it in here : http://www.linuxprogrammingblog.com/threads-and-fork-think-twice-before-using-them
| 4.107056
| 4.300865
| 0.954937
|
# Build a closure that creates an n x m grid container and register it
# as the autogenerated member grid_nxm_slot.
def slot_func():
    new_container = container.VideoContainerNxM(
        gpu_handler=self.gpu_handler,
        filterchain_group=self.filterchain_group,
        n_dim=n,
        m_dim=m)
    new_container.signals.closing.connect(self.rem_container_slot)
    self.containers.append(new_container)
setattr(self, "grid_%ix%i_slot" % (n, m), slot_func)
|
def make_grid_slot(self, n, m)
|
Create an n x m video grid, show it and add it to the list of video containers
| 7.224446
| 6.592166
| 1.095914
|
ret = 0
try:
filepath = SiteCustomizeFile()
if filepath.is_injected():
logger.info("Pout has already been injected into {}".format(filepath))
else:
if filepath.inject():
logger.info("Injected pout into {}".format(filepath))
else:
logger.info("Failed to inject pout into {}".format(filepath))
except IOError as e:
ret = 1
logger.info(str(e))
return ret
|
def main_inject(args)
|
mapped to pout.inject on the command line, makes it easy to make pout global
without having to actually import it in your python environment
.. since:: 2018-08-13
:param args: Namespace, the parsed CLI arguments passed into the application
:returns: int, the return code of the CLI
| 4.555569
| 4.520798
| 1.007691
|
if args.site_packages:
    # Short form: just the site-packages path.
    logger.info(SitePackagesDir())
else:
    # Full report on the interpreter and the pout installation.
    logger.info(f"Python executable: {sys.executable}")
    logger.info(f"Python version: {platform.python_version()}")
    logger.info(f"Python site-packages: {SitePackagesDir()}")
    logger.info(f"Python sitecustomize: {SiteCustomizeFile()}")
    # https://stackoverflow.com/questions/4152963/get-the-name-of-current-script-with-python
    logger.info(f"Pout executable: {os.path.abspath(os.path.expanduser(str(sys.argv[0])))}")
    logger.info(f"Pout version: {pout.__version__}")
    sitefile = SiteCustomizeFile()
    logger.info(f"Pout injected: {sitefile.is_injected()}")
|
def main_info(args)
|
Just prints out info about the pout installation
.. since:: 2018-08-20
:param args: Namespace, the parsed CLI arguments passed into the application
:returns: int, the return code of the CLI
| 3.234375
| 3.153906
| 1.025514
|
try:
func(*args, **kwargs)
except Exception as e:
return e
|
def catch(func, *args, **kwargs)
|
Call the supplied function with the supplied arguments,
catching and returning any exception that it throws.
Arguments:
func: the function to run.
*args: positional arguments to pass into the function.
**kwargs: keyword arguments to pass into the function.
Returns:
If the function throws an exception, return the exception.
If the function does not throw an exception, return None.
| 3.25982
| 3.847343
| 0.847291
|
start_time = time_module.time()
func(*args, **kwargs)
end_time = time_module.time()
return end_time - start_time
|
def time(func, *args, **kwargs)
|
Call the supplied function with the supplied arguments,
and return the total execution time as a float in seconds.
The precision of the returned value depends on the precision of
`time.time()` on your platform.
Arguments:
func: the function to run.
*args: positional arguments to pass into the function.
**kwargs: keyword arguments to pass into the function.
Returns:
Execution time of the function as a float in seconds.
| 2.291855
| 3.120487
| 0.734454
|
# Route the debugger to the real terminal stream (sys.__stdout__) so it
# stays usable even when sys.stdout has been captured, and start at the
# caller's frame (f_back) rather than inside this helper.
# https://github.com/nose-devs/nose/blob/master/nose/tools/nontrivial.py
pdb.Pdb(stdout=sys.__stdout__).set_trace(sys._getframe().f_back)
|
def set_trace()
|
Start a Pdb instance at the calling frame, with stdout routed to sys.__stdout__.
| 2.966886
| 2.679972
| 1.107058
|
n_min = self.numargs + 2
if len(data) > 0:
if len(data) <= n_min:
raise ValueError("At least {} data points must be provided.".format(n_min))
lmom_ratios = lm.lmom_ratios(data, nmom=n_min)
elif not lmom_ratios:
raise Exception("Either `data` or `lmom_ratios` must be provided.")
elif len(lmom_ratios) < n_min:
raise ValueError("At least {} number of L-moments must be provided.".format(n_min))
return self._lmom_fit(lmom_ratios)
|
def lmom_fit(self, data=[], lmom_ratios=[])
|
Fit the distribution function to the given data or given L-moments.
:param data: Data to use in calculating the distribution parameters
:type data: array_like
:param lmom_ratios: L-moments (ratios) l1, l2, t3, t4, .. to use in calculating the distribution parameters
:type lmom_ratios: array_like
:returns: Distribution parameters in `scipy` order, e.g. scale, loc, shape
:rtype: :class:`OrderedDict`
| 2.894167
| 2.963572
| 0.976581
|
ratios = self.lmom_ratios(*args, nmom=nmom, **kwds)
moments = ratios[0:2]
moments += [ratio * moments[1] for ratio in ratios[2:]]
return moments
|
def lmom(self, *args, nmom=5, **kwds)
|
Compute the distribution's L-moments, e.g. l1, l2, l3, l4, ..
:param args: Distribution parameters in order of shape(s), loc, scale
:type args: float
:param nmom: Number of moments to calculate
:type nmom: int
:param kwds: Distribution parameters as named arguments. See :attr:`rv_continous.shapes` for names of shape
parameters
:type kwds: float
:returns: List of L-moments
:rtype: list
| 4.021928
| 3.779906
| 1.064029
|
if nmom > 20:
return ValueError("Parameter nmom too large. Max of 20.")
shapes, loc, scale = self._parse_args(*args, **kwds)
if scale <= 0:
return ValueError("Invalid scale parameter.")
return self._lmom_ratios(*shapes, loc=loc, scale=scale, nmom=nmom)
|
def lmom_ratios(self, *args, nmom=5, **kwds)
|
Compute the distribution's L-moment ratios, e.g. l1, l2, t3, t4, ..
:param args: Distribution parameters in order of shape(s), loc, scale
:type args: float
:param nmom: Number of moments to calculate
:type nmom: int
:param kwds: Distribution parameters as named arguments. See :attr:`rv_continous.shapes` for names of shape
parameters
:type kwds: float
:returns: List of L-moment ratios
:rtype: list
| 4.392749
| 4.056904
| 1.082784
|
plugin_list = load_plugins()
module = sys.modules['__main__']
plugin_list.insert(0, ObjectSupplier(module))
return run_with_plugins(plugin_list)
|
def run()
|
Run all the test classes in the main module.
Returns: exit code as an integer.
The default behaviour (which may be overridden by plugins) is to return a 0
exit code if the test run succeeded, and 1 if it failed.
| 7.113204
| 7.924142
| 0.897662
|
composite = core.PluginComposite(plugin_list)
to_run = composite.get_object_to_run()
test_run = core.TestRun(to_run, composite)
test_run.run()
return composite.get_exit_code()
|
def run_with_plugins(plugin_list)
|
Carry out a test run with the supplied list of plugin instances.
The plugins are expected to identify the object to run.
Parameters:
plugin_list: a list of plugin instances (objects which implement some subset of PluginInterface)
Returns: exit code as an integer.
The default behaviour (which may be overridden by plugins) is to return a 0
exit code if the test run succeeded, and 1 if it failed.
| 5.387253
| 5.079962
| 1.060491
|
self.spec_file = args and args.specs or None
self.cwd = cwd or os.getcwd()
self.file = file
return self.spec_file is not None
|
def initialise(self, args=None, env=None, file=None, cwd=None)
|
Filthy hack: we provide file and cwd here rather than as constructor
args because Mr Hodgson decided to pass stdout as the only parameter
to __init__.
File should only be passed during tests.
| 4.56517
| 4.512582
| 1.011654
|
if nmom <= 5:
return _samlmusmall(data, nmom)
else:
return _samlmularge(data, nmom)
|
def lmom_ratios(data, nmom=5)
|
Estimate `nmom` number of L-moments from a sample `data`.
:param data: Sequence of (sample) data
:type data: list or array-like sequence
:param nmom: number of L-moments to estimate
:type nmom: int
:return: L-moment ratios like this: l1, l2, t3, t4, t5, .. . As in: items 3 and higher are L-moment ratios.
:rtype: list
| 8.812089
| 9.734569
| 0.905237
|
url = self._construct_url(url_path)
payload = kwargs
payload.update({'api_token': self.api_token})
return self._make_request(url, payload, headers)
|
def _run(self, url_path, headers=None, **kwargs)
|
Perform a request against the POEditor API v2 and return the parsed response
| 3.859255
| 3.596426
| 1.07308
|
warnings.warn(
"POEditor API v1 is deprecated. Use POEditorAPI._run method to call API v2",
DeprecationWarning, stacklevel=2
)
url = "https://poeditor.com/api/"
payload = kwargs
payload.update({'action': action, 'api_token': self.api_token})
return self._make_request(url, payload, headers)
|
def _apiv1_run(self, action, headers=None, **kwargs)
|
Kept for backwards compatibility of this client
See "self.clear_reference_language"
| 4.643667
| 4.653016
| 0.997991
|
open_ = False if not data['open'] or data['open'] == '0' else True
public = False if not data['public'] or data['public'] == '0' else True
output = {
'created': parse_datetime(data['created']),
'id': int(data['id']),
'name': data['name'],
'open': open_,
'public': public,
}
# the detail view returns more info than the list view
# see https://poeditor.com/docs/api#projects_view
for key in ['description', 'reference_language', 'terms']:
if key in data:
output[key] = data[key]
return output
|
def _project_formatter(self, data)
|
Project object
| 3.991826
| 3.796995
| 1.051312
|
data = self._run(
url_path="projects/list"
)
projects = data['result'].get('projects', [])
return [self._project_formatter(item) for item in projects]
|
def list_projects(self)
|
Returns the list of projects owned by user.
| 6.249891
| 5.50211
| 1.135908
|
description = description or ''
data = self._run(
url_path="projects/add",
name=name,
description=description
)
return data['result']['project']['id']
|
def create_project(self, name, description=None)
|
creates a new project. Returns the id of the project (if successful)
| 6.088038
| 5.465326
| 1.113939
|
kwargs = {}
if name is not None:
kwargs['name'] = name
if description is not None:
kwargs['description'] = description
if reference_language is not None:
kwargs['reference_language'] = reference_language
data = self._run(
url_path="projects/update",
id=project_id,
**kwargs
)
return data['result']['project']['id']
|
def update_project(self, project_id, name=None, description=None,
reference_language=None)
|
Updates project settings (name, description, reference language)
If optional parameters are not sent, their respective fields are not updated.
| 2.557462
| 2.62539
| 0.974126
|
data = self._run(
url_path="projects/view",
id=project_id
)
return self._project_formatter(data['result']['project'])
|
def view_project_details(self, project_id)
|
Returns project's details.
| 8.643789
| 7.639503
| 1.13146
|
data = self._run(
url_path="languages/list",
id=project_id
)
return data['result'].get('languages', [])
|
def list_project_languages(self, project_id)
|
Returns project languages, percentage of translation done for each and the
datetime (UTC - ISO 8601) when the last change was made.
| 7.356534
| 7.577388
| 0.970854
|
self._run(
url_path="languages/add",
id=project_id,
language=language_code
)
return True
|
def add_language_to_project(self, project_id, language_code)
|
Adds a new language to project
| 7.59166
| 6.99683
| 1.085014
|
kwargs = {}
if fuzzy_trigger is not None:
kwargs['fuzzy_trigger'] = fuzzy_trigger
data = self._run(
url_path="terms/update",
id=project_id,
data=json.dumps(data),
**kwargs
)
return data['result']['terms']
|
def update_terms(self, project_id, data, fuzzy_trigger=None)
|
Updates project terms. Lets you change the text, context, reference, plural and tags.
>>> data = [
{
"term": "Add new list",
"context": "",
"new_term": "Save list",
"new_context": "",
"reference": "\/projects",
"plural": "",
"comment": "",
"tags": [
"first_tag",
"second_tag"
]
},
{
"term": "Display list",
"context": "",
"new_term": "Show list",
"new_context": ""
}
]
| 3.459944
| 3.439026
| 1.006082
|
data = self._run(
url_path="terms/add_comment",
id=project_id,
data=json.dumps(data)
)
return data['result']['terms']
|
def add_comment(self, project_id, data)
|
Adds comments to existing terms.
>>> data = [
{
"term": "Add new list",
"context": "",
"comment": "This is a button"
},
{
"term": "one project found",
"context": "",
"comment": "Make sure you translate the plural forms"
},
{
"term": "Show all projects",
"context": "",
"comment": "This is a button"
}
]
| 6.996257
| 5.871839
| 1.191493
|
kwargs = {}
if fuzzy_trigger is not None:
kwargs['fuzzy_trigger'] = fuzzy_trigger
data = self._run(
url_path="languages/update",
id=project_id,
language=language_code,
data=json.dumps(data),
**kwargs
)
return data['result']['translations']
|
def update_project_language(self, project_id, language_code, data, fuzzy_trigger=None)
|
Inserts / overwrites translations.
>>> data = [
{
"term": "Projects",
"context": "project list",
"translation": {
"content": "Des projets",
"fuzzy": 0
}
}
]
| 3.425697
| 3.460057
| 0.99007
|
if file_type not in self.FILE_TYPES:
raise POEditorArgsException(
'content_type: file format {}'.format(self.FILE_TYPES))
if filters and isinstance(filters, str) and filters not in self.FILTER_BY:
raise POEditorArgsException(
"filters - filter results by {}".format(self.FILTER_BY))
elif filters and set(filters).difference(set(self.FILTER_BY)):
raise POEditorArgsException(
"filters - filter results by {}".format(self.FILTER_BY))
data = self._run(
url_path="projects/export",
id=project_id,
language=language_code,
type=file_type,
filters=filters,
tags=tags
)
# The link of the file (expires after 10 minutes).
file_url = data['result']['url']
# Download file content:
res = requests.get(file_url, stream=True)
if not local_file:
tmp_file = tempfile.NamedTemporaryFile(
delete=False, suffix='.{}'.format(file_type))
tmp_file.close()
local_file = tmp_file.name
with open(local_file, 'w+b') as po_file:
for data in res.iter_content(chunk_size=1024):
po_file.write(data)
return file_url, local_file
|
def export(self, project_id, language_code, file_type='po', filters=None,
tags=None, local_file=None)
|
Return terms / translations
filters - filter by self._filter_by
tags - filter results by tags;
local_file - save content into it. If None, save content into
random temp file.
>>> tags = 'name-of-tag'
>>> tags = ["name-of-tag"]
>>> tags = ["name-of-tag", "name-of-another-tag"]
>>> filters = 'translated'
>>> filters = ["translated"]
>>> filters = ["translated", "not_fuzzy"]
| 2.994526
| 2.951842
| 1.01446
|
# Validate the updating mode against the three supported values.
options = [
    self.UPDATING_TERMS,
    self.UPDATING_TERMS_TRANSLATIONS,
    self.UPDATING_TRANSLATIONS
]
if updating not in options:
    raise POEditorArgsException(
        'Updating arg must be in {}'.format(options)
    )
# A language code is mandatory whenever translations are touched.
options = [
    self.UPDATING_TERMS_TRANSLATIONS,
    self.UPDATING_TRANSLATIONS
]
if language_code is None and updating in options:
    raise POEditorArgsException(
        'Language code is required only if updating is '
        'terms_translations or translations)'
    )
# tags / sync_terms only apply when terms are updated; drop them for a
# translations-only upload.
if updating == self.UPDATING_TRANSLATIONS:
    tags = None
    sync_terms = None
# Special content type:
# Normalize every field to the string form the multipart upload expects
# ('1'/'0' flags, '' for missing values).
tags = tags or ''
language_code = language_code or ''
sync_terms = '1' if sync_terms else '0'
overwrite = '1' if overwrite else '0'
fuzzy_trigger = '1' if fuzzy_trigger else '0'
project_id = str(project_id)
with open(file_path, 'r+b') as local_file:
    data = self._run(
        url_path="projects/upload",
        id=project_id,
        language=language_code,
        file=local_file,
        updating=updating,
        tags=tags,
        sync_terms=sync_terms,
        overwrite=overwrite,
        fuzzy_trigger=fuzzy_trigger
    )
return data['result']
|
def _upload(self, project_id, updating, file_path, language_code=None,
overwrite=False, sync_terms=False, tags=None, fuzzy_trigger=None)
|
Internal: updates terms / translations
File uploads are limited to one every 30 seconds
| 2.823787
| 2.775344
| 1.017455
|
return self._upload(
project_id=project_id,
updating=self.UPDATING_TERMS,
file_path=file_path,
language_code=language_code,
overwrite=overwrite,
sync_terms=sync_terms,
tags=tags,
fuzzy_trigger=fuzzy_trigger
)
|
def update_terms(self, project_id, file_path=None, language_code=None,
overwrite=False, sync_terms=False, tags=None, fuzzy_trigger=None)
|
Updates terms
overwrite: set it to True if you want to overwrite translations
sync_terms: set it to True if you want to sync your terms (terms that
are not found in the uploaded file will be deleted from project
and the new ones added). Ignored if updating = translations
tags: Add tags to the project terms; available when updating terms or terms_translations;
you can use the following keys: "all" - for the all the imported terms, "new" - for
the terms which aren't already in the project, "obsolete" - for the terms which are
in the project but not in the imported file and "overwritten_translations" - for the
terms for which translations change
fuzzy_trigger: set it to True to mark corresponding translations from the
other languages as fuzzy for the updated values
| 2.15209
| 2.263
| 0.95099
|
return self._upload(
project_id=project_id,
updating=self.UPDATING_TERMS_TRANSLATIONS,
file_path=file_path,
language_code=language_code,
overwrite=overwrite,
sync_terms=sync_terms,
tags=tags,
fuzzy_trigger=fuzzy_trigger
)
|
def update_terms_translations(self, project_id, file_path=None,
language_code=None, overwrite=False,
sync_terms=False, tags=None, fuzzy_trigger=None)
|
Updates terms translations
overwrite: set it to True if you want to overwrite translations
sync_terms: set it to True if you want to sync your terms (terms that
are not found in the uploaded file will be deleted from project
and the new ones added). Ignored if updating = translations
tags: Add tags to the project terms; available when updating terms or terms_translations;
you can use the following keys: "all" - for the all the imported terms, "new" - for
the terms which aren't already in the project, "obsolete" - for the terms which are
in the project but not in the imported file and "overwritten_translations" - for the
terms for which translations change
fuzzy_trigger: set it to True to mark corresponding translations from the
other languages as fuzzy for the updated values
| 2.153485
| 2.231086
| 0.965218
|
return self._upload(
project_id=project_id,
updating=self.UPDATING_TRANSLATIONS,
file_path=file_path,
language_code=language_code,
overwrite=overwrite,
fuzzy_trigger=fuzzy_trigger
)
|
def update_translations(self, project_id, file_path=None,
language_code=None, overwrite=False, fuzzy_trigger=None)
|
Updates translations
overwrite: set it to True if you want to overwrite definitions
fuzzy_trigger: set it to True to mark corresponding translations from the
other languages as fuzzy for the updated values
| 2.553104
| 2.822958
| 0.904407
|
data = self._run(
url_path="contributors/list",
id=project_id,
language=language_code
)
return data['result'].get('contributors', [])
|
def list_contributors(self, project_id=None, language_code=None)
|
Returns the list of contributors
| 5.680344
| 5.536276
| 1.026022
|
self._run(
url_path="contributors/add",
id=project_id,
name=name,
email=email,
language=language_code
)
return True
|
def add_contributor(self, project_id, name, email, language_code)
|
Adds a contributor to a project language
| 4.534467
| 4.846455
| 0.935626
|
self._run(
url_path="contributors/add",
id=project_id,
name=name,
email=email,
admin=True
)
return True
|
def add_administrator(self, project_id, name, email)
|
Adds an administrator to a project
| 6.319361
| 6.110998
| 1.034096
|
self._run(
url_path="contributors/remove",
id=project_id,
email=email,
language=language
)
return True
|
def remove_contributor(self, project_id, email, language)
|
Removes a contributor
| 5.638948
| 5.871542
| 0.960386
|
html = html.replace('SLs.sls=', '').replace(';SLs.showSuggestion();', '')
html = json.loads(html)
return html['suggestions']
|
def parse_stations(html)
|
Strips JS code, loads JSON
| 12.405407
| 10.089741
| 1.229507
|
# parse data from the details view
rsp = requests.get(data['details'])
soup = BeautifulSoup(rsp.text, "html.parser")
# get departure delay
delay_departure_raw = soup.find('div', class_="routeStart").find('span', class_=["delay", "delayOnTime"])
if delay_departure_raw:
delay_departure = calculate_delay(data['departure'],
delay_departure_raw.text)
else:
delay_departure = 0
# get arrival delay
delay_arrival_raw = soup.find('div', class_=["routeEnd","routeEndAdditional"]).find('span', class_=["delay", "delayOnTime"])
if delay_arrival_raw:
delay_arrival = calculate_delay(data['arrival'],
delay_arrival_raw.text)
else:
delay_arrival = 0
# save the parsed data
if delay_departure + delay_arrival == 0:
data['ontime'] = True
else:
data['ontime'] = False
data['delay'] = {
'delay_departure': int(delay_departure),
'delay_arrival': int(delay_arrival)
}
# TODO: this should not be hardcoded!
data['canceled'] = False
return data
|
def parse_delay(data)
|
Parse the delay
| 2.720852
| 2.728339
| 0.997256
|
original = datetime.strptime(original, '%H:%M')
delayed = datetime.strptime(delay, '%H:%M')
diff = delayed - original
return diff.total_seconds() // 60
|
def calculate_delay(original, delay)
|
Calculate the delay
| 2.728044
| 2.839602
| 0.960714
|
query = {
'start': 1,
'S': station + '?',
'REQ0JourneyStopsB': limit
}
rsp = requests.get('http://reiseauskunft.bahn.de/bin/ajax-getstop.exe/dn', params=query)
return parse_stations(rsp.text)
|
def stations(self, station, limit=10)
|
Find stations for given queries
Args:
station (str): search query
limit (int): limit number of results
| 12.048142
| 13.357241
| 0.901993
|
query = {
'S': origin,
'Z': destination,
'date': dt.strftime("%d.%m.%y"),
'time': dt.strftime("%H:%M"),
'start': 1,
'REQ0JourneyProduct_opt0': 1 if only_direct else 0
}
rsp = requests.get('http://mobile.bahn.de/bin/mobil/query.exe/dox?', params=query)
return parse_connections(rsp.text)
|
def connections(self, origin, destination, dt=datetime.now(), only_direct=False)
|
Find connections between two stations
Args:
origin (str): origin station
destination (str): destination station
dt (datetime): date and time for query
only_direct (bool): only direct connections
| 6.616667
| 6.831784
| 0.968512
|
chunklen = int(math.ceil(float(len(sequence)) / float(n)))
return [
sequence[ i*chunklen : (i+1)*chunklen ] for i in range(n)
]
|
def _scatter(sequence, n)
|
Scatters elements of ``sequence`` into ``n`` blocks.
| 3.167078
| 3.086982
| 1.025946
|
tag = tag if tag else None
tasks = self._api.lease(
numTasks=num_tasks,
seconds=seconds,
groupByTag=(tag is not None),
tag=tag,
)
if not len(tasks):
raise QueueEmpty
task = tasks[0]
return totask(task)
|
def lease(self, seconds=600, num_tasks=1, tag=None)
|
Acquires a lease on the topmost N unowned tasks in the specified queue.
Required query parameters: leaseSecs, numTasks
| 6.458703
| 6.287451
| 1.027237
|
try:
return self._api.purge()
except AttributeError:
while True:
lst = self.list()
if len(lst) == 0:
break
for task in lst:
self.delete(task)
self.wait()
return self
|
def purge(self)
|
Deletes all tasks in the queue.
| 5.553281
| 4.70793
| 1.179559
|
# LOOP is a module-level flag: the SIGINT handler flips it so the loop
# below exits after the in-flight task completes.
global LOOP
if not callable(stop_fn) and stop_fn is not None:
    raise ValueError("stop_fn must be a callable. " + str(stop_fn))
elif not callable(stop_fn):
    # No stop condition supplied: poll forever (until SIGINT).
    stop_fn = lambda: False
def random_exponential_window_backoff(n):
    # Sleep drawn uniformly from [0, min(2**n, max_backoff_window)),
    # with n itself capped by min_backoff_window.
    n = min(n, min_backoff_window)
    # 120 sec max b/c on avg a request every ~250msec if 500 containers
    # in contention which seems like a quite reasonable volume of traffic
    # to handle
    high = min(2 ** n, max_backoff_window)
    return random.uniform(0, high)
def printv(*args, **kwargs):
    # Print only when verbose output was requested.
    if verbose:
        print(*args, **kwargs)
LOOP = True
def sigint_handler(signum, frame):
    # First Ctrl-C requests a graceful stop; current task still finishes.
    global LOOP
    printv("Interrupted. Exiting after this task completes...")
    LOOP = False
# Install our handler, remembering the previous one so it can be restored.
prev_sigint_handler = signal.getsignal(signal.SIGINT)
signal.signal(signal.SIGINT, sigint_handler)
if log_fn is None:
    log_fn = printv
tries = 0      # attempts since last success; drives the backoff window
executed = 0   # tasks successfully executed (returned to the caller)
backoff = False
# An empty queue triggers the same backoff path as caller-supplied exceptions.
backoff_exceptions = tuple(list(backoff_exceptions) + [ QueueEmpty ])
while LOOP:
    task = 'unknown' # for error message prior to leasing
    try:
        task = self.lease(seconds=int(lease_seconds))
        tries += 1
        printv(task)
        task.execute(*execute_args, **execute_kwargs)
        executed += 1
        printv("Delete enqueued task...")
        self.delete(task)
        # NOTE(review): 'succesfully' typo in this log message.
        log_fn('INFO', task , "succesfully executed")
        tries = 0
    except backoff_exceptions:
        backoff = True
    except Exception as e:
        printv('ERROR', task, "raised {}\n {}".format(e , traceback.format_exc()))
        raise #this will restart the container in kubernetes
    if stop_fn():
        break
    if backoff:
        time.sleep(random_exponential_window_backoff(tries))
        backoff = False
printv("Task execution loop exited.")
# Restore whatever SIGINT handling was in place before polling started.
signal.signal(signal.SIGINT, prev_sigint_handler)
return executed
|
def poll(
self, lease_seconds=LEASE_SECONDS, tag=None,
verbose=False, execute_args=[], execute_kwargs={},
stop_fn=None, backoff_exceptions=[], min_backoff_window=30,
max_backoff_window=120, log_fn=None
)
|
Poll a queue until a stop condition is reached (default forever). Note
that this function is not thread safe as it requires a global variable
to intercept SIGINT.
lease_seconds: each task should be leased for this many seconds
tag: if specified, query for only tasks that match this tag
execute_args / execute_kwargs: pass these arguments to task execution
backoff_exceptions: A list of exceptions that instead of causing a crash,
instead cause the polling to back off for an increasing exponential
random window.
min_backoff_window: The minimum sized window (in seconds) to select a
random backoff time.
max_backoff_window: The window doubles each retry. This is the maximum value
in seconds.
stop_fn: A boolean returning function that accepts no parameters. When
it returns True, the task execution loop will terminate. It is evaluated
once after every task.
log_fn: Feed error messages to this function, default print (when verbose is enabled).
verbose: print out the status of each step
Return: number of tasks executed
| 4.636437
| 4.573469
| 1.013768
|
body = {
"payload": task.payload(),
"queueName": self._queue_name,
"groupByTag": True,
"tag": task.__class__.__name__
}
def cloud_insertion(api):
api.insert(body, delay_seconds)
if len(self._threads):
self.put(cloud_insertion)
else:
cloud_insertion(self._api)
return self
|
def insert(self, task, args=[], kwargs={}, delay_seconds=0)
|
Insert a task into an existing queue.
| 6.364992
| 6.117147
| 1.040516
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.