_id
stringlengths 2
7
| title
stringlengths 1
88
| partition
stringclasses 3
values | text
stringlengths 75
19.8k
| language
stringclasses 1
value | meta_information
dict |
|---|---|---|---|---|---|
q18200
|
Facts.load_facts
|
train
|
def load_facts(self, facts):
    """Load a set of facts into the CLIPS data base.
    The C equivalent of the CLIPS load-facts command.
    Facts can be loaded from a string or from a text file.
    """
    encoded = facts.encode()
    # A path that exists on disk is treated as a facts file,
    # anything else as an inline facts string.
    if os.path.exists(encoded):
        outcome = lib.EnvLoadFacts(self._env, encoded)
    else:
        outcome = lib.EnvLoadFactsFromString(self._env, encoded, -1)
    if outcome == -1:
        raise CLIPSError(self._env)
    return outcome
|
python
|
{
"resource": ""
}
|
q18201
|
Facts.save_facts
|
train
|
def save_facts(self, path, mode=SaveMode.LOCAL_SAVE):
    """Save the facts in the system to the specified file.
    The Python equivalent of the CLIPS save-facts command.
    """
    outcome = lib.EnvSaveFacts(self._env, path.encode(), mode)
    # The C API signals failure with -1.
    if outcome == -1:
        raise CLIPSError(self._env)
    return outcome
|
python
|
{
"resource": ""
}
|
q18202
|
Fact.asserted
|
train
|
def asserted(self):
    """True if the fact has been asserted within CLIPS."""
    # Index 0 marks a fact that was never asserted:
    # https://sourceforge.net/p/clipsrules/discussion/776945/thread/4f04bb9e/
    return self.index != 0 and bool(lib.EnvFactExistp(self._env, self._fact))
|
python
|
{
"resource": ""
}
|
q18203
|
Fact.template
|
train
|
def template(self):
    """The associated Template."""
    deftemplate = lib.EnvFactDeftemplate(self._env, self._fact)
    return Template(self._env, deftemplate)
|
python
|
{
"resource": ""
}
|
q18204
|
Fact.assertit
|
train
|
def assertit(self):
    """Assert the fact within the CLIPS environment."""
    if self.asserted:
        raise RuntimeError("Fact already asserted")
    # Fill unset slots with their template defaults before asserting.
    lib.EnvAssignFactSlotDefaults(self._env, self._fact)
    asserted_fact = lib.EnvAssert(self._env, self._fact)
    if asserted_fact == ffi.NULL:
        raise CLIPSError(self._env)
|
python
|
{
"resource": ""
}
|
q18205
|
Fact.retract
|
train
|
def retract(self):
    """Retract the fact from the CLIPS environment."""
    outcome = lib.EnvRetract(self._env, self._fact)
    if outcome != 1:
        raise CLIPSError(self._env)
|
python
|
{
"resource": ""
}
|
q18206
|
ImpliedFact.append
|
train
|
def append(self, value):
    """Append an element to the fact.

    Raises RuntimeError if the fact was already asserted:
    the multifield can only be modified before assertion.
    """
    if self.asserted:
        raise RuntimeError("Fact already asserted")
    self._multifield.append(value)
|
python
|
{
"resource": ""
}
|
q18207
|
ImpliedFact.extend
|
train
|
def extend(self, values):
    """Append multiple elements to the fact.

    Raises RuntimeError if the fact was already asserted:
    the multifield can only be modified before assertion.
    """
    if self.asserted:
        raise RuntimeError("Fact already asserted")
    self._multifield.extend(values)
|
python
|
{
"resource": ""
}
|
q18208
|
ImpliedFact.assertit
|
train
|
def assertit(self):
    """Assert the fact within CLIPS."""
    # Copy the accumulated multifield values into a CLIPS data object
    # and store them in the implied (unnamed) slot before asserting.
    payload = clips.data.DataObject(self._env)
    payload.value = list(self._multifield)
    outcome = lib.EnvPutFactSlot(
        self._env, self._fact, ffi.NULL, payload.byref)
    if outcome != 1:
        raise CLIPSError(self._env)
    super(ImpliedFact, self).assertit()
|
python
|
{
"resource": ""
}
|
q18209
|
TemplateFact.update
|
train
|
def update(self, sequence=None, **mapping):
    """Add multiple slot values to the fact.

    sequence may be a dictionary mapping slot names to values
    or an iterable of (slot, value) pairs.
    Additional slot values may be given as keyword arguments.
    """
    if sequence is not None:
        if isinstance(sequence, dict):
            for slot, value in sequence.items():
                self[slot] = value
        else:
            for slot, value in sequence:
                self[slot] = value
    # BUG FIX: this branch previously iterated `sequence` instead of
    # `mapping`, crashing with a TypeError when only keyword arguments
    # were given (sequence is None) and silently re-applying the
    # sequence values otherwise.
    for slot, value in mapping.items():
        self[slot] = value
|
python
|
{
"resource": ""
}
|
q18210
|
Template.name
|
train
|
def name(self):
    """Template name."""
    raw_name = lib.EnvGetDeftemplateName(self._env, self._tpl)
    return ffi.string(raw_name).decode()
|
python
|
{
"resource": ""
}
|
q18211
|
Template.module
|
train
|
def module(self):
    """The module in which the Template is defined.
    Python equivalent of the CLIPS deftemplate-module command.
    """
    module_name = ffi.string(lib.EnvDeftemplateModule(self._env, self._tpl))
    return Module(self._env, lib.EnvFindDefmodule(self._env, module_name))
|
python
|
{
"resource": ""
}
|
q18212
|
Template.watch
|
train
|
def watch(self, flag):
    """Whether or not the Template is being watched.

    flag is coerced to an int (0/1) as expected by the CLIPS C API.
    """
    lib.EnvSetDeftemplateWatch(self._env, int(flag), self._tpl)
|
python
|
{
"resource": ""
}
|
q18213
|
Template.slots
|
train
|
def slots(self):
    """Iterate over the Slots of the Template."""
    # Implied (ordered) templates have a single unnamed slot: no Slots.
    if self.implied:
        return ()
    names = clips.data.DataObject(self._env)
    lib.EnvDeftemplateSlotNames(self._env, self._tpl, names.byref)
    return tuple(TemplateSlot(self._env, self._tpl, name.encode())
                 for name in names.value)
|
python
|
{
"resource": ""
}
|
q18214
|
Template.new_fact
|
train
|
def new_fact(self):
    """Create a new Fact from this template."""
    fact = lib.EnvCreateFact(self._env, self._tpl)
    if fact == ffi.NULL:
        raise CLIPSError(self._env)
    # Delegates to the module-level new_fact() factory (defined elsewhere
    # in the module), which presumably wraps the raw fact pointer.
    return new_fact(self._env, fact)
|
python
|
{
"resource": ""
}
|
q18215
|
Template.undefine
|
train
|
def undefine(self):
    """Undefine the Template.
    Python equivalent of the CLIPS undeftemplate command.
    The object becomes unusable after this method has been called.
    """
    if lib.EnvUndeftemplate(self._env, self._tpl) != 1:
        raise CLIPSError(self._env)
    # Invalidate the wrapper, consistent with Class.undefine:
    # the docstring declares the object unusable afterwards.
    self._env = None
|
python
|
{
"resource": ""
}
|
q18216
|
TemplateSlot.multifield
|
train
|
def multifield(self):
    """True if the slot is a multifield slot."""
    flag = lib.EnvDeftemplateSlotMultiP(self._env, self._tpl, self._name)
    return bool(flag)
|
python
|
{
"resource": ""
}
|
q18217
|
TemplateSlot.default_type
|
train
|
def default_type(self):
    """The default value type for this Slot.
    The Python equivalent of the CLIPS deftemplate-slot-defaultp function.
    """
    raw = lib.EnvDeftemplateSlotDefaultP(self._env, self._tpl, self._name)
    return TemplateSlotDefaultType(raw)
|
python
|
{
"resource": ""
}
|
q18218
|
Classes.instances_changed
|
train
|
def instances_changed(self):
    """True if any instance has changed."""
    changed = bool(lib.EnvGetInstancesChanged(self._env))
    # Reading the flag resets it so the next call reports fresh changes.
    lib.EnvSetInstancesChanged(self._env, int(False))
    return changed
|
python
|
{
"resource": ""
}
|
q18219
|
Classes.classes
|
train
|
def classes(self):
    """Iterate over the defined Classes."""
    current = lib.EnvGetNextDefclass(self._env, ffi.NULL)
    while current != ffi.NULL:
        yield Class(self._env, current)
        current = lib.EnvGetNextDefclass(self._env, current)
|
python
|
{
"resource": ""
}
|
q18220
|
Classes.find_class
|
train
|
def find_class(self, name):
    """Find the Class by its name."""
    defclass = lib.EnvFindDefclass(self._env, name.encode())
    if defclass != ffi.NULL:
        return Class(self._env, defclass)
    raise LookupError("Class '%s' not found" % name)
|
python
|
{
"resource": ""
}
|
q18221
|
Classes.instances
|
train
|
def instances(self):
    """Iterate over the defined Instances."""
    definstance = lib.EnvGetNextInstance(self._env, ffi.NULL)
    while definstance != ffi.NULL:
        yield Instance(self._env, definstance)
        definstance = lib.EnvGetNextInstance(self._env, definstance)
|
python
|
{
"resource": ""
}
|
q18222
|
Classes.find_instance
|
train
|
def find_instance(self, name, module=None):
    """Find the Instance by its name."""
    # NULL module searches the current scope.
    search_scope = ffi.NULL if module is None else module
    instance = lib.EnvFindInstance(
        self._env, search_scope, name.encode(), 1)
    if instance == ffi.NULL:
        raise LookupError("Instance '%s' not found" % name)
    return Instance(self._env, instance)
|
python
|
{
"resource": ""
}
|
q18223
|
Classes.load_instances
|
train
|
def load_instances(self, instances):
    """Load a set of instances into the CLIPS data base.
    The C equivalent of the CLIPS load-instances command.
    Instances can be loaded from a string,
    from a file or from a binary file.
    """
    encoded = instances.encode()
    if not os.path.exists(encoded):
        return self._load_instances_string(encoded)
    # Binary format is attempted first; a failure falls back to text.
    try:
        return self._load_instances_binary(encoded)
    except CLIPSError:
        return self._load_instances_text(encoded)
|
python
|
{
"resource": ""
}
|
q18224
|
Classes.restore_instances
|
train
|
def restore_instances(self, instances):
    """Restore a set of instances into the CLIPS data base.
    The Python equivalent of the CLIPS restore-instances command.
    Instances can be passed as a set of strings or as a file.
    """
    encoded = instances.encode()
    # An existing path is restored from file, otherwise from the string.
    if os.path.exists(encoded):
        outcome = lib.EnvRestoreInstances(self._env, encoded)
    else:
        outcome = lib.EnvRestoreInstancesFromString(self._env, encoded, -1)
    if outcome == -1:
        raise CLIPSError(self._env)
    return outcome
|
python
|
{
"resource": ""
}
|
q18225
|
Classes.save_instances
|
train
|
def save_instances(self, path, binary=False, mode=SaveMode.LOCAL_SAVE):
    """Save the instances in the system to the specified file.
    If binary is True, the instances will be saved in binary format.
    The Python equivalent of the CLIPS save-instances command.
    """
    saver = lib.EnvBinarySaveInstances if binary else lib.EnvSaveInstances
    outcome = saver(self._env, path.encode(), mode)
    # Zero saved instances indicates failure for this C API.
    if outcome == 0:
        raise CLIPSError(self._env)
    return outcome
|
python
|
{
"resource": ""
}
|
q18226
|
Classes.make_instance
|
train
|
def make_instance(self, command):
    """Create and initialize an instance of a user-defined class.
    command must be a string in the form:
    (<instance-name> of <class-name> <slot-override>*)
    <slot-override> :== (<slot-name> <constant>*)
    Python equivalent of the CLIPS make-instance command.
    """
    instance = lib.EnvMakeInstance(self._env, command.encode())
    if instance == ffi.NULL:
        raise CLIPSError(self._env)
    return Instance(self._env, instance)
|
python
|
{
"resource": ""
}
|
q18227
|
Class.name
|
train
|
def name(self):
    """Class name."""
    raw_name = lib.EnvGetDefclassName(self._env, self._cls)
    return ffi.string(raw_name).decode()
|
python
|
{
"resource": ""
}
|
q18228
|
Class.module
|
train
|
def module(self):
    """The module in which the Class is defined.
    Python equivalent of the CLIPS defclass-module command.
    """
    modname = ffi.string(lib.EnvDefclassModule(self._env, self._cls))
    defmodule = lib.EnvFindDefmodule(self._env, modname)
    return Module(self._env, defmodule)
|
python
|
{
"resource": ""
}
|
q18229
|
Class.watch_instances
|
train
|
def watch_instances(self, flag):
    """Whether or not the Class Instances are being watched.

    flag is coerced to an int (0/1) as expected by the CLIPS C API.
    """
    lib.EnvSetDefclassWatchInstances(self._env, int(flag), self._cls)
|
python
|
{
"resource": ""
}
|
q18230
|
Class.watch_slots
|
train
|
def watch_slots(self, flag):
    """Whether or not the Class Slots are being watched.

    flag is coerced to an int (0/1) as expected by the CLIPS C API.
    """
    lib.EnvSetDefclassWatchSlots(self._env, int(flag), self._cls)
|
python
|
{
"resource": ""
}
|
q18231
|
Class.new_instance
|
train
|
def new_instance(self, name):
    """Create a new raw instance from this Class.
    No slot overrides or class default initializations
    are performed for the instance.
    This function bypasses message-passing.
    """
    raw_instance = lib.EnvCreateRawInstance(
        self._env, self._cls, name.encode())
    if raw_instance == ffi.NULL:
        raise CLIPSError(self._env)
    return Instance(self._env, raw_instance)
|
python
|
{
"resource": ""
}
|
q18232
|
Class.find_message_handler
|
train
|
def find_message_handler(self, handler_name, handler_type='primary'):
    """Returns the MessageHandler given its name and type for this class."""
    handler_index = lib.EnvFindDefmessageHandler(
        self._env, self._cls, handler_name.encode(), handler_type.encode())
    # Handler indices start at 1; 0 signals lookup failure.
    if handler_index == 0:
        raise CLIPSError(self._env)
    return MessageHandler(self._env, self._cls, handler_index)
|
python
|
{
"resource": ""
}
|
q18233
|
Class.subclass
|
train
|
def subclass(self, klass):
    """True if the Class is a subclass of the given one."""
    outcome = lib.EnvSubclassP(self._env, self._cls, klass._cls)
    return bool(outcome)
|
python
|
{
"resource": ""
}
|
q18234
|
Class.superclass
|
train
|
def superclass(self, klass):
    """True if the Class is a superclass of the given one."""
    outcome = lib.EnvSuperclassP(self._env, self._cls, klass._cls)
    return bool(outcome)
|
python
|
{
"resource": ""
}
|
q18235
|
Class.slots
|
train
|
def slots(self, inherited=False):
    """Iterate over the Slots of the class."""
    names = clips.data.DataObject(self._env)
    lib.EnvClassSlots(self._env, self._cls, names.byref, int(inherited))
    return (ClassSlot(self._env, self._cls, name.encode())
            for name in names.value)
|
python
|
{
"resource": ""
}
|
q18236
|
Class.instances
|
train
|
def instances(self):
    """Iterate over the instances of the class."""
    instance = lib.EnvGetNextInstanceInClass(self._env, self._cls, ffi.NULL)
    while instance != ffi.NULL:
        yield Instance(self._env, instance)
        instance = lib.EnvGetNextInstanceInClass(
            self._env, self._cls, instance)
|
python
|
{
"resource": ""
}
|
q18237
|
Class.subclasses
|
train
|
def subclasses(self, inherited=False):
    """Iterate over the subclasses of the class.
    This function is the Python equivalent
    of the CLIPS class-subclasses command.
    """
    data = clips.data.DataObject(self._env)
    lib.EnvClassSubclasses(
        self._env, self._cls, data.byref, int(inherited))
    yield from classes(self._env, data.value)
|
python
|
{
"resource": ""
}
|
q18238
|
Class.superclasses
|
train
|
def superclasses(self, inherited=False):
    """Iterate over the superclasses of the class.
    This function is the Python equivalent
    of the CLIPS class-superclasses command.
    """
    data = clips.data.DataObject(self._env)
    lib.EnvClassSuperclasses(
        self._env, self._cls, data.byref, int(inherited))
    yield from classes(self._env, data.value)
|
python
|
{
"resource": ""
}
|
q18239
|
Class.message_handlers
|
train
|
def message_handlers(self):
    """Iterate over the message handlers of the class."""
    # Handler indices start at 1; 0 terminates the iteration.
    handler_index = lib.EnvGetNextDefmessageHandler(self._env, self._cls, 0)
    while handler_index != 0:
        yield MessageHandler(self._env, self._cls, handler_index)
        handler_index = lib.EnvGetNextDefmessageHandler(
            self._env, self._cls, handler_index)
|
python
|
{
"resource": ""
}
|
q18240
|
Class.undefine
|
train
|
def undefine(self):
    """Undefine the Class.
    Python equivalent of the CLIPS undefclass command.
    The object becomes unusable after this method has been called.
    """
    if lib.EnvUndefclass(self._env, self._cls) != 1:
        raise CLIPSError(self._env)
    # Drop the environment reference so later use fails fast.
    self._env = None
|
python
|
{
"resource": ""
}
|
q18241
|
FortranReader.include
|
train
|
def include(self):
    """
    If the next line is an include statement, inserts the contents
    of the included file into the pending buffer.
    """
    if len(self.pending) == 0 or not self.pending[0].startswith('include '):
        return
    # Strip the 'include ' keyword (8 chars) and the surrounding quotes.
    name = self.pending.pop(0)[8:].strip()[1:-1]
    # Resolve relative to the current file's directory first,
    # then each configured include directory.
    for b in [os.path.dirname(self.name)] + self.inc_dirs:
        pname = os.path.abspath(os.path.expanduser(os.path.join(b, name)))
        if os.path.isfile(pname):
            name = pname
            break
    else:
        # for/else: no candidate path matched.
        raise Exception('Can not find include file "{}".'.format(name))
    # Recursively read the included file with the same settings and
    # splice its lines in front of the remaining pending lines.
    self.pending = list(FortranReader(name, self.docmark, self.predocmark,
                                      self.docmark_alt, self.predocmark_alt,
                                      self.fixed, self.length_limit,
                                      inc_dirs=self.inc_dirs)) + self.pending
|
python
|
{
"resource": ""
}
|
q18242
|
GraphData.register
|
train
|
def register(self, obj, cls=type(None), hist=None):
    """
    Takes a FortranObject and adds it to the appropriate list, if
    not already present.

    hist is an optional dict forwarded to TypeNode/ProcNode.
    """
    # FIX: hist previously defaulted to a mutable {} shared across all
    # calls; a fresh dict per call avoids accidental cross-call state.
    if hist is None:
        hist = {}
    if is_submodule(obj, cls):
        if obj not in self.submodules:
            self.submodules[obj] = SubmodNode(obj, self)
    elif is_module(obj, cls):
        if obj not in self.modules:
            self.modules[obj] = ModNode(obj, self)
    elif is_type(obj, cls):
        if obj not in self.types:
            self.types[obj] = TypeNode(obj, self, hist)
    elif is_proc(obj, cls):
        if obj not in self.procedures:
            self.procedures[obj] = ProcNode(obj, self, hist)
    elif is_program(obj, cls):
        if obj not in self.programs:
            self.programs[obj] = ProgNode(obj, self)
    elif is_sourcefile(obj, cls):
        if obj not in self.sourcefiles:
            self.sourcefiles[obj] = FileNode(obj, self)
    elif is_blockdata(obj, cls):
        if obj not in self.blockdata:
            self.blockdata[obj] = BlockNode(obj, self)
    else:
        raise BadType("Object type {} not recognized by GraphData".format(
            type(obj).__name__))
|
python
|
{
"resource": ""
}
|
q18243
|
GraphData.get_node
|
train
|
def get_node(self, obj, cls=type(None), hist=None):
    """
    Returns the node corresponding to obj. If it does not already exist
    then it will create it.

    hist is an optional dict forwarded to register().
    """
    # FIX: hist previously defaulted to a mutable {} shared across all
    # calls; a fresh dict per call avoids accidental cross-call state.
    if hist is None:
        hist = {}
    if obj in self.modules and is_module(obj, cls):
        return self.modules[obj]
    elif obj in self.submodules and is_submodule(obj, cls):
        return self.submodules[obj]
    elif obj in self.types and is_type(obj, cls):
        return self.types[obj]
    elif obj in self.procedures and is_proc(obj, cls):
        return self.procedures[obj]
    elif obj in self.programs and is_program(obj, cls):
        return self.programs[obj]
    elif obj in self.sourcefiles and is_sourcefile(obj, cls):
        return self.sourcefiles[obj]
    elif obj in self.blockdata and is_blockdata(obj, cls):
        return self.blockdata[obj]
    else:
        # Unknown object: create the node, then retry the lookup.
        self.register(obj, cls, hist)
        return self.get_node(obj, cls, hist)
|
python
|
{
"resource": ""
}
|
q18244
|
FortranGraph.add_to_graph
|
train
|
def add_to_graph(self, nodes, edges, nesting):
    """
    Adds nodes and edges to the graph as long as the maximum number
    of nodes is not exceeded.
    All edges are expected to have a reference to an entry in nodes.
    If the list of nodes is not added in the first hop due to graph
    size limitations, they are stored in hopNodes.
    If the graph was extended the function returns True, otherwise the
    result will be False.
    """
    if (len(nodes) + len(self.added)) > self.max_nodes:
        # Over budget: only first-hop nodes are kept for later use.
        if nesting < 2:
            self.hopNodes = nodes
            self.hopEdges = edges
        # Record at which nesting depth truncation occurred.
        self.truncated = nesting
        return False
    else:
        for n in nodes:
            self.dot.node(n.ident, **n.attribs)
        for e in edges:
            # 5-tuples carry an edge label; 4-tuples do not.
            if len(e) == 5:
                self.dot.edge(e[0].ident, e[1].ident, style=e[2],
                              color=e[3], label=e[4])
            else:
                self.dot.edge(e[0].ident, e[1].ident, style=e[2],
                              color=e[3])
        self.added.update(nodes)
        return True
|
python
|
{
"resource": ""
}
|
q18245
|
ModuleGraph.add_nodes
|
train
|
def add_nodes(self, nodes, nesting=1):
    """
    Adds nodes and edges for generating the graph showing the relationship
    between modules and submodules listed in nodes.
    """
    hopNodes = set()  # nodes in this hop
    hopEdges = []  # edges in this hop
    # get nodes and edges for this hop
    # IDIOM: enumerate() replaces zip(range(len(nodes)), nodes)
    for i, n in enumerate(nodes):
        r, g, b = rainbowcolour(i, len(nodes))
        colour = '#%02X%02X%02X' % (r, g, b)
        for nu in n.uses:
            if nu not in self.added:
                hopNodes.add(nu)
            hopEdges.append((n, nu, 'dashed', colour))
        if hasattr(n, 'ancestor'):
            if n.ancestor not in self.added:
                hopNodes.add(n.ancestor)
            hopEdges.append((n, n.ancestor, 'solid', colour))
    # add nodes, edges and attributes to the graph if maximum number of
    # nodes is not exceeded
    if self.add_to_graph(hopNodes, hopEdges, nesting):
        self.dot.attr('graph', size='11.875,1000.0')
|
python
|
{
"resource": ""
}
|
q18246
|
FileGraph.add_nodes
|
train
|
def add_nodes(self, nodes, nesting=1):
    """
    Adds edges showing dependencies between source files listed in
    the nodes.
    """
    hopNodes = set()  # nodes in this hop
    hopEdges = []  # edges in this hop
    # get nodes and edges for this hop
    # IDIOM: enumerate() replaces zip(range(len(nodes)), nodes)
    for i, n in enumerate(nodes):
        r, g, b = rainbowcolour(i, len(nodes))
        colour = '#%02X%02X%02X' % (r, g, b)
        for ne in n.efferent:
            if ne not in self.added:
                hopNodes.add(ne)
            hopEdges.append((ne, n, 'solid', colour))
    # add nodes and edges to the graph if maximum number of nodes is not
    # exceeded
    self.add_to_graph(hopNodes, hopEdges, nesting)
|
python
|
{
"resource": ""
}
|
q18247
|
CallGraph.add_nodes
|
train
|
def add_nodes(self, nodes, nesting=1):
    """
    Adds edges indicating the call-tree for the procedures listed in
    the nodes.
    """
    hopNodes = set()  # nodes in this hop
    hopEdges = []  # edges in this hop
    # get nodes and edges for this hop
    # IDIOM: enumerate() replaces zip(range(len(nodes)), nodes)
    for i, n in enumerate(nodes):
        r, g, b = rainbowcolour(i, len(nodes))
        colour = '#%02X%02X%02X' % (r, g, b)
        for p in n.calls:
            if p not in hopNodes:
                hopNodes.add(p)
            hopEdges.append((n, p, 'solid', colour))
        for p in getattr(n, 'interfaces', []):
            if p not in hopNodes:
                hopNodes.add(p)
            hopEdges.append((n, p, 'dashed', colour))
    # add nodes, edges and attributes to the graph if maximum number of
    # nodes is not exceeded
    if self.add_to_graph(hopNodes, hopEdges, nesting):
        self.dot.attr('graph', size='11.875,1000.0')
        self.dot.attr('graph', concentrate='false')
|
python
|
{
"resource": ""
}
|
q18248
|
DefaultObjectPolicy.exception
|
train
|
def exception(self, url, exception):
    '''What to return when there's an exception.'''
    # Fall back to a freshly built object that expires after self.ttl.
    expiration = time.time() + self.ttl
    return (expiration, self.factory(url))
|
python
|
{
"resource": ""
}
|
q18249
|
parse_date
|
train
|
def parse_date(string):
    '''Return a timestamp for the provided datestring, described by RFC 7231.'''
    fields = email.utils.parsedate_tz(string)
    if fields is None:
        raise ValueError("Invalid time.")
    fields = list(fields)
    # A missing zone offset is interpreted as GMT/UTC.
    if fields[9] is None:
        fields[9] = 0
    return email.utils.mktime_tz(fields)
|
python
|
{
"resource": ""
}
|
q18250
|
HeaderWithDefaultPolicy.ttl
|
train
|
def ttl(self, response):
    '''Get the ttl from headers.

    Returns max-age/s-maxage when present, otherwise falls back to the
    Expires header, otherwise self.default; never less than self.minimum.
    '''
    # If max-age is specified in Cache-Control, use it and ignore any
    # Expires header, as per RFC2616 Sec. 13.2.4.
    cache_control = response.headers.get('cache-control')
    if cache_control is not None:
        for directive in cache_control.split(','):
            name, _, value = directive.lower().partition('=')
            name = name.strip()
            if name in ('no-store', 'must-revalidate', 'no-cache'):
                return max(self.minimum, 0)
            elif name in ('s-maxage', 'max-age'):
                try:
                    return max(self.minimum, int(value.strip()))
                except ValueError:
                    # FIX: Logger.warn is deprecated; use warning().
                    logger.warning(
                        'Could not parse %s=%s', name, value, exc_info=True)
    # Check the Expires header
    expires = response.headers.get('expires')
    if expires is not None:
        # Evaluate the expiration relative to the server-provided date
        date = response.headers.get('date')
        if date is not None:
            try:
                date = parse_date(date)
            except ValueError:
                logger.warning(
                    'Could not parse date string %s', date, exc_info=True)
                date = time.time()
        else:
            date = time.time()
        try:
            return max(self.minimum, parse_date(expires) - date)
        except ValueError:
            logger.warning(
                'Could not parse date string %s', expires, exc_info=True)
    return self.default
|
python
|
{
"resource": ""
}
|
q18251
|
ExpiringObject.get
|
train
|
def get(self):
    '''Get the wrapped object.

    Refreshes the object via self.factory() when missing or expired;
    a factory result that is an exception instance is stored and
    re-raised on every access until the next successful refresh.
    '''
    # NOTE(review): the staleness check happens outside the lock, so two
    # threads can both decide to refresh; the factory call itself runs
    # under the lock — confirm this double-refresh is acceptable.
    if (self.obj is None) or (time.time() >= self.expires):
        with self.lock:
            self.expires, self.obj = self.factory()
            if isinstance(self.obj, BaseException):
                self.exception = self.obj
            else:
                self.exception = None
    if self.exception:
        raise self.exception
    else:
        return self.obj
|
python
|
{
"resource": ""
}
|
q18252
|
BaseCache.get
|
train
|
def get(self, url):
    '''Get the entity that corresponds to URL.'''
    robots_url = Robots.robots_url(url)
    if robots_url not in self.cache:
        # Lazily create the expiring wrapper for this robots.txt URL.
        factory = partial(self.factory, robots_url)
        self.cache[robots_url] = ExpiringObject(factory)
    return self.cache[robots_url].get()
|
python
|
{
"resource": ""
}
|
q18253
|
RobotsCache.allowed
|
train
|
def allowed(self, url, agent):
    '''Return true if the provided URL is allowed to agent.'''
    # Delegates to the cached entity looked up via self.get(url).
    return self.get(url).allowed(url, agent)
|
python
|
{
"resource": ""
}
|
q18254
|
timer
|
train
|
def timer(name, count):
    '''Time this block.'''
    start = time.time()
    try:
        yield count
    finally:
        # Report timing even if the timed block raised.
        duration = time.time() - start
        report = [
            name,
            '=' * 10,
            'Total: %s' % duration,
            '  Avg: %s' % (duration / count),
            ' Rate: %s' % (count / duration),
            '',
        ]
        for line in report:
            print(line)
|
python
|
{
"resource": ""
}
|
q18255
|
add_command_arguments
|
train
|
def add_command_arguments(parser):
    """
    Additional command line arguments for the behave management command.

    Pure argparse configuration: registers the Django-side options
    (interactivity, failfast, ordering, database handling, runner choice)
    on the given parser.
    """
    parser.add_argument(
        '--noinput',
        '--no-input',
        action='store_const',
        const=False,
        dest='interactive',
        help='Tells Django to NOT prompt the user for input of any kind.',
    )
    parser.add_argument(
        '--failfast', action='store_const', const=True, dest='failfast',
        help=('Tells Django to stop running the '
              'test suite after first failed test.'),
    )
    parser.add_argument(
        '-r', '--reverse', action='store_const', const=True, dest='reverse',
        help='Reverses test cases order.',
    )
    parser.add_argument(
        '--use-existing-database',
        action='store_true',
        default=False,
        help="Don't create a test database. USE AT YOUR OWN RISK!",
    )
    parser.add_argument(
        '-k', '--keepdb',
        action='store_const',
        const=True,
        help="Preserves the test DB between runs.",
    )
    parser.add_argument(
        '-S', '--simple',
        action='store_true',
        default=False,
        help="Use simple test runner that supports Django's"
             " testing client only (no web browser automation)"
    )
|
python
|
{
"resource": ""
}
|
q18256
|
add_behave_arguments
|
train
|
def add_behave_arguments(parser):  # noqa
    """
    Additional command line arguments extracted directly from behave
    """
    # Option strings that conflict with Django
    conflicts = [
        '--no-color',
        '--version',
        '-c',
        '-k',
        '-v',
        '-S',
        '--simple',
    ]
    parser.add_argument(
        'paths',
        action='store',
        nargs='*',
        help="Feature directory, file or file location (FILE:LINE)."
    )
    for fixed, keywords in behave_options:
        if not fixed:
            # Configfile only entries are ignored
            continue
        keywords = keywords.copy()
        # Build option strings, prefixing conflicting ones with `--behave`
        option_strings = []
        for option in fixed:
            if option in conflicts:
                prefix = '--' if option.startswith('--') else '-'
                option = option.replace(prefix, '--behave-', 1)
            option_strings.append(option)
        # config_help isn't a valid keyword for add_argument
        if 'config_help' in keywords:
            keywords['help'] = keywords.pop('config_help')
        parser.add_argument(*option_strings, **keywords)
|
python
|
{
"resource": ""
}
|
q18257
|
Command.add_arguments
|
train
|
def add_arguments(self, parser):
    """
    Add behave's and our command line arguments to the command.

    Delegates to add_command_arguments (Django-side options) and
    add_behave_arguments (options extracted from behave).
    """
    parser.usage = "%(prog)s [options] [ [DIR|FILE|FILE:LINE] ]+"
    parser.description = """\
Run a number of feature tests with behave."""
    add_command_arguments(parser)
    add_behave_arguments(parser)
|
python
|
{
"resource": ""
}
|
q18258
|
Command.get_behave_args
|
train
|
def get_behave_args(self, argv=sys.argv):
    """
    Get a list of those command line arguments specified with the
    management command that are meant as arguments for running behave.
    """
    parser = BehaveArgsHelper().create_parser('manage.py', 'behave')
    # Everything the helper parser does not recognise belongs to behave.
    args, unknown = parser.parse_known_args(argv[2:])
    behave_args = []
    for option in unknown:
        if option.startswith('--behave-'):
            # Strip the disambiguation prefix and restore the dash style.
            stripped = option.replace('--behave-', '', 1)
            option = ('-' if len(stripped) == 1 else '--') + stripped
        behave_args.append(option)
    return behave_args
|
python
|
{
"resource": ""
}
|
q18259
|
load_registered_fixtures
|
train
|
def load_registered_fixtures(context):
    """
    Apply fixtures that are registered with the @fixtures decorator.

    Collects `registered_fixtures` from every matched step of the
    current scenario and appends them to context.test.fixtures.
    """
    # -- SELECT STEP REGISTRY:
    # HINT: Newer behave versions use runner.step_registry
    # to be able to support multiple runners, each with its own step_registry.
    runner = context._runner  # pylint: disable=protected-access
    step_registry = getattr(runner, 'step_registry', None)
    if not step_registry:
        # -- BACKWARD-COMPATIBLE: Use module_step_registry
        step_registry = module_step_registry.registry
    # -- SETUP SCENARIO FIXTURES:
    for step in context.scenario.all_steps:
        match = step_registry.find_match(step)
        if match and hasattr(match.func, 'registered_fixtures'):
            if not context.test.fixtures:
                context.test.fixtures = []
            context.test.fixtures.extend(match.func.registered_fixtures)
|
python
|
{
"resource": ""
}
|
q18260
|
BehaveHooksMixin.patch_context
|
train
|
def patch_context(self, context):
    """
    Patches the context to add utility functions
    Sets up the base_url, and the get_url() utility function.
    """
    context.__class__ = PatchedContext
    # Simply setting __class__ directly doesn't work
    # because behave.runner.Context.__setattr__ is implemented wrongly.
    # NOTE(review): the direct assignment above therefore looks
    # ineffective (it goes through that same __setattr__) — confirm
    # whether it can be removed.
    object.__setattr__(context, '__class__', PatchedContext)
|
python
|
{
"resource": ""
}
|
q18261
|
BehaveHooksMixin.setup_fixtures
|
train
|
def setup_fixtures(self, context):
    """
    Sets up fixtures.

    Copies fixture-related settings from the behave context onto the
    Django test instance, then loads decorator-registered fixtures
    for the current scenario (if any).
    """
    if getattr(context, 'fixtures', None):
        # Copy so later mutation of test.fixtures can't alter the context.
        context.test.fixtures = copy(context.fixtures)
    if getattr(context, 'reset_sequences', None):
        context.test.reset_sequences = context.reset_sequences
    if getattr(context, 'multi_db', None):
        # multi_db is a class-level attribute on Django test cases.
        context.test.__class__.multi_db = context.multi_db
    if hasattr(context, 'scenario'):
        load_registered_fixtures(context)
|
python
|
{
"resource": ""
}
|
q18262
|
DenonAVR.exec_appcommand_post
|
train
|
def exec_appcommand_post(self, attribute_list):
    """
    Prepare and execute a HTTP POST call to AppCommand.xml end point.
    Returns XML ElementTree on success and None on fail.
    """
    # Prepare POST XML body for AppCommand.xml
    post_root = ET.Element("tx")
    for attribute in attribute_list:
        # Append tags for each attribute
        item = ET.Element("cmd")
        item.set("id", "1")
        item.text = attribute
        post_root.append(item)
    # Buffer XML body as binary IO
    body = BytesIO()
    post_tree = ET.ElementTree(post_root)
    post_tree.write(body, encoding="utf-8", xml_declaration=True)
    # Query receivers AppCommand.xml
    try:
        res = self.send_post_command(
            self._urls.appcommand, body.getvalue())
    except requests.exceptions.RequestException:
        _LOGGER.error("No connection to %s end point on host %s",
                      self._urls.appcommand, self._host)
        body.close()
        # Falls through: implicitly returns None on connection failure.
    else:
        # Buffered XML not needed anymore: close
        body.close()
        try:
            # Return XML ElementTree
            root = ET.fromstring(res)
        except (ET.ParseError, TypeError):
            # TypeError covers send_post_command returning False.
            _LOGGER.error(
                "End point %s on host %s returned malformed XML.",
                self._urls.appcommand, self._host)
            # Falls through: implicitly returns None on malformed XML.
        else:
            return root
|
python
|
{
"resource": ""
}
|
q18263
|
DenonAVR.get_status_xml
|
train
|
def get_status_xml(self, command, suppress_errors=False):
    """Get status XML via HTTP and return it as XML ElementTree.

    Raises ValueError on a non-200 response or malformed XML;
    suppress_errors only silences the error logging, not the raise.
    """
    # Get XML structure via HTTP get
    res = requests.get("http://{host}:{port}{command}".format(
        host=self._host, port=self._receiver_port, command=command),
        timeout=self.timeout)
    # Continue with XML processing only if HTTP status code = 200
    if res.status_code == 200:
        try:
            # Return XML ElementTree
            return ET.fromstring(res.text)
        except ET.ParseError as err:
            if not suppress_errors:
                _LOGGER.error(
                    "Host %s returned malformed XML for end point %s",
                    self._host, command)
                _LOGGER.error(err)
            # FIX: attach a message to the previously bare ValueError.
            raise ValueError(
                "Malformed XML from host {} at end point {}".format(
                    self._host, command))
    else:
        if not suppress_errors:
            _LOGGER.error((
                "Host %s returned HTTP status code %s to GET request at "
                "end point %s"), self._host, res.status_code, command)
        # FIX: attach a message to the previously bare ValueError.
        raise ValueError(
            "HTTP status code {} from host {} at end point {}".format(
                res.status_code, self._host, command))
|
python
|
{
"resource": ""
}
|
q18264
|
DenonAVR.send_get_command
|
train
|
def send_get_command(self, command):
    """Send command via HTTP get to receiver."""
    url = "http://{host}:{port}{command}".format(
        host=self._host, port=self._receiver_port, command=command)
    res = requests.get(url, timeout=self.timeout)
    if res.status_code != 200:
        _LOGGER.error((
            "Host %s returned HTTP status code %s to GET command at "
            "end point %s"), self._host, res.status_code, command)
        return False
    return True
|
python
|
{
"resource": ""
}
|
q18265
|
DenonAVR.send_post_command
|
train
|
def send_post_command(self, command, body):
    """Send command via HTTP post to receiver."""
    url = "http://{host}:{port}{command}".format(
        host=self._host, port=self._receiver_port, command=command)
    res = requests.post(url, data=body, timeout=self.timeout)
    if res.status_code == 200:
        return res.text
    _LOGGER.error((
        "Host %s returned HTTP status code %s to POST command at "
        "end point %s"), self._host, res.status_code, command)
    return False
|
python
|
{
"resource": ""
}
|
q18266
|
DenonAVR.create_zones
|
train
|
def create_zones(self, add_zones):
    """Create instances of additional zones for the receiver."""
    for zone, zname in add_zones.items():
        # Name either set explicitly or name of Main Zone with suffix
        if zname is None:
            zonename = "{} {}".format(self._name, zone)
        else:
            zonename = zname
        self._zones[zone] = DenonAVRZones(self, zone, zonename)
|
python
|
{
"resource": ""
}
|
q18267
|
DenonAVR._update_input_func_list
|
train
|
def _update_input_func_list(self):
    """
    Update sources list from receiver.

    Internal method which rebuilds ``_input_func_list`` (name -> func),
    ``_input_func_list_rev`` (func -> name), ``_netaudio_func_list`` and
    ``_playing_func_list`` from the sources reported by the receiver,
    applying any renaming/deletion information the device provides.

    Returns True on success, False when no sources could be read or
    the receiver type is not set yet.
    """
    # Get all sources and renaming information from receiver
    # For structural information of the variables please see the methods
    receiver_sources = self._get_receiver_sources()
    if not receiver_sources:
        _LOGGER.error("Receiver sources list empty. "
                      "Please check if device is powered on.")
        return False
    # First input_func_list determination of AVR-X receivers
    if self._receiver_type in [AVR_X.type, AVR_X_2016.type]:
        # AppCommand.xml is the preferred way to read rename/delete info
        renamed_sources, deleted_sources, status_success = (
            self._get_renamed_deleted_sourcesapp())
        # Backup if previous try with AppCommand was not successful
        if not status_success:
            renamed_sources, deleted_sources = (
                self._get_renamed_deleted_sources())
        # Remove all deleted sources
        if self._show_all_inputs is False:
            for deleted_source in deleted_sources.items():
                if deleted_source[1] == "DEL":
                    receiver_sources.pop(deleted_source[0], None)
        # Clear and rebuild the sources lists
        self._input_func_list.clear()
        self._input_func_list_rev.clear()
        self._netaudio_func_list.clear()
        self._playing_func_list.clear()
        # item is a (FuncName, DefaultName) pair from receiver_sources
        for item in receiver_sources.items():
            # Mapping of item[0] because some func names are inconsistant
            # at AVR-X receivers
            m_item_0 = SOURCE_MAPPING.get(item[0], item[0])
            # For renamed sources use those names and save the default name
            # for a later mapping
            if item[0] in renamed_sources:
                self._input_func_list[renamed_sources[item[0]]] = m_item_0
                self._input_func_list_rev[
                    m_item_0] = renamed_sources[item[0]]
                # If the source is a netaudio source, save its renamed name
                if item[0] in NETAUDIO_SOURCES:
                    self._netaudio_func_list.append(
                        renamed_sources[item[0]])
                # If the source is a playing source, save its renamed name
                if item[0] in PLAYING_SOURCES:
                    self._playing_func_list.append(
                        renamed_sources[item[0]])
            # Otherwise the default names are used
            else:
                self._input_func_list[item[1]] = m_item_0
                self._input_func_list_rev[m_item_0] = item[1]
                # If the source is a netaudio source, save its name
                if item[1] in NETAUDIO_SOURCES:
                    self._netaudio_func_list.append(item[1])
                # If the source is a playing source, save its name
                if item[1] in PLAYING_SOURCES:
                    self._playing_func_list.append(item[1])
    # Determination of input_func_list for non AVR-nonX receivers
    elif self._receiver_type == AVR.type:
        # Clear and rebuild the sources lists
        self._input_func_list.clear()
        self._input_func_list_rev.clear()
        self._netaudio_func_list.clear()
        self._playing_func_list.clear()
        # NOTE: for non-X receivers the membership checks use the raw
        # func name (item[0]) but the stored/display name is item[1]
        for item in receiver_sources.items():
            self._input_func_list[item[1]] = item[0]
            self._input_func_list_rev[item[0]] = item[1]
            # If the source is a netaudio source, save its name
            if item[0] in NETAUDIO_SOURCES:
                self._netaudio_func_list.append(item[1])
            # If the source is a playing source, save its name
            if item[0] in PLAYING_SOURCES:
                self._playing_func_list.append(item[1])
    else:
        _LOGGER.error('Receiver type not set yet.')
        return False
    # Finished
    return True
|
python
|
{
"resource": ""
}
|
q18268
|
DenonAVR._get_receiver_name
|
train
|
def _get_receiver_name(self):
    """Determine the receiver's friendly name from the Main Zone page.

    Only runs when no name is set yet and a main zone URL is known.
    Falls back to the generic name "Denon AVR" (with a single warning)
    when the status page cannot be fetched or does not contain a
    FriendlyName tag.
    """
    # If name is not set yet, get it from Main Zone URL
    if self._name is None and self._urls.mainzone is not None:
        name_tag = {"FriendlyName": None}
        try:
            root = self.get_status_xml(self._urls.mainzone)
        except (ValueError,
                requests.exceptions.RequestException):
            _LOGGER.warning("Receiver name could not be determined. "
                            "Using standard name: Denon AVR.")
            self._name = "Denon AVR"
        else:
            # Get the tags from this XML; consumed tags are removed
            # from name_tag, so a non-empty dict means nothing found
            name_tag = self._get_status_from_xml_tags(root, name_tag)
            # Fallback only here: the except branch above already set
            # the default name and warned once
            if name_tag:
                _LOGGER.warning("Receiver name could not be determined. "
                                "Using standard name: Denon AVR.")
                self._name = "Denon AVR"
|
python
|
{
"resource": ""
}
|
q18269
|
DenonAVR._get_zone_name
|
train
|
def _get_zone_name(self):
    """Query the receiver for this zone's name via AppCommand.xml.

    Does nothing when a name is already known; logs errors when the
    call fails or the zone tag is missing.
    """
    if self._name is not None:
        return
    # Execute AppCommand.xml call with the GetZoneName tag
    root = self.exec_appcommand_post(["GetZoneName"])
    if root is None:
        _LOGGER.error("Getting ZoneName failed.")
        return
    zone = self._get_own_zone()
    try:
        # find() returns None when the tag is absent -> AttributeError
        name = root.find("./cmd/{zone}".format(zone=zone)).text
    except AttributeError:
        _LOGGER.error("No ZoneName found for zone %s", self.zone)
        return
    self._name = name.strip()
|
python
|
{
"resource": ""
}
|
q18270
|
DenonAVR._get_receiver_sources
|
train
|
def _get_receiver_sources(self):
    """
    Get sources list from receiver.

    Internal method which queries device via HTTP to get the receiver's
    input sources.
    This method also determines the type of the receiver
    (avr, avr-x, avr-x-2016) and sets ``_receiver_type`` and
    ``_receiver_port`` accordingly.

    Returns a dict mapping source func names to default/display names.
    """
    # Test if receiver is a AVR-X with port 80 for pre 2016 devices and
    # port 8080 devices 2016 and later
    # Each entry unpacks into (type, port) -- assumes AVR_X/AVR_X_2016
    # are named 2-tuples with fields "type" and "port" (used below)
    r_types = [AVR_X, AVR_X_2016]
    for r_type, port in r_types:
        self._receiver_port = port
        # This XML is needed to get the sources of the receiver
        try:
            root = self.get_status_xml(self._urls.deviceinfo,
                                       suppress_errors=True)
        except (ValueError, requests.exceptions.RequestException):
            self._receiver_type = None
        else:
            # First test by CommApiVers
            try:
                if bool(DEVICEINFO_COMMAPI_PATTERN.search(
                        root.find("CommApiVers").text) is not None):
                    self._receiver_type = r_type
                    # receiver found break the loop
                    break
            except AttributeError:
                # AttributeError occurs when ModelName tag is not found.
                # In this case there is no AVR-X device
                self._receiver_type = None
            # if first test did not find AVR-X device, check by model name
            if self._receiver_type is None:
                try:
                    if bool(DEVICEINFO_AVR_X_PATTERN.search(
                            root.find("ModelName").text) is not None):
                        self._receiver_type = r_type
                        # receiver found break the loop
                        break
                except AttributeError:
                    # AttributeError occurs when ModelName tag is not found
                    # In this case there is no AVR-X device
                    self._receiver_type = None
    # Set ports and update method
    if self._receiver_type is None:
        self._receiver_type = AVR.type
        self._receiver_port = AVR.port
    elif self._receiver_type == AVR_X_2016.type:
        self._receiver_port = AVR_X_2016.port
    else:
        self._receiver_port = AVR_X.port
    _LOGGER.info("Identified receiver type: '%s' on port: '%s'",
                 self._receiver_type, self._receiver_port)
    # Not an AVR-X device, start determination of sources
    if self._receiver_type == AVR.type:
        # Sources list is equal to list of renamed sources.
        non_x_sources, deleted_non_x_sources, status_success = (
            self._get_renamed_deleted_sourcesapp())
        # Backup if previous try with AppCommand was not successful
        if not status_success:
            non_x_sources, deleted_non_x_sources = (
                self._get_renamed_deleted_sources())
        # Remove all deleted sources
        if self._show_all_inputs is False:
            for deleted_source in deleted_non_x_sources.items():
                if deleted_source[1] == "DEL":
                    non_x_sources.pop(deleted_source[0], None)
        # Invalid source "SOURCE" needs to be deleted
        non_x_sources.pop("SOURCE", None)
        return non_x_sources
    # Following source determination of AVR-X receivers
    else:
        # receiver_sources is of type dict with "FuncName" as key and
        # "DefaultName" as value.
        # NOTE(review): "root" here is from the last loop iteration;
        # assumes an AVR-X type implies the XML was fetched -- confirm
        receiver_sources = {}
        # Source determination from XML
        favorites = root.find(".//FavoriteStation")
        if favorites:
            for child in favorites:
                if not child.tag.startswith("Favorite"):
                    continue
                func_name = child.tag.upper()
                self._favorite_func_list.append(func_name)
                receiver_sources[func_name] = child.find("Name").text
        for xml_zonecapa in root.findall("DeviceZoneCapabilities"):
            # Currently only Main Zone (No=0) supported
            if xml_zonecapa.find("./Zone/No").text == "0":
                # Get list of all input sources of receiver
                xml_list = xml_zonecapa.find("./InputSource/List")
                for xml_source in xml_list.findall("Source"):
                    receiver_sources[
                        xml_source.find(
                            "FuncName").text] = xml_source.find(
                                "DefaultName").text
        return receiver_sources
|
python
|
{
"resource": ""
}
|
q18271
|
DenonAVR._get_status_from_xml_tags
|
train
|
def _get_status_from_xml_tags(self, root, relevant_tags):
    """
    Get relevant status tags from XML structure with this internal method.

    Status is saved to internal attributes (_power, _input_func,
    _volume, _mute, _name, _sound_mode_raw).  Each tag found in the
    XML is popped from *relevant_tags* (mutated in place).

    Return dictionary of tags not found in XML.
    """
    for child in root:
        # Skip tags the caller did not ask for
        if child.tag not in relevant_tags.keys():
            continue
        elif child.tag == "Power":
            # child[0] is the tag's <value> element
            self._power = child[0].text
            relevant_tags.pop(child.tag, None)
        elif child.tag == "InputFuncSelect":
            inputfunc = child[0].text
            if inputfunc is not None:
                try:
                    # Map raw func name back to the display name
                    self._input_func = self._input_func_list_rev[inputfunc]
                except KeyError:
                    _LOGGER.info(
                        "No mapping for source %s found", inputfunc)
                    self._input_func = inputfunc
                finally:
                    # Consumed in both the mapped and unmapped case
                    relevant_tags.pop(child.tag, None)
        elif child.tag == "MasterVolume":
            self._volume = child[0].text
            relevant_tags.pop(child.tag, None)
        elif child.tag == "Mute":
            self._mute = child[0].text
            relevant_tags.pop(child.tag, None)
        elif child.tag == "FriendlyName" and self._name is None:
            # Only adopt the friendly name when none is set yet
            self._name = child[0].text
            relevant_tags.pop(child.tag, None)
        elif child.tag == "selectSurround" or child.tag == "SurrMode":
            # Both tags carry the raw sound mode; pop both aliases
            self._sound_mode_raw = child[0].text.rstrip()
            relevant_tags.pop("selectSurround", None)
            relevant_tags.pop("SurrMode", None)
    return relevant_tags
|
python
|
{
"resource": ""
}
|
q18272
|
DenonAVR.set_input_func
|
train
|
def set_input_func(self, input_func):
    """
    Set input_func of device.

    Valid values depend on the device and should be taken from
    "input_func_list".
    Return "True" on success and "False" on fail.
    """
    # For selection of sources other names then at receiving sources
    # have to be used
    # AVR-X receivers need a change-mapping from display names to the
    # names accepted by the set-source command
    if self._receiver_type in [AVR_X.type, AVR_X_2016.type]:
        direct_mapping = False
        try:
            linp = CHANGE_INPUT_MAPPING[self._input_func_list[input_func]]
        except KeyError:
            # No special change-mapping for this source: select directly
            direct_mapping = True
    else:
        # AVR-nonX receivers always select the source directly
        # (BUGFIX: this branch belongs to the receiver-type check, not
        # the try/except above -- otherwise direct_mapping was unbound
        # here and the mapping lookup result was never used)
        direct_mapping = True
    # If no mapping was found get parameter for setting input_func
    # directly
    if direct_mapping is True:
        try:
            linp = self._input_func_list[input_func]
        except KeyError:
            _LOGGER.error("No mapping for input source %s", input_func)
            return False
    # Create command URL and send command via HTTP GET
    try:
        # Favorites are selected through their own end point
        if linp in self._favorite_func_list:
            command_url = self._urls.command_fav_src + linp
        else:
            command_url = self._urls.command_sel_src + linp
        if self.send_get_command(command_url):
            self._input_func = input_func
            return True
        else:
            return False
    except requests.exceptions.RequestException:
        _LOGGER.error("Connection error: input function %s not set.",
                      input_func)
        return False
|
python
|
{
"resource": ""
}
|
q18273
|
DenonAVR._set_all_zone_stereo
|
train
|
def _set_all_zone_stereo(self, zst_on):
    """
    Activate or deactivate the All Zone Stereo mode on the device.

    Returns the result of the HTTP command ("True" when successfully
    sent) or False on connection errors.
    """
    suffix = "ZST ON" if zst_on else "ZST OFF"
    try:
        return self.send_get_command(
            self._urls.command_set_all_zone_stereo + suffix)
    except requests.exceptions.RequestException:
        _LOGGER.error(
            "Connection error: unable to set All Zone Stereo to %s",
            zst_on)
        return False
|
python
|
{
"resource": ""
}
|
q18274
|
DenonAVR.set_sound_mode
|
train
|
def set_sound_mode(self, sound_mode):
    """
    Select a sound mode on the device.

    Valid values depend on the device and should be taken from
    "sound_mode_list".
    Return "True" on success and "False" on fail.
    """
    # All Zone Stereo is toggled through its own command
    if sound_mode == ALL_ZONE_STEREO:
        if not self._set_all_zone_stereo(True):
            return False
        self._sound_mode_raw = ALL_ZONE_STEREO
        return True
    # Leaving All Zone Stereo requires switching it off first
    if self._sound_mode_raw == ALL_ZONE_STEREO:
        if not self._set_all_zone_stereo(False):
            return False
    # For selection of sound mode other names then at receiving sound
    # modes have to be used, therefore the mapping dictionary is
    # consulted for the raw mode after sending
    command_url = self._urls.command_sel_sound_mode + sound_mode
    try:
        if not self.send_get_command(command_url):
            return False
    except requests.exceptions.RequestException:
        _LOGGER.error("Connection error: sound mode function %s not set.",
                      sound_mode)
        return False
    self._sound_mode_raw = self._sound_mode_dict[sound_mode][0]
    return True
|
python
|
{
"resource": ""
}
|
q18275
|
DenonAVR.set_sound_mode_dict
|
train
|
def set_sound_mode_dict(self, sound_mode_dict):
    """Install the mapping used to match raw sound modes.

    The dictionary must map command names to lists of raw-mode
    strings.  Returns True when accepted; logs an error and returns
    False when the structure is invalid.
    """
    error_msg = ("Syntax of sound mode dictionary not valid, "
                 "use: OrderedDict([('COMMAND', ['VALUE1','VALUE2'])])")
    valid = isinstance(sound_mode_dict, dict)
    if valid:
        for sublist in sound_mode_dict.values():
            if not isinstance(sublist, list) or not all(
                    isinstance(element, str) for element in sublist):
                valid = False
                break
    if not valid:
        _LOGGER.error(error_msg)
        return False
    self._sound_mode_dict = sound_mode_dict
    self._sm_match_dict = self.construct_sm_match_dict()
    return True
|
python
|
{
"resource": ""
}
|
q18276
|
DenonAVR.construct_sm_match_dict
|
train
|
def construct_sm_match_dict(self):
    """Build the reverse lookup table for raw sound modes.

    Inverts self._sound_mode_dict so each upper-cased raw mode maps
    straight to its matched mode.  The result is larger than the
    source dict but allows direct dictionary-key matching.
    """
    return {
        raw_mode.upper(): matched_mode
        for matched_mode, raw_modes in self._sound_mode_dict.items()
        for raw_mode in raw_modes
    }
|
python
|
{
"resource": ""
}
|
q18277
|
DenonAVR.match_sound_mode
|
train
|
def match_sound_mode(self, sound_mode_raw):
    """Map a raw sound mode reported by the device to its match.

    Unknown raw modes are registered in the matching dictionaries so
    they resolve on subsequent calls, and the raw value is returned
    after logging a warning.
    """
    key = sound_mode_raw.upper()
    if key in self._sm_match_dict:
        return self._sm_match_dict[key]
    # Register the unknown mode so it maps to itself from now on
    self._sound_mode_dict[key] = [key]
    self._sm_match_dict = self.construct_sm_match_dict()
    _LOGGER.warning("Not able to match sound mode: '%s', "
                    "returning raw sound mode.", sound_mode_raw)
    return sound_mode_raw
|
python
|
{
"resource": ""
}
|
q18278
|
DenonAVR.toggle_play_pause
|
train
|
def toggle_play_pause(self):
    """Toggle between play and pause on the media player.

    Only effective for sources supporting network audio; returns
    None for all other sources.
    """
    if self._input_func not in self._netaudio_func_list:
        return None
    if self._state == STATE_PLAYING:
        return self._pause()
    return self._play()
|
python
|
{
"resource": ""
}
|
q18279
|
DenonAVR._play
|
train
|
def _play(self):
    """Send a play command via HTTP POST.

    Only used for sources which support NETAUDIO; returns None for
    other sources, True/False for success/failure.
    """
    if self._input_func not in self._netaudio_func_list:
        return None
    body = {"cmd0": "PutNetAudioCommand/CurEnter",
            "cmd1": "aspMainZone_WebUpdateStatus/",
            "ZoneName": "MAIN ZONE"}
    try:
        if not self.send_post_command(
                self._urls.command_netaudio_post, body):
            return False
    except requests.exceptions.RequestException:
        _LOGGER.error("Connection error: play command not sent.")
        return False
    self._state = STATE_PLAYING
    return True
|
python
|
{
"resource": ""
}
|
q18280
|
DenonAVR._pause
|
train
|
def _pause(self):
    """Send a pause command via HTTP POST.

    Only used for sources which support NETAUDIO; returns None for
    other sources, True/False for success/failure.
    """
    if self._input_func not in self._netaudio_func_list:
        return None
    body = {"cmd0": "PutNetAudioCommand/CurEnter",
            "cmd1": "aspMainZone_WebUpdateStatus/",
            "ZoneName": "MAIN ZONE"}
    try:
        if not self.send_post_command(
                self._urls.command_netaudio_post, body):
            return False
    except requests.exceptions.RequestException:
        _LOGGER.error("Connection error: pause command not sent.")
        return False
    self._state = STATE_PAUSED
    return True
|
python
|
{
"resource": ""
}
|
q18281
|
DenonAVR.previous_track
|
train
|
def previous_track(self):
    """Send a previous-track command via HTTP POST.

    Only used for sources which support NETAUDIO; returns None for
    other sources.
    """
    if self._input_func not in self._netaudio_func_list:
        return None
    body = {"cmd0": "PutNetAudioCommand/CurUp",
            "cmd1": "aspMainZone_WebUpdateStatus/",
            "ZoneName": "MAIN ZONE"}
    try:
        return bool(self.send_post_command(
            self._urls.command_netaudio_post, body))
    except requests.exceptions.RequestException:
        _LOGGER.error(
            "Connection error: previous track command not sent.")
        return False
|
python
|
{
"resource": ""
}
|
q18282
|
DenonAVR.volume_up
|
train
|
def volume_up(self):
    """Turn the receiver volume up one step via HTTP GET."""
    try:
        sent = self.send_get_command(self._urls.command_volume_up)
    except requests.exceptions.RequestException:
        _LOGGER.error("Connection error: volume up command not sent.")
        return False
    return bool(sent)
|
python
|
{
"resource": ""
}
|
q18283
|
DenonAVR.volume_down
|
train
|
def volume_down(self):
    """Turn the receiver volume down one step via HTTP GET."""
    try:
        sent = self.send_get_command(self._urls.command_volume_down)
    except requests.exceptions.RequestException:
        _LOGGER.error("Connection error: volume down command not sent.")
        return False
    return bool(sent)
|
python
|
{
"resource": ""
}
|
q18284
|
DenonAVR.set_volume
|
train
|
def set_volume(self, volume):
    """
    Set the receiver volume via HTTP GET.

    Volume is sent in a format like -50.0; the valid range is
    -80.0 to 18.0.  Raises ValueError for values outside the range.
    """
    if not -80 <= volume <= 18:
        raise ValueError("Invalid volume")
    try:
        sent = self.send_get_command(
            self._urls.command_set_volume % volume)
    except requests.exceptions.RequestException:
        _LOGGER.error("Connection error: set volume command not sent.")
        return False
    return bool(sent)
|
python
|
{
"resource": ""
}
|
q18285
|
DenonAVR.mute
|
train
|
def mute(self, mute):
    """Mute or unmute the receiver via HTTP GET.

    Updates the cached mute state on success; returns True/False.
    """
    if mute:
        command_url = self._urls.command_mute_on
        new_state = STATE_ON
    else:
        command_url = self._urls.command_mute_off
        new_state = STATE_OFF
    try:
        if not self.send_get_command(command_url):
            return False
    except requests.exceptions.RequestException:
        _LOGGER.error("Connection error: mute command not sent.")
        return False
    self._mute = new_state
    return True
|
python
|
{
"resource": ""
}
|
q18286
|
DenonAVRZones.sound_mode
|
train
|
def sound_mode(self):
    """Return the current sound mode matched through the parent AVR."""
    raw = self._parent_avr.sound_mode_raw
    return self._parent_avr.match_sound_mode(raw)
|
python
|
{
"resource": ""
}
|
q18287
|
identify_denonavr_receivers
|
train
|
def identify_denonavr_receivers():
    """
    Identify DenonAVR devices using SSDP and SCPD queries.

    Returns a list of dictionaries describing each discovered Denon
    AVR with keys "host", "modelName", "friendlyName" and
    "presentationURL".
    """
    receivers = []
    for device in send_ssdp_broadcast():
        try:
            receiver = evaluate_scpd_xml(device["URL"])
        except ConnectionError:
            # Unreachable device: skip it silently
            continue
        if receiver:
            receivers.append(receiver)
    return receivers
|
python
|
{
"resource": ""
}
|
q18288
|
send_ssdp_broadcast
|
train
|
def send_ssdp_broadcast():
    """
    Send SSDP broadcast message to discover UPnP devices.

    Up to three different SSDP queries are sent with a 2 second
    receive timeout each; the first query producing responses wins.

    Returns a list of dictionaries with "address" (IP, PORT) and "URL"
    of SCPD XML for all discovered devices.
    """
    # Send up to three different broadcast messages
    for i, ssdp_query in enumerate(SSDP_QUERIES):
        # Prepare SSDP broadcast message
        sock = socket.socket(
            socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
        sock.settimeout(2)
        sock.sendto(ssdp_query.encode(), (SSDP_ADDR, SSDP_PORT))
        # Collect all responses within the timeout period
        res = []
        try:
            while True:
                res.append(sock.recvfrom(10240))
        except socket.timeout:
            sock.close()
        if res:
            _LOGGER.debug("Got results after %s SSDP queries", i + 1)
            # NOTE(review): sock was already closed in the timeout
            # handler above; this second close is redundant but harmless
            sock.close()
            break
    # Prepare output of responding devices
    # NOTE(review): "res" is the result of the last loop iteration;
    # assumes SSDP_QUERIES is non-empty -- confirm
    devices = {}
    device = {}
    for entry in res:
        device["address"] = entry[1]
        # Some string operations to get the receivers URL
        # which could be found between LOCATION and end of line of the response
        en_decoded = entry[0].decode("utf-8")
        # If location is not found, skip the entry
        try:
            device["URL"] = (
                en_decoded[
                    en_decoded.lower().index(
                        "location:") + 10:en_decoded.index(
                            "\r\n", en_decoded.lower().index("location:"))]
            )
        except ValueError:
            continue
        # Copy because the "device" working dict is reused each loop
        devices[device["address"]] = device.copy()
    _LOGGER.debug("Following devices found: %s", list(devices.values()))
    return list(devices.values())
|
python
|
{
"resource": ""
}
|
q18289
|
evaluate_scpd_xml
|
train
|
def evaluate_scpd_xml(url):
    """
    Get and evaluate SCPD XML to identified URLs.

    Returns dictionary with keys "host", "modelName", "friendlyName" and
    "presentationURL" if a Denon AVR device was found and "False" if not.
    Raises ConnectionError when the device cannot be reached or answers
    with a non-200 status.
    """
    # Get SCPD XML via HTTP GET
    try:
        res = requests.get(url, timeout=2)
    except requests.exceptions.RequestException as err:
        _LOGGER.error(
            "When trying to request %s the following error occurred: %s",
            url, err)
        raise ConnectionError
    if res.status_code != 200:
        _LOGGER.error("Host returned HTTP status %s when connecting to %s",
                      res.status_code, url)
        raise ConnectionError
    try:
        root = ET.fromstring(res.text)
        # Hoist the repeated device-node lookup; the surrounding "try"
        # guards against tags missing from the XML (AttributeError)
        device_xml = root.find(SCPD_DEVICE)
        # Look for manufacturer "Denon" in response.
        _LOGGER.debug("Device %s has manufacturer %s", url,
                      device_xml.find(SCPD_MANUFACTURER).text)
        if (device_xml.find(SCPD_MANUFACTURER).text in
                SUPPORTED_MANUFACTURERS and
                device_xml.find(
                    SCPD_DEVICETYPE).text == DEVICETYPE_DENON):
            presentation_url = device_xml.find(SCPD_PRESENTATIONURL).text
            device = {}
            device["host"] = urlparse(presentation_url).hostname
            device["presentationURL"] = presentation_url
            device["modelName"] = device_xml.find(SCPD_MODELNAME).text
            device["friendlyName"] = (
                device_xml.find(SCPD_FRIENDLYNAME).text)
            return device
        else:
            return False
    except (AttributeError, ValueError, ET.ParseError) as err:
        _LOGGER.error(
            "Error occurred during evaluation of SCPD XML: %s", err)
        return False
|
python
|
{
"resource": ""
}
|
q18290
|
init_all_receivers
|
train
|
def init_all_receivers():
    """
    Initialize all discovered Denon AVR receivers in the LAN zone.

    Returns a list of created DenonAVR instances.  By default SSDP
    broadcasts are sent up to 3 times with a 2 seconds timeout.
    """
    return [DenonAVR(found["host"]) for found in discover()]
|
python
|
{
"resource": ""
}
|
q18291
|
parse_auto_sub
|
train
|
def parse_auto_sub(text):
    '''
    Parses webvtt and returns timestamps for words and lines.
    Tested on automatically generated subtitles from YouTube.
    '''
    cue_pat = r'<(\d\d:\d\d:\d\d(\.\d+)?)>'
    # Keep only lines that carry a timestamp at all
    stamped = [line for line in text.split('\n')
               if re.search(r'\d\d:\d\d:\d\d', line) is not None]
    # Lines with inline word cues are paired with the preceding line
    cued_lines = [(stamped[i - 1], line)
                  for i, line in enumerate(stamped)
                  if re.search(cue_pat, line)]
    if cued_lines:
        return parse_cued(cued_lines)
    return parse_uncued(text)
|
python
|
{
"resource": ""
}
|
q18292
|
Timecode.tc_to_frames
|
train
|
def tc_to_frames(self, timecode):
    """Convert an "HH:MM:SS:FF" timecode string to a frame count."""
    hours, minutes, seconds, sub_frames = map(int, timecode.split(':'))
    ffps = float(self._framerate)
    # Drop-frame timecode skips 6% of the nominal framerate (rounded
    # to the nearest integer) at each minute mark
    dropped = int(round(ffps * .066666)) if self.drop_frame else 0
    # The exact framerate is no longer needed, only the integer one
    ifps = self._int_framerate
    total_minutes = 60 * hours + minutes
    # Frames elapsed assuming no drops...
    elapsed = (ifps * 60 * 60 * hours + ifps * 60 * minutes +
               ifps * seconds + sub_frames)
    # ...minus the frames dropped in every minute that is not a
    # multiple of ten
    elapsed -= dropped * (total_minutes - total_minutes // 10)
    return elapsed + 1
|
python
|
{
"resource": ""
}
|
q18293
|
Timecode.frames_to_tc
|
train
|
def frames_to_tc(self, frames):
    """Convert a frame count back into an (hh, mm, ss, ff) tuple.

    :returns tuple: hours, minutes, seconds, frames of the timecode
    """
    if frames == 0:
        return 0, 0, 0, 0
    ffps = float(self._framerate)
    # Frames dropped at each minute mark (6% of the framerate,
    # rounded); zero for non-drop timecode
    dropped = int(round(ffps * .066666)) if self.drop_frame else 0
    per_hour = int(round(ffps * 60 * 60))
    per_24_hours = per_hour * 24          # timecode rolls over daily
    per_10_minutes = int(round(ffps * 60 * 10))
    per_minute = int(round(ffps) * 60) - dropped
    frame_number = frames - 1
    # Negative time: wrap by adding a full day, then clamp into 24h
    if frame_number < 0:
        frame_number += per_24_hours
    frame_number %= per_24_hours
    if self.drop_frame:
        tens, rem = divmod(frame_number, per_10_minutes)
        if rem > dropped:
            frame_number += (dropped * 9 * tens +
                             dropped * ((rem - dropped) // per_minute))
        else:
            frame_number += dropped * 9 * tens
    ifps = self._int_framerate
    frs = frame_number % ifps
    total_secs = frame_number // ifps
    secs = total_secs % 60
    mins = (total_secs // 60) % 60
    hrs = total_secs // 60 // 60
    return hrs, mins, secs, frs
|
python
|
{
"resource": ""
}
|
q18294
|
make_edl
|
train
|
def make_edl(timestamps, name):
    '''Convert an ordered list of timestamps into an EDL file.'''
    fps_cache = {}
    segments = ["TITLE: {}\nFCM: NON-DROP FRAME\n\n".format(name)]
    rec_in = 0
    for index, timestamp in enumerate(timestamps):
        source = timestamp['file']
        # Probe each source file's framerate only once
        if source not in fps_cache:
            fps_cache[source] = get_fps(source)
        time_in = timestamp['start']
        time_out = timestamp['end']
        rec_out = rec_in + (time_out - time_in)
        reel = str(index + 1).zfill(4)
        segments.append(make_edl_segment(
            reel, time_in, time_out, rec_in, rec_out,
            'reel_{}'.format(reel), source, fps=fps_cache[source]))
        rec_in = rec_out
    with open(name, 'w') as outfile:
        outfile.write(''.join(segments))
|
python
|
{
"resource": ""
}
|
q18295
|
convert_timespan
|
train
|
def convert_timespan(timespan):
    """Split an srt timespan into (start, end) timestamps in seconds."""
    raw_start, raw_end = timespan.split('-->')
    return convert_timestamp(raw_start), convert_timestamp(raw_end)
|
python
|
{
"resource": ""
}
|
q18296
|
convert_timestamp
|
train
|
def convert_timestamp(timestamp):
    """Convert an srt timestamp ("HH:MM:SS,mmm") into seconds."""
    chunk, millis = timestamp.strip().split(',')
    hours, minutes, seconds = (int(part) for part in chunk.split(':'))
    return hours * 3600 + minutes * 60 + seconds + float(millis) / 1000
|
python
|
{
"resource": ""
}
|
q18297
|
clean_srt
|
train
|
def clean_srt(srt):
    """Strip sequence numbers and line breaks from an srt file.

    Returns an OrderedDict mapping each timespan line to the
    space-joined subtitle text that follows it.
    """
    with open(srt, 'r') as handle:
        text = handle.read()
    # Drop the numeric sequence lines that precede each timespan
    text = re.sub(r'^\d+[\n\r]', '', text, flags=re.MULTILINE)
    output = OrderedDict()
    key = ''
    for raw_line in text.splitlines():
        line = raw_line.strip()
        if line.find('-->') > -1:
            key = line
            output[key] = ''
        elif key != '':
            output[key] += line + ' '
    return output
|
python
|
{
"resource": ""
}
|
q18298
|
cleanup_log_files
|
train
|
def cleanup_log_files(outputfile):
    """Remove temp log files found in the output directory.

    Searches the directory containing *outputfile* for files ending
    in "ogg.log" and deletes them.
    """
    directory = os.path.dirname(os.path.abspath(outputfile))
    for filename in os.listdir(directory):
        if filename.endswith('ogg.log'):
            # Join with the directory: os.listdir returns bare names,
            # so removing them directly would target the CWD instead
            # of the output directory
            os.remove(os.path.join(directory, filename))
|
python
|
{
"resource": ""
}
|
q18299
|
demo_supercut
|
train
|
def demo_supercut(composition, padding):
    """Print the timespans to be cut followed by the subtitle line."""
    previous = None
    for clip in composition:
        start = clip['start']
        end = clip['end']
        # Pad the start when this clip overlaps the previous clip
        # from the same file
        if previous is not None and previous['file'] == clip['file'] \
                and start < previous['end']:
            start = start + padding
        print("{1} to {2}:\t{0}".format(clip['line'], start, end))
        previous = clip
|
python
|
{
"resource": ""
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.