| _id (string, 2-7 chars) | title (string, 1-88 chars) | partition (string, 3 classes) | text (string, 75-19.8k chars) | language (string, 1 class) | meta_information (dict) |
|---|---|---|---|---|---|
q20500
|
DataBase.DbGetDeviceAttributeList
|
train
|
def DbGetDeviceAttributeList(self, argin):
""" Return list of attributes matching the wildcard
for the specified device
:param argin: Str[0] = Device name
Str[1] = Wildcard
:type: tango.DevVarStringArray
:return: attribute name list
:rtype: tango.DevVarStringArray """
self._log.debug("In DbGetDeviceAttributeList()")
dev_name = argin[0]
wildcard = argin[1]
if not wildcard:
wildcard = "%"
else:
wildcard = replace_wildcard(wildcard)
return self.db.get_device_attribute_list(dev_name, wildcard)
|
python
|
{
"resource": ""
}
|
q20501
|
DataBase.DbGetHostServersInfo
|
train
|
def DbGetHostServersInfo(self, argin):
""" Get info about all servers running on specified host, name, mode and level
:param argin: Host name
:type: tango.DevString
:return: Server info for all servers running on specified host
:rtype: tango.DevVarStringArray """
self._log.debug("In DbGetHostServersInfo()")
argin = replace_wildcard(argin)
return self.db.get_host_servers_info(argin)
|
python
|
{
"resource": ""
}
|
q20502
|
DataBase.DbRenameServer
|
train
|
def DbRenameServer(self, argin):
""" Rename a device server process
:param argin: str[0] = old device server name (exec/instance)
str[1] = new device server name (exec/instance)
:type: tango.DevVarStringArray
:return:
:rtype: tango.DevVoid """
self._log.debug("In DbRenameServer()")
if len(argin) < 2:
self.warn_stream("DataBase::DbRenameServer(): insufficient number of arguments ")
th_exc(DB_IncorrectArguments,
"insufficient number of arguments (two required: old name and new name",
"DataBase::DbRenameServer")
old_name = argin[0]
new_name = argin[1]
if ('/' not in argin[0]) or ('/' not in argin[1]):
self.warn_stream("DataBase::DbRenameServer(): wrong syntax in command args ")
th_exc(DB_IncorrectArguments,
"Wrong syntax in command args (ds_exec_name/inst_name)",
"DataBase::DbRenameServer")
self.db.rename_server(old_name, new_name)
|
python
|
{
"resource": ""
}
|
q20503
|
DataBase.DbGetHostList
|
train
|
def DbGetHostList(self, argin):
""" Get host list with name matching the specified filter
:param argin: The filter
:type: tango.DevString
:return: Host name list
:rtype: tango.DevVarStringArray """
self._log.debug("In DbGetHostList()")
argin = replace_wildcard(argin)
return self.db.get_host_list(argin)
|
python
|
{
"resource": ""
}
|
q20504
|
DataBase.DbGetClassInheritanceForDevice
|
train
|
def DbGetClassInheritanceForDevice(self, argin):
""" Get class inheritance for the specified device.
:param argin: Device name
:type: tango.DevString
:return: Classes of the specified device.
[0] - is the class of the device.
[1] - is the class from which the device class is inherited.
........and so on
:rtype: tango.DevVarStringArray """
self._log.debug("In DbGetClassInheritanceForDevice()")
return self.db.get_class_inheritance_for_device(argin)
|
python
|
{
"resource": ""
}
|
q20505
|
DataBase.DbDeleteServer
|
train
|
def DbDeleteServer(self, argin):
""" Delete server from the database but dont delete device properties
:param argin: Device server name
:type: tango.DevString
:return:
:rtype: tango.DevVoid """
self._log.debug("In DbDeleteServer()")
if '*' in argin or '%' in argin or not '/' in argin:
self.warn_stream("DataBase::db_delete_server(): server name " + argin + " incorrect ")
th_exc(DB_IncorrectServerName,
"failed to delete server, server name incorrect",
"DataBase::DeleteServer()")
self.db.delete_server(argin)
|
python
|
{
"resource": ""
}
|
q20506
|
DataBase.DbGetAttributeAlias
|
train
|
def DbGetAttributeAlias(self, argin):
""" Get the attribute name for the given alias.
If alias not found in database, returns an empty string.
:param argin: The attribute alias name
:type: tango.DevString
:return: The attribute name (device/attribute)
:rtype: tango.DevString """
self._log.debug("In DbGetAttributeAlias()")
return self.db.get_attribute_alias(argin)
|
python
|
{
"resource": ""
}
|
q20507
|
DataBase.DbGetProperty
|
train
|
def DbGetProperty(self, argin):
""" Get free object property
:param argin: Str[0] = Object name
Str[1] = Property name
Str[n] = Property name
:type: tango.DevVarStringArray
:return: Str[0] = Object name
Str[1] = Property number
Str[2] = Property name
Str[3] = Property value number (array case)
Str[4] = Property value 1
Str[n] = Property value n (array case)
Str[n + 1] = Property name
Str[n + 2] = Property value number (array case)
Str[n + 3] = Property value 1
Str[n + m] = Property value m
:rtype: tango.DevVarStringArray """
self._log.debug("In DbGetProperty()")
object_name = argin[0]
return self.db.get_property(object_name, argin[1:])
|
python
|
{
"resource": ""
}
|
q20508
|
DataBase.DbGetDeviceServerClassList
|
train
|
def DbGetDeviceServerClassList(self, argin):
""" Get list of Tango classes for a device server
:param argin: device server process name
:type: tango.DevString
:return: list of classes for this device server
:rtype: tango.DevVarStringArray """
self._log.debug("In DbGetDeviceServerClassList()")
argin = replace_wildcard(argin)
return self.db.get_server_class_list(argin)
|
python
|
{
"resource": ""
}
|
q20509
|
DataBase.ResetTimingValues
|
train
|
def ResetTimingValues(self):
""" Reset the timing attribute values.
:param :
:type: tango.DevVoid
:return:
:rtype: tango.DevVoid """
self._log.debug("In ResetTimingValues()")
for tmp_timing in self.timing_maps.values():
tmp_timing.average = 0.
tmp_timing.minimum = 0.
tmp_timing.maximum = 0.
tmp_timing.total_elapsed = 0.
tmp_timing.calls = 0.
|
python
|
{
"resource": ""
}
|
q20510
|
DataBase.DbImportDevice
|
train
|
def DbImportDevice(self, argin):
""" Import a device from the database
:param argin: Device name (or alias)
:type: tango.DevString
:return: Str[0] = device name
Str[1] = CORBA IOR
Str[2] = device version
Str[3] = device server process name
Str[4] = host name
Str[5] = Tango class name
Lg[0] = Exported flag
Lg[1] = Device server process PID
:rtype: tango.DevVarLongStringArray """
self._log.debug("In DbImportDevice()")
return self.db.import_device(argin.lower())
|
python
|
{
"resource": ""
}
|
q20511
|
DataBase.DbGetClassForDevice
|
train
|
def DbGetClassForDevice(self, argin):
""" Get Tango class for the specified device.
:param argin: Device name
:type: tango.DevString
:return: Device Tango class
:rtype: tango.DevString """
self._log.debug("In DbGetClassForDevice()")
return self.db.get_class_for_device(argin)
|
python
|
{
"resource": ""
}
|
q20512
|
DataBase.DbGetDeviceAttributePropertyHist
|
train
|
def DbGetDeviceAttributePropertyHist(self, argin):
""" Retrieve device attribute property history
:param argin: Str[0] = Device name
Str[1] = Attribute name
Str[2] = Property name
:type: tango.DevVarStringArray
:return: Str[0] = Attribute name
Str[1] = Property name
Str[2] = date
Str[3] = Property value number (array case)
Str[4] = Property value 1
Str[n] = Property value n
:rtype: tango.DevVarStringArray """
self._log.debug("In DbGetDeviceAttributePropertyHist()")
dev_name = argin[0]
attribute = replace_wildcard(argin[1])
prop_name = replace_wildcard(argin[2])
return self.db.get_device_attribute_property_hist(dev_name, attribute, prop_name)
|
python
|
{
"resource": ""
}
|
q20513
|
DataBase.DbGetServerInfo
|
train
|
def DbGetServerInfo(self, argin):
""" Get info about host, mode and level for specified server
:param argin: server name
:type: tango.DevString
:return: server info
:rtype: tango.DevVarStringArray """
self._log.debug("In DbGetServerInfo()")
return self.db.get_server_info(argin)
|
python
|
{
"resource": ""
}
|
q20514
|
DataBase.DbPutDeviceAlias
|
train
|
def DbPutDeviceAlias(self, argin):
""" Define alias for a given device name
:param argin: Str[0] = device name
Str[1] = alias name
:type: tango.DevVarStringArray
:return:
:rtype: tango.DevVoid """
self._log.debug("In DbPutDeviceAlias()")
if len(argin) < 2:
self.warn_stream("DataBase::DbPutDeviceAlias(): insufficient number of arguments ")
th_exc(DB_IncorrectArguments,
"insufficient number of arguments to put device alias",
"DataBase::DbPutDeviceAlias()")
device_name = argin[0]
device_alias = argin[1]
self.db.put_device_alias(device_name, device_alias)
|
python
|
{
"resource": ""
}
|
q20515
|
DataBase.DbGetDevicePropertyList
|
train
|
def DbGetDevicePropertyList(self, argin):
""" Get property list belonging to the specified device and with
name matching the specified filter
:param argin: Str[0] = device name
Str[1] = Filter
:type: tango.DevVarStringArray
:return: Property name list
:rtype: tango.DevVarStringArray """
self._log.debug("In DbGetDevicePropertyList()")
device_name = argin[0]
prop_filter = argin[1]
prop_filter = replace_wildcard(prop_filter)
return self.db.get_device_property_list(device_name, prop_filter)
|
python
|
{
"resource": ""
}
|
q20516
|
DataBase.DbGetHostServerList
|
train
|
def DbGetHostServerList(self, argin):
""" Get list of device server process name running on host with name matching
the specified filter
:param argin: The filter
:type: tango.DevString
:return: Device server process name list
:rtype: tango.DevVarStringArray """
self._log.debug("In DbGetHostServerList()")
argin = replace_wildcard(argin)
return self.db.get_host_server_list(argin)
|
python
|
{
"resource": ""
}
|
q20517
|
DataBase.DbGetObjectList
|
train
|
def DbGetObjectList(self, argin):
""" Get list of free object defined in database with name
matching the specified filter
:param argin: The filter
:type: tango.DevString
:return: Object name list
:rtype: tango.DevVarStringArray """
self._log.debug("In DbGetObjectList()")
argin = replace_wildcard(argin)
return self.db.get_object_list(argin)
|
python
|
{
"resource": ""
}
|
q20518
|
DataBase.DbDeleteClassAttributeProperty
|
train
|
def DbDeleteClassAttributeProperty(self, argin):
""" delete class attribute properties from database
:param argin: Str[0] = Tango class name
Str[1] = Attribute name
Str[2] = Property name
Str[n] = Property name
:type: tango.DevVarStringArray
:return:
:rtype: tango.DevVoid """
self._log.debug("In DbDeleteClassAttributeProperty()")
if len(argin) < 3:
self.warn_stream("DataBase::db_delete_class_attribute_property(): insufficient number of arguments ")
th_exc(DB_IncorrectArguments,
"insufficient number of arguments to delete class attribute property",
"DataBase::DeleteClassAttributeProperty()")
klass_name, attr_name = argin[:2]
for prop_name in argin[2:]:
self.db.delete_class_attribute_property(klass_name, attr_name, prop_name)
|
python
|
{
"resource": ""
}
|
q20519
|
DataBase.DbGetInstanceNameList
|
train
|
def DbGetInstanceNameList(self, argin):
""" Returns the instance names found for specified server.
:param argin: Server name
:type: tango.DevString
:return: The instance names found for specified server.
:rtype: tango.DevVarStringArray """
self._log.debug("In DbGetInstanceNameList()")
return self.db.get_instance_name_list(argin)
|
python
|
{
"resource": ""
}
|
q20520
|
DataBase.DbGetAttributeAlias2
|
train
|
def DbGetAttributeAlias2(self, argin):
""" Get the attribute alias from the attribute name.
Returns one empty string if nothing found in database
:param argin: The attribute name (dev_name/att_name)
:type: tango.DevString
:return: The attribute alias name (or empty string)
:rtype: tango.DevString """
self._log.debug("In DbGetAttributeAlias2()")
attr_name = argin[0]
return self.db.get_attribute_alias2(attr_name)
|
python
|
{
"resource": ""
}
|
q20521
|
DataBase.DbAddServer
|
train
|
def DbAddServer(self, argin):
""" Create a device server process entry in database
:param argin: Str[0] = Full device server name
Str[1] = Device(s) name
Str[2] = Tango class name
Str[n] = Device name
Str[n + 1] = Tango class name
:type: tango.DevVarStringArray
:return:
:rtype: tango.DevVoid """
self._log.debug("In DbAddServer()")
if len(argin) < 3 or not len(argin) % 2:
self.warn_stream("DataBase::AddServer(): incorrect number of input arguments ")
th_exc(DB_IncorrectArguments,
"incorrect no. of input arguments, needs at least 3 (server,device,class)",
"DataBase::AddServer()")
server_name = argin[0]
for i in range((len(argin) - 1) // 2):
d_name, klass_name = argin[i * 2 + 1], argin[i * 2 + 2]
ret, dev_name, dfm = check_device_name(d_name)
if not ret:
th_exc(DB_IncorrectDeviceName,
"device name (" + d_name + ") syntax error (should be [tango:][//instance/]domain/family/member)",
"DataBase::AddServer()")
self.db.add_device(server_name, (dev_name, dfm) , klass_name)
|
python
|
{
"resource": ""
}
|
q20522
|
DataBase.DbImportEvent
|
train
|
def DbImportEvent(self, argin):
""" Get event channel info from database
:param argin: name of event channel or factory
:type: tango.DevString
:return: export information e.g. IOR
:rtype: tango.DevVarLongStringArray """
self._log.debug("In DbImportEvent()")
argin = replace_wildcard(argin.lower())
return self.db.import_event(argin)
|
python
|
{
"resource": ""
}
|
q20523
|
DataBase.DbGetDevicePropertyHist
|
train
|
def DbGetDevicePropertyHist(self, argin):
""" Retrieve device property history
:param argin: Str[0] = Device name
Str[1] = Property name
:type: tango.DevVarStringArray
:return: Str[0] = Property name
Str[1] = date
Str[2] = Property value number (array case)
Str[3] = Property value 1
Str[n] = Property value n
:rtype: tango.DevVarStringArray """
self._log.debug("In DbGetDevicePropertyHist()")
device_name = argin[0]
prop_name = argin[1]
return self.db.get_device_property_hist(device_name, prop_name)
|
python
|
{
"resource": ""
}
|
q20524
|
DataBase.DbGetServerNameList
|
train
|
def DbGetServerNameList(self, argin):
""" Returns the list of server names found for the wildcard specified.
Unlike DbGetServerList, it returns only the server executable name, without the instance name.
:param argin: wildcard for server names.
:type: tango.DevString
:return: server names found.
:rtype: tango.DevVarStringArray """
self._log.debug("In DbGetServerNameList()")
argin = replace_wildcard(argin)
return self.db.get_server_name_list(argin)
|
python
|
{
"resource": ""
}
|
q20525
|
DataBase.DbGetDeviceAttributeProperty2
|
train
|
def DbGetDeviceAttributeProperty2(self, argin):
""" Retrieve device attribute properties. This command has the possibility to retrieve
device attribute properties which are arrays. It is not possible with the old
DbGetDeviceAttributeProperty command. Nevertheless, the old command has not been
deleted for compatibility reasons
:param argin: Str[0] = Device name
Str[1] = Attribute name
Str[n] = Attribute name
:type: tango.DevVarStringArray
:return: Str[0] = Device name
Str[1] = Attribute property number
Str[2] = Attribute property 1 name
Str[3] = Attribute property 1 value number (array case)
Str[4] = Attribute property 1 value
Str[n] = Attribute property 1 value (array case)
Str[n + 1] = Attribute property 2 name
Str[n + 2] = Attribute property 2 value number (array case)
Str[n + 3] = Attribute property 2 value
Str[n + m] = Attribute property 2 value (array case)
:rtype: tango.DevVarStringArray """
self._log.debug("In DbGetDeviceAttributeProperty2()")
dev_name = argin[0]
return self.db.get_device_attribute_property2(dev_name, argin[1:])
|
python
|
{
"resource": ""
}
|
q20526
|
DataBase.DbDeleteClassProperty
|
train
|
def DbDeleteClassProperty(self, argin):
""" Delete class properties from database
:param argin: Str[0] = Tango class name
Str[1] = Property name
Str[n] = Property name
:type: tango.DevVarStringArray
:return:
:rtype: tango.DevVoid """
self._log.debug("In DbDeleteClassProperty()")
klass_name = argin[0]
for prop_name in argin[1:]:
self.db.delete_class_property(klass_name, prop_name)
|
python
|
{
"resource": ""
}
|
q20527
|
DataBase.DbUnExportEvent
|
train
|
def DbUnExportEvent(self, argin):
""" Mark one event channel as non exported in database
:param argin: name of event channel or factory to unexport
:type: tango.DevString
:return: none
:rtype: tango.DevVoid """
self._log.debug("In DbUnExportEvent()")
event_name = argin[0].lower()
self.db.unexport_event(event_name)
|
python
|
{
"resource": ""
}
|
q20528
|
DataBase.DbPutClassAttributeProperty2
|
train
|
def DbPutClassAttributeProperty2(self, argin):
""" This command adds support for array properties compared to the previous one
called DbPutClassAttributeProperty. The old command is still there for compatibility reasons
:param argin: Str[0] = Tango class name
Str[1] = Attribute number
Str[2] = Attribute name
Str[3] = Property number
Str[4] = Property name
Str[5] = Property value number (array case)
Str[6] = Property value 1
Str[n] = Property value n (array case)
.....
:type: tango.DevVarStringArray
:return:
:rtype: tango.DevVoid """
self._log.debug("In DbPutClassAttributeProperty2()")
class_name = argin[0]
nb_attributes = int(argin[1])
self.db.put_class_attribute_property2(class_name, nb_attributes, argin[2:])
|
python
|
{
"resource": ""
}
|
q20529
|
DataBase.DbPutServerInfo
|
train
|
def DbPutServerInfo(self, argin):
""" Update server info including host, mode and level
:param argin: server info
:type: tango.DevVarStringArray
:return:
:rtype: tango.DevVoid """
self._log.debug("In DbPutServerInfo()")
if len(argin) < 4:
self.warn_stream("DataBase::DbPutServerInfo(): insufficient number of arguments ")
th_exc(DB_IncorrectArguments,
"insufficient server info",
"DataBase::DbPutServerInfo()")
tmp_server = argin[0].lower()
tmp_host = argin[1]
tmp_mode = argin[2]
tmp_level = argin[3]
tmp_extra = []
if len(argin) > 4:
tmp_extra = argin[4:]
tmp_len = len(argin) - 1
self.db.put_server_info(tmp_server, tmp_host, tmp_mode, tmp_level, tmp_extra)
|
python
|
{
"resource": ""
}
|
q20530
|
DataBase.DbDeleteDeviceAlias
|
train
|
def DbDeleteDeviceAlias(self, argin):
""" Delete a device alias.
:param argin: device alias name
:type: tango.DevString
:return:
:rtype: tango.DevVoid """
self._log.debug("In DbDeleteDeviceAlias()")
self.db.delete_device_alias(argin)
|
python
|
{
"resource": ""
}
|
q20531
|
DataBase.DbExportEvent
|
train
|
def DbExportEvent(self, argin):
""" Export Event channel to database
:param argin: Str[0] = event channel name (or factory name)
Str[1] = CORBA IOR
Str[2] = Notifd host name
Str[3] = Notifd pid
Str[4] = Notifd version
:type: tango.DevVarStringArray
:return:
:rtype: tango.DevVoid """
self._log.debug("In DbExportEvent()")
if len(argin) < 5:
self.warn_stream("DataBase::db_export_event(): insufficient export info for event ")
th_exc(DB_IncorrectArguments,
"insufficient export info for event",
"DataBase::ExportEvent()")
event, IOR, host, pid, version = argin[:5]
event = replace_wildcard(event.lower())
self.db.export_event(event, IOR, host, pid, version)
|
python
|
{
"resource": ""
}
|
q20532
|
DataBase.DbGetPropertyHist
|
train
|
def DbGetPropertyHist(self, argin):
""" Retrieve object property history
:param argin: Str[0] = Object name
Str[2] = Property name
:type: tango.DevVarStringArray
:return: Str[0] = Property name
Str[1] = date
Str[2] = Property value number (array case)
Str[3] = Property value 1
Str[n] = Property value n
:rtype: tango.DevVarStringArray """
self._log.debug("In DbGetPropertyHist()")
object_name = argin[0]
prop_name = argin[1]
return self.db.get_property_hist(object_name, prop_name)
|
python
|
{
"resource": ""
}
|
q20533
|
DataBase.DbGetDeviceMemberList
|
train
|
def DbGetDeviceMemberList(self, argin):
""" Get a list of device name members for device name matching the
specified filter
:param argin: The filter
:type: tango.DevString
:return: Device names member list
:rtype: tango.DevVarStringArray """
self._log.debug("In DbGetDeviceMemberList()")
argin = replace_wildcard(argin)
return self.db.get_device_member_list(argin)
|
python
|
{
"resource": ""
}
|
q20534
|
DataBase.DbGetClassList
|
train
|
def DbGetClassList(self, argin):
""" Get Tango class list with a specified filter
:param argin: Filter
:type: tango.DevString
:return: Class list
:rtype: tango.DevVarStringArray """
self._log.debug("In DbGetClassList()")
server = replace_wildcard(argin)
return self.db.get_class_list(server)
|
python
|
{
"resource": ""
}
|
q20535
|
DataBase.DbGetAliasAttribute
|
train
|
def DbGetAliasAttribute(self, argin):
""" Get the attribute name from the given alias.
If the given alias is not found in database, returns an empty string
:param argin: The attribute alias
:type: tango.DevString
:return: The attribute name (dev_name/att_name)
:rtype: tango.DevString """
self._log.debug("In DbGetAliasAttribute()")
alias_name = argin[0]
return self.db.get_alias_attribute(alias_name)
|
python
|
{
"resource": ""
}
|
q20536
|
DataBase.DbDeleteServerInfo
|
train
|
def DbDeleteServerInfo(self, argin):
""" delete info related to a Tango devvice server process
:param argin: Device server name
:type: tango.DevString
:return:
:rtype: tango.DevVoid """
self._log.debug("In DbDeleteServerInfo()")
self.db.delete_server_info(argin)
|
python
|
{
"resource": ""
}
|
q20537
|
DataBase.DbGetClassAttributeList
|
train
|
def DbGetClassAttributeList(self, argin):
""" Get attrilute list for a given Tango class with a specified filter
:param argin: Str[0] = Tango class name
Str[1] = Attribute name filter (eg: att*)
:type: tango.DevVarStringArray
:return: Str[0] = Class attribute name
Str[n] = Class attribute name
:rtype: tango.DevVarStringArray """
self._log.debug("In DbGetClassAttributeList()")
class_name = argin[0]
wildcard = replace_wildcard(argin[1])
return self.db.get_class_attribute_list(class_name, wildcard)
|
python
|
{
"resource": ""
}
|
q20538
|
DataBase.DbAddDevice
|
train
|
def DbAddDevice(self, argin):
""" Add a Tango class device to a specific device server
:param argin: Str[0] = Full device server process name
Str[1] = Device name
Str[2] = Tango class name
:type: tango.DevVarStringArray
:return:
:rtype: tango.DevVoid """
self._log.debug("In DbAddDevice()")
if len(argin) < 3:
self.warn_stream("DataBase::AddDevice(): incorrect number of input arguments ")
th_exc(DB_IncorrectArguments,
"incorrect no. of input arguments, needs at least 3 (server,device,class)",
"DataBase::AddDevice()")
self.info_stream("DataBase::AddDevice(): insert %s server with device %s",argin[0],argin[1])
server_name, d_name, klass_name = argin[:3]
if len(argin) > 3:
alias = argin[3]
else:
alias = None
ret, dev_name, dfm = check_device_name(d_name)
if not ret:
th_exc(DB_IncorrectDeviceName,
"device name (" + d_name + ") syntax error (should be [tango:][//instance/]domain/family/member)",
"DataBase::AddDevice()")
# Lock table
self.db.add_device(server_name, (dev_name, dfm) , klass_name, alias=alias)
|
python
|
{
"resource": ""
}
|
q20539
|
DataBase.DbGetDeviceList
|
train
|
def DbGetDeviceList(self, argin):
""" Get a list of devices for specified server and class.
:param argin: argin[0] : server name
argin[1] : class name
:type: tango.DevVarStringArray
:return: The list of devices for specified server and class.
:rtype: tango.DevVarStringArray """
self._log.debug("In DbGetDeviceList()")
server_name = replace_wildcard(argin[0])
class_name = replace_wildcard(argin[1])
return self.db.get_device_list(server_name, class_name)
|
python
|
{
"resource": ""
}
|
q20540
|
FuturesExecutor.delegate
|
train
|
def delegate(self, fn, *args, **kwargs):
"""Return the given operation as a concurrent future."""
return self.subexecutor.submit(fn, *args, **kwargs)
|
python
|
{
"resource": ""
}
|
q20541
|
get_enum_labels
|
train
|
def get_enum_labels(enum_cls):
"""
Return list of enumeration labels from Enum class.
The list is useful when creating an attribute, for the
`enum_labels` parameter. The enumeration values are checked
to ensure they are unique, start at zero, and increment by one.
:param enum_cls: the Enum class to be inspected
:type enum_cls: :py:obj:`enum.Enum`
:return: List of label strings
:rtype: :py:obj:`list`
:raises EnumTypeError: in case the given class is invalid
"""
if not issubclass(enum_cls, enum.Enum):
raise EnumTypeError("Input class '%s' must be derived from enum.Enum"
% enum_cls)
# Check there are no duplicate labels
try:
enum.unique(enum_cls)
except ValueError as exc:
raise EnumTypeError("Input class '%s' must be unique - %s"
% (enum_cls, exc))
# Check the values start at 0, and increment by 1, since that is
# assumed by tango's DEV_ENUM implementation.
values = [member.value for member in enum_cls]
if not values:
raise EnumTypeError("Input class '%s' has no members!" % enum_cls)
expected_value = 0
for value in values:
if value != expected_value:
raise EnumTypeError("Enum values for '%s' must start at 0 and "
"increment by 1. Values: %s"
% (enum_cls, values))
expected_value += 1
return [member.name for member in enum_cls]
|
python
|
{
"resource": ""
}
|
q20542
|
is_numerical
|
train
|
def is_numerical(tg_type, inc_array=False):
"""Tells if the given tango type is numerical
:param tg_type: tango type
:type tg_type: :class:`tango.CmdArgType`
:param inc_array: (optional, default is False) determines if include array
in the list of checked types
:type inc_array: :py:obj:`bool`
:return: True if the given tango type is a numerical or False otherwise
:rtype: :py:obj:`bool`
"""
global _scalar_numerical_types, _array_numerical_types
if tg_type in _scalar_numerical_types:
return True
if not inc_array:
return False
return tg_type in _array_numerical_types
|
python
|
{
"resource": ""
}
|
q20543
|
is_int
|
train
|
def is_int(tg_type, inc_array=False):
"""Tells if the given tango type is integer
:param tg_type: tango type
:type tg_type: :class:`tango.CmdArgType`
:param inc_array: (optional, default is False) determines if include array
in the list of checked types
:type inc_array: :py:obj:`bool`
:return: True if the given tango type is integer or False otherwise
:rtype: :py:obj:`bool`
"""
global _scalar_int_types, _array_int_types
if tg_type in _scalar_int_types:
return True
if not inc_array:
return False
return tg_type in _array_int_types
|
python
|
{
"resource": ""
}
|
q20544
|
is_float
|
train
|
def is_float(tg_type, inc_array=False):
"""Tells if the given tango type is float
:param tg_type: tango type
:type tg_type: :class:`tango.CmdArgType`
:param inc_array: (optional, default is False) determines if include array
in the list of checked types
:type inc_array: :py:obj:`bool`
:return: True if the given tango type is float or False otherwise
:rtype: :py:obj:`bool`
"""
global _scalar_float_types, _array_float_types
if tg_type in _scalar_float_types:
return True
if not inc_array:
return False
return tg_type in _array_float_types
|
python
|
{
"resource": ""
}
|
q20545
|
is_bool
|
train
|
def is_bool(tg_type, inc_array=False):
"""Tells if the given tango type is boolean
:param tg_type: tango type
:type tg_type: :class:`tango.CmdArgType`
:param inc_array: (optional, default is False) determines if include array
in the list of checked types
:type inc_array: :py:obj:`bool`
:return: True if the given tango type is boolean or False otherwise
:rtype: :py:obj:`bool`
"""
global _scalar_bool_types, _array_bool_types
if tg_type in _scalar_bool_types:
return True
if not inc_array:
return False
return tg_type in _array_bool_types
|
python
|
{
"resource": ""
}
|
q20546
|
is_str
|
train
|
def is_str(tg_type, inc_array=False):
"""Tells if the given tango type is string
:param tg_type: tango type
:type tg_type: :class:`tango.CmdArgType`
:param inc_array: (optional, default is False) determines if include array
in the list of checked types
:type inc_array: :py:obj:`bool`
:return: True if the given tango type is string or False otherwise
:rtype: :py:obj:`bool`
"""
global _scalar_str_types, _array_str_types
if tg_type in _scalar_str_types:
return True
if not inc_array:
return False
return tg_type in _array_str_types
|
python
|
{
"resource": ""
}
|
q20547
|
str_2_obj
|
train
|
def str_2_obj(obj_str, tg_type=None):
"""Converts a string into an object according to the given tango type
:param obj_str: the string to be converted
:type obj_str: :py:obj:`str`
:param tg_type: tango type
:type tg_type: :class:`tango.CmdArgType`
:return: an object calculated from the given string
:rtype: :py:obj:`object`
"""
if tg_type is None:
return obj_str
f = str
if is_scalar_type(tg_type):
if is_numerical_type(tg_type):
if obj_str in __NO_STR_VALUE:
return None
if is_int_type(tg_type):
f = int
elif is_float_type(tg_type):
f = float
elif is_bool_type(tg_type):
f = bool_
return f(obj_str)
|
python
|
{
"resource": ""
}
|
q20548
|
obj_2_str
|
train
|
def obj_2_str(obj, tg_type=None):
"""Converts a python object into a string according to the given tango type
:param obj: the object to be converted
:type obj: :py:obj:`object`
:param tg_type: tango type
:type tg_type: :class:`tango.CmdArgType`
:return: a string representation of the given object
:rtype: :py:obj:`str`
"""
if tg_type is None:
return obj
if tg_type in _scalar_types:
# scalar cases
if is_pure_str(obj):
return obj
elif is_non_str_seq(obj):
if not len(obj):
return ""
obj = obj[0]
return str(obj)
# sequence cases
if obj is None:
return ''
return '\n'.join([str(i) for i in obj])
|
python
|
{
"resource": ""
}
|
q20549
|
copy_doc
|
train
|
def copy_doc(klass, fnname):
"""Copies documentation string of a method from the super class into the
rewritten method of the given class"""
base_meth, base_func = __get_meth_func(klass.__base__, fnname)
meth, func = __get_meth_func(klass, fnname)
func.__doc__ = base_func.__doc__
|
python
|
{
"resource": ""
}
|
q20550
|
get_home
|
train
|
def get_home():
"""
Find user's home directory if possible. Otherwise raise error.
:return: user's home directory
:rtype: :py:obj:`str`
New in PyTango 7.1.4
"""
path = ''
try:
path = os.path.expanduser("~")
except:
pass
if not os.path.isdir(path):
for evar in ('HOME', 'USERPROFILE', 'TMP'):
try:
path = os.environ[evar]
if os.path.isdir(path):
break
except:
pass
if path:
return path
else:
raise RuntimeError('please define environment variable $HOME')
|
python
|
{
"resource": ""
}
|
q20551
|
_get_env_var
|
train
|
def _get_env_var(env_var_name):
"""
Returns the value for the given environment name
Search order:
* a real environ var
* HOME/.tangorc
* /etc/tangorc
:param env_var_name: the environment variable name
:type env_var_name: str
:return: the value for the given environment name
:rtype: str
New in PyTango 7.1.4
"""
if env_var_name in os.environ:
return os.environ[env_var_name]
fname = os.path.join(get_home(), '.tangorc')
if not os.path.exists(fname):
if os.name == 'posix':
fname = "/etc/tangorc"
if not os.path.exists(fname):
return None
for line in open(fname):
strippedline = line.split('#', 1)[0].strip()
if not strippedline:
# empty line
continue
tup = strippedline.split('=', 1)
if len(tup) != 2:
# illegal line!
continue
key, val = map(str.strip, tup)
if key == env_var_name:
return val
|
python
|
{
"resource": ""
}
|
q20552
|
get_attrs
|
train
|
def get_attrs(obj):
"""Helper for dir2 implementation."""
if not hasattr(obj, '__dict__'):
return [] # slots only
proxy_type = types.MappingProxyType if six.PY3 else types.DictProxyType
if not isinstance(obj.__dict__, (dict, proxy_type)):
print(type(obj.__dict__), obj)
raise TypeError("%s.__dict__ is not a dictionary" % obj.__name__)
return obj.__dict__.keys()
|
python
|
{
"resource": ""
}
|
q20553
|
dir2
|
train
|
def dir2(obj):
"""Default dir implementation.
Inspired by gist: katyukha/dirmixin.py
https://gist.github.com/katyukha/c6e5e2b829e247c9b009
"""
attrs = set()
if not hasattr(obj, '__bases__'):
# obj is an instance
if not hasattr(obj, '__class__'):
# slots
return sorted(get_attrs(obj))
klass = obj.__class__
attrs.update(get_attrs(klass))
else:
# obj is a class
klass = obj
for cls in klass.__bases__:
attrs.update(get_attrs(cls))
attrs.update(dir2(cls))
attrs.update(get_attrs(obj))
return list(attrs)
|
python
|
{
"resource": ""
}
|
q20554
|
CaselessList.findentry
|
train
|
def findentry(self, item):
"""A caseless way of checking if an item is in the list or not.
It returns None or the entry."""
if not isinstance(item, str):
raise TypeError(
'Members of this object must be strings. '
'You supplied \"%s\"' % type(item))
for entry in self:
if item.lower() == entry.lower():
return entry
return None
|
python
|
{
"resource": ""
}
|
q20555
|
CaselessList.remove
|
train
|
def remove(self, item):
"""Remove the first occurence of an item, the caseless way."""
for entry in self:
if item.lower() == entry.lower():
list.remove(self, entry)
return
raise ValueError(': list.remove(x): x not in list')
|
python
|
{
"resource": ""
}
|
q20556
|
CaselessList.append
|
train
|
def append(self, item):
"""Adds an item to the list and checks it's a string."""
if not isinstance(item, str):
raise TypeError(
'Members of this object must be strings. '
'You supplied \"%s\"' % type(item))
list.append(self, item)
|
python
|
{
"resource": ""
}
|
q20557
|
CaselessList.extend
|
train
|
def extend(self, item):
"""Extend the list with another list. Each member of the list must be
a string."""
if not isinstance(item, list):
raise TypeError(
'You can only extend lists with lists. '
'You supplied \"%s\"' % type(item))
for entry in item:
if not isinstance(entry, str):
raise TypeError(
'Members of this object must be strings. '
'You supplied \"%s\"' % type(entry))
list.append(self, entry)
|
python
|
{
"resource": ""
}
|
q20558
|
CaselessList.count
|
train
|
def count(self, item):
"""Counts references to 'item' in a caseless manner.
If item is not a string it will always return 0."""
if not isinstance(item, str):
return 0
count = 0
for entry in self:
if item.lower() == entry.lower():
count += 1
return count
|
python
|
{
"resource": ""
}
|
q20559
|
candidates
|
train
|
def candidates(word):
"Generate possible spelling corrections for word."
return (known([word]) or known(edits1(word)) or known(edits2(word)) or [word])
|
python
|
{
"resource": ""
}
|
q20560
|
Chat.__substituteFromClientStatement
|
train
|
def __substituteFromClientStatement(self,match,prevResponse,extraSymbol="",sessionID = "general"):
"""
Substitute from Client statement into response
"""
prev = 0
startPadding = 1+len(extraSymbol)
finalResponse = ""
for m in re.finditer(r'%'+extraSymbol+'[0-9]+', prevResponse):
start = m.start(0)
end = m.end(0)
num = int(prevResponse[start+startPadding:end])
finalResponse += prevResponse[prev:start]
try:finalResponse += self._quote(self._substitute(match.group(num)),sessionID)
except IndexError as e:pass
prev = end
namedGroup = match.groupdict()
if namedGroup:
prevResponse = finalResponse + prevResponse[prev:]
finalResponse = ""
prev = 0
for m in re.finditer(r'%'+extraSymbol+'([a-zA-Z_][a-zA-Z_0-9]*)([^a-zA-Z_0-9]|$)', prevResponse):
start = m.start(1)
end = m.end(1)
finalResponse += prevResponse[prev:start]
try:
value = namedGroup[prevResponse[start+startPadding:end]]
if value:finalResponse += self._quote(self._substitute(value),sessionID)
except KeyError as e:pass
prev = end
return finalResponse + prevResponse[prev:]
|
python
|
{
"resource": ""
}
|
q20561
|
Chat.respond
|
train
|
def respond(self, text, sessionID = "general"):
"""
Generate a response to the user input.
:type text: str
:param text: The string to be mapped
:rtype: str
"""
text = self.__normalize(text)
previousText = self.__normalize(self.conversation[sessionID][-2])
text_correction = self.__correction(text)
current_topic = self.topic[sessionID]
current_topic_order = current_topic.split(".")
while current_topic_order:
try:return self.__response_on_topic(text, previousText, text_correction, current_topic, sessionID)
except ValueError as e:pass
current_topic_order.pop()
current_topic = ".".join(current_topic_order)
try:return self.__response_on_topic(text, previousText, text_correction, current_topic, sessionID)
except ValueError as e:return "Sorry I couldn't find anything relevant"
|
python
|
{
"resource": ""
}
|
q20562
|
run
|
train
|
def run(graph, *, plugins=None, services=None, strategy=None):
"""
Main entry point of bonobo. It takes a graph and creates all the necessary plumbing around to execute it.
The only necessary argument is a :class:`Graph` instance, containing the logic you actually want to execute.
By default, this graph will be executed using the "threadpool" strategy: each graph node will be wrapped in a
thread, and executed in a loop until there is no more input to this node.
You can provide plugin factory objects in the plugins list; this function will add the necessary plugins for
interactive console execution and jupyter notebook execution if it detects correctly that it runs in this context.
You'll probably want to provide a services dictionary mapping service names to service instances.
:param Graph graph: The :class:`Graph` to execute.
:param str strategy: The :class:`bonobo.execution.strategies.base.Strategy` to use.
:param list plugins: The list of plugins to enhance execution.
:param dict services: The implementations of services this graph will use.
:return bonobo.execution.graph.GraphExecutionContext:
"""
plugins = plugins or []
from bonobo import settings
settings.check()
if not settings.QUIET.get(): # pragma: no cover
if _is_interactive_console():
import mondrian
mondrian.setup(excepthook=True)
from bonobo.plugins.console import ConsoleOutputPlugin
if ConsoleOutputPlugin not in plugins:
plugins.append(ConsoleOutputPlugin)
if _is_jupyter_notebook():
try:
from bonobo.contrib.jupyter import JupyterOutputPlugin
except ImportError:
import logging
logging.warning(
'Failed to load jupyter widget. Easiest way is to install the optional "jupyter" '
"dependencies with «pip install bonobo[jupyter]», but you can also install a specific "
"version by yourself."
)
else:
if JupyterOutputPlugin not in plugins:
plugins.append(JupyterOutputPlugin)
import logging
logging.getLogger().setLevel(settings.LOGGING_LEVEL.get())
strategy = create_strategy(strategy)
from bonobo.util.errors import sweeten_errors
with sweeten_errors():
return strategy.execute(graph, plugins=plugins, services=services)
|
python
|
{
"resource": ""
}
|
q20563
|
get_argument_parser
|
train
|
def get_argument_parser(parser=None):
"""
Creates an argument parser with arguments to override the system environment.
:api: bonobo.get_argument_parser
:param parser:
:return:
"""
if parser is None:
parser = argparse.ArgumentParser()
# Store globally to be able to warn the user later that they probably should have passed a parser to
# parse_args().
global _parser
_parser = parser
_parser.add_argument("--default-env-file", "-E", action="append")
_parser.add_argument("--default-env", action="append")
_parser.add_argument("--env-file", action="append")
_parser.add_argument("--env", "-e", action="append")
return _parser
|
python
|
{
"resource": ""
}
|
q20564
|
parse_args
|
train
|
def parse_args(mixed=None):
"""
Context manager to extract and apply environment related options from the provided argparser result.
A dictionary with unknown options will be yielded, so the remaining options can be used by the caller.
:api: bonobo.patch_environ
:param mixed: ArgumentParser instance, Namespace, or dict.
:return:
"""
if mixed is None:
global _parser
if _parser is not None:
warnings.warn(
"You are calling bonobo.parse_args() without a parser argument, but it looks like you created a parser before. You probably want to pass your parser to this call, or if creating a new parser here is really what you want to do, please create a new one explicitely to silence this warning."
)
# use the api from bonobo namespace, in case a command patched it.
import bonobo
mixed = bonobo.get_argument_parser()
if isinstance(mixed, argparse.ArgumentParser):
options = mixed.parse_args()
else:
options = mixed
if not isinstance(options, dict):
options = options.__dict__
# make a copy so we don't pollute our parent variables.
options = dict(options)
# storage for values before patch.
_backup = {}
# Priority order: --env > --env-file > system > --default-env > --default-env-file
#
# * The code below reads default-env before default-env-file, so that if the former sets something, default-env-file
# won't override it.
# * Then, env-file is read before env, as the behaviour will be the opposite (env will override a var even if
# env-file sets something).
try:
# Set default environment
for name, value in map(parse_var, options.pop("default_env", []) or []):
if not name in os.environ:
if not name in _backup:
_backup[name] = os.environ.get(name, None)
os.environ[name] = value
# Read and set default environment from file(s)
for filename in options.pop("default_env_file", []) or []:
for name, value in load_env_from_file(filename):
if not name in os.environ:
if not name in _backup:
_backup[name] = os.environ.get(name, None)
os.environ[name] = value
# Read and set environment from file(s)
for filename in options.pop("env_file", []) or []:
for name, value in load_env_from_file(filename):
if not name in _backup:
_backup[name] = os.environ.get(name, None)
os.environ[name] = value
# Set environment
for name, value in map(parse_var, options.pop("env", []) or []):
if not name in _backup:
_backup[name] = os.environ.get(name, None)
os.environ[name] = value
yield options
finally:
for name, value in _backup.items():
if value is None:
del os.environ[name]
else:
os.environ[name] = value
|
python
|
{
"resource": ""
}
|
q20565
|
ApiHelper.register
|
train
|
def register(self, x, graph=False):
"""Register a function as being part of an API, then returns the original function."""
if graph:
# This function must comply with the "graph" API interface, meaning it can behave like bonobo.run.
from inspect import signature
parameters = list(signature(x).parameters)
required_parameters = {"plugins", "services", "strategy"}
assert (
len(parameters) > 0 and parameters[0] == "graph"
), 'First parameter of a graph api function must be "graph".'
assert (
required_parameters.intersection(parameters) == required_parameters
), "Graph api functions must define the following parameters: " + ", ".join(sorted(required_parameters))
self.__all__.append(get_name(x))
return x
|
python
|
{
"resource": ""
}
|
q20566
|
_open_url
|
train
|
def _open_url(url):
"""Open a HTTP connection to the URL and return a file-like object."""
response = requests.get(url, stream=True)
if response.status_code != 200:
raise IOError("Unable to download {}, HTTP {}".format(url, response.status_code))
return response
|
python
|
{
"resource": ""
}
|
q20567
|
BaseGraphExecutionContext.xstatus
|
train
|
def xstatus(self):
"""
UNIX-like exit status, only coherent if the context has stopped.
"""
return max(node.xstatus for node in self.nodes) if len(self.nodes) else 0
|
python
|
{
"resource": ""
}
|
q20568
|
GraphExecutionContext.write
|
train
|
def write(self, *messages):
"""Push a list of messages in the inputs of this graph's inputs, matching the output of special node "BEGIN" in
our graph."""
for i in self.graph.outputs_of(BEGIN):
for message in messages:
self[i].write(message)
|
python
|
{
"resource": ""
}
|
q20569
|
_resolve_transformations
|
train
|
def _resolve_transformations(transformations):
"""
Resolve a collection of strings into the matching python objects, defaulting to bonobo namespace if no package is provided.
Syntax for each string is path.to.package:attribute
:param transformations: tuple(str)
:return: tuple(object)
"""
registry = _ModulesRegistry()
transformations = transformations or []
for t in transformations:
try:
mod, attr = t.split(":", 1)
yield getattr(registry.require(mod), attr)
except ValueError:
yield getattr(bonobo, t)
|
python
|
{
"resource": ""
}
|
q20570
|
OrderFields
|
train
|
def OrderFields(fields):
"""
Transformation factory to reorder fields in a data stream.
:param fields:
:return: callable
"""
fields = list(fields)
@use_context
@use_raw_input
def _OrderFields(context, row):
nonlocal fields
context.setdefault("remaining", None)
if not context.output_type:
context.remaining = list(sorted(set(context.get_input_fields()) - set(fields)))
context.set_output_fields(fields + context.remaining)
yield tuple(row.get(field) for field in context.get_output_fields())
return _OrderFields
|
python
|
{
"resource": ""
}
|
q20571
|
SetFields
|
train
|
def SetFields(fields):
"""
Transformation factory that sets the field names on first iteration, without touching the values.
:param fields:
:return: callable
"""
@use_context
@use_no_input
def _SetFields(context):
nonlocal fields
if not context.output_type:
context.set_output_fields(fields)
return NOT_MODIFIED
return _SetFields
|
python
|
{
"resource": ""
}
|
q20572
|
split_token
|
train
|
def split_token(output):
"""
Split an output into token tuple, real output tuple.
:param output:
:return: tuple, tuple
"""
output = ensure_tuple(output)
flags, i, len_output, data_allowed = set(), 0, len(output), True
while i < len_output and isflag(output[i]):
if output[i].must_be_first and i:
raise ValueError("{} flag must be first.".format(output[i]))
if i and output[i - 1].must_be_last:
raise ValueError("{} flag must be last.".format(output[i - 1]))
if output[i] in flags:
raise ValueError("Duplicate flag {}.".format(output[i]))
flags.add(output[i])
data_allowed &= output[i].allows_data
i += 1
output = output[i:]
if not data_allowed and len(output):
raise ValueError("Output data provided after a flag that does not allow data.")
return flags, output
|
python
|
{
"resource": ""
}
|
q20573
|
NodeExecutionContext.start
|
train
|
def start(self):
"""
Starts this context, a.k.a the phase where you setup everything which will be necessary during the whole
lifetime of a transformation.
The "ContextCurrifier" is in charge of setting up a decorating stack, that includes both services and context
processors, and will call the actual node callable with additional parameters.
"""
super().start()
try:
initial = self._get_initial_context()
self._stack = ContextCurrifier(self.wrapped, *initial.args, **initial.kwargs)
if isconfigurabletype(self.wrapped):
try:
self.wrapped = self.wrapped(_final=True)
except Exception as exc:
# It's not normal to have a partially configured object here, so let's warn the user instead of letting them
# get into the hard trouble of understanding that by themselves.
raise TypeError(
"Configurables should be instanciated before execution starts.\nGot {!r}.\n".format(
self.wrapped
)
) from exc
else:
raise TypeError(
"Configurables should be instanciated before execution starts.\nGot {!r}.\n".format(
self.wrapped
)
)
self._stack.setup(self)
except Exception:
# Set the logging level to the lowest possible, to avoid double log.
self.fatal(sys.exc_info(), level=0)
# We raise again, so the error is not ignored out of execution loops.
raise
|
python
|
{
"resource": ""
}
|
q20574
|
NodeExecutionContext.loop
|
train
|
def loop(self):
"""
The actual infinite loop for this transformation.
"""
logger.debug("Node loop starts for {!r}.".format(self))
while self.should_loop:
try:
self.step()
except InactiveReadableError:
break
logger.debug("Node loop ends for {!r}.".format(self))
|
python
|
{
"resource": ""
}
|
q20575
|
NodeExecutionContext._step
|
train
|
def _step(self):
"""
A single step in the loop.
Basically gets an input bag, send it to the node, interpret the results.
"""
# Pull and check data
input_bag = self._get()
# Sent through the stack
results = self._stack(input_bag)
# self._exec_time += timer.duration
# Put data onto output channels
if isinstance(results, GeneratorType):
while True:
try:
# if kill flag was set, stop iterating.
if self._killed:
break
result = next(results)
except StopIteration:
# That's not an error, we're just done.
break
else:
# Push data (in case of an iterator)
self._put(self._cast(input_bag, result))
elif results:
# Push data (returned value)
self._put(self._cast(input_bag, results))
else:
# case with no result, an execution went through anyway, use for stats.
# self._exec_count += 1
pass
|
python
|
{
"resource": ""
}
|
q20576
|
NodeExecutionContext.stop
|
train
|
def stop(self):
"""
Cleanup the context, after the loop ended.
"""
if self._stack:
try:
self._stack.teardown()
except Exception:
self.fatal(sys.exc_info())
super().stop()
|
python
|
{
"resource": ""
}
|
q20577
|
NodeExecutionContext.write
|
train
|
def write(self, *messages):
"""
Push a message list to this context's input queue.
:param mixed value: message
"""
for message in messages:
if not isinstance(message, Token):
message = ensure_tuple(message, cls=self._input_type, length=self._input_length)
if self._input_length is None:
self._input_length = len(message)
self.input.put(message)
|
python
|
{
"resource": ""
}
|
q20578
|
NodeExecutionContext._put
|
train
|
def _put(self, value, _control=False):
"""
Sends a message to all of this context's outputs.
:param mixed value: message
:param _control: if true, won't count in statistics.
"""
if not _control:
self.increment("out")
for output in self.outputs:
output.put(value)
|
python
|
{
"resource": ""
}
|
q20579
|
AsyncNodeExecutionContext._get
|
train
|
async def _get(self):
"""
Read from the input queue.
If Queue raises (like Timeout or Empty), stat won't be changed.
"""
input_bag = await self.input.get()
# Store or check input type
if self._input_type is None:
self._input_type = type(input_bag)
elif type(input_bag) != self._input_type:
try:
if self._input_type == tuple:
input_bag = self._input_type(input_bag)
else:
input_bag = self._input_type(*input_bag)
except Exception as exc:
raise UnrecoverableTypeError(
"Input type changed to incompatible type between calls to {!r}.\nGot {!r} which is not of type {!r}.".format(
self.wrapped, input_bag, self._input_type
)
) from exc
# Store or check input length, which is a soft fallback in case we're just using tuples
if self._input_length is None:
self._input_length = len(input_bag)
elif len(input_bag) != self._input_length:
raise UnrecoverableTypeError(
"Input length changed between calls to {!r}.\nExpected {} but got {}: {!r}.".format(
self.wrapped, self._input_length, len(input_bag), input_bag
)
)
self.increment("in") # XXX should that go before type check ?
return input_bag
|
python
|
{
"resource": ""
}
|
q20580
|
create_strategy
|
train
|
def create_strategy(name=None):
"""
Create a strategy, or just returns it if it's already one.
:param name:
:return: Strategy
"""
import logging
from bonobo.execution.strategies.base import Strategy
if isinstance(name, Strategy):
return name
if name is None:
name = DEFAULT_STRATEGY
logging.debug("Creating execution strategy {!r}...".format(name))
try:
factory = STRATEGIES[name]
except KeyError as exc:
raise RuntimeError(
"Invalid strategy {}. Available choices: {}.".format(repr(name), ", ".join(sorted(STRATEGIES.keys())))
) from exc
return factory()
|
python
|
{
"resource": ""
}
|
q20581
|
_repr_html_
|
train
|
def _repr_html_():
"""This allows to easily display a version snippet in Jupyter."""
from bonobo.commands.version import get_versions
return (
'<div style="padding: 8px;">'
' <div style="float: left; width: 20px; height: 20px;">{}</div>'
' <pre style="white-space: nowrap; padding-left: 8px">{}</pre>'
"</div>"
).format(__logo__, "<br/>".join(get_versions(all=True)))
|
python
|
{
"resource": ""
}
|
q20582
|
get_graph
|
train
|
def get_graph(graph=None, *, _limit=(), _print=()):
"""
Extracts a list of one-euro cafes in Paris, renames the name, address and zipcode fields,
reorders the fields and formats to json and csv files.
"""
graph = graph or bonobo.Graph()
producer = (
graph.get_cursor()
>> ODSReader(dataset="liste-des-cafes-a-un-euro", netloc="opendata.paris.fr")
>> PartialGraph(*_limit)
>> bonobo.UnpackItems(0)
>> bonobo.Rename(name="nom_du_cafe", address="adresse", zipcode="arrondissement")
>> bonobo.Format(city="Paris", country="France")
>> bonobo.OrderFields(["name", "address", "zipcode", "city", "country", "geometry", "geoloc"])
>> PartialGraph(*_print)
)
# Comma separated values.
graph.get_cursor(producer.output) >> bonobo.CsvWriter(
"coffeeshops.csv", fields=["name", "address", "zipcode", "city"], delimiter=","
)
# Standard JSON
graph.get_cursor(producer.output) >> bonobo.JsonWriter(path="coffeeshops.json")
# Line-delimited JSON
graph.get_cursor(producer.output) >> bonobo.LdjsonWriter(path="coffeeshops.ldjson")
return graph
|
python
|
{
"resource": ""
}
|
q20583
|
entrypoint
|
train
|
def entrypoint(args=None):
"""
Main callable for "bonobo" entrypoint.
Will load commands from "bonobo.commands" entrypoints, using stevedore.
"""
mondrian.setup(excepthook=True)
logger = logging.getLogger()
logger.setLevel(settings.LOGGING_LEVEL.get())
parser = argparse.ArgumentParser()
parser.add_argument("--debug", "-D", action="store_true")
subparsers = parser.add_subparsers(dest="command")
subparsers.required = True
commands = {}
def register_extension(ext):
nonlocal commands
try:
parser = subparsers.add_parser(ext.name)
if isinstance(ext.plugin, type) and issubclass(ext.plugin, BaseCommand):
# current way, class based.
cmd = ext.plugin()
cmd.add_arguments(parser)
cmd.__name__ = ext.name
commands[ext.name] = cmd.handle
else:
# old school, function based.
commands[ext.name] = ext.plugin(parser)
except Exception:
logger.exception("Error while loading command {}.".format(ext.name))
from stevedore import ExtensionManager
mgr = ExtensionManager(namespace="bonobo.commands")
mgr.map(register_extension)
parsed_args = parser.parse_args(args).__dict__
if parsed_args.pop("debug", False):
settings.DEBUG.set(True)
settings.LOGGING_LEVEL.set(logging.DEBUG)
logger.setLevel(settings.LOGGING_LEVEL.get())
logger.debug("Command: " + parsed_args["command"] + " Arguments: " + repr(parsed_args))
# Get command handler, execute, rinse.
command = commands[parsed_args.pop("command")]
command(**parsed_args)
return 0
|
python
|
{
"resource": ""
}
|
q20584
|
FileReader.output
|
train
|
def output(self, context, *args, **kwargs):
"""
Allow all readers to optionally use the output_fields XOR output_type options.
"""
output_fields = self.output_fields
output_type = self.output_type
if output_fields and output_type:
raise UnrecoverableError("Cannot specify both output_fields and output_type option.")
if self.output_type:
context.set_output_type(self.output_type)
if self.output_fields:
context.set_output_fields(self.output_fields)
yield
|
python
|
{
"resource": ""
}
|
q20585
|
FileReader.read
|
train
|
def read(self, file, *, fs):
"""
Read lines from the given file, yielding each line stripped of its end-of-line characters.
"""
for line in file:
yield line.rstrip(self.eol)
|
python
|
{
"resource": ""
}
|
q20586
|
Graph.outputs_of
|
train
|
def outputs_of(self, idx, create=False):
""" Get a set of the outputs for a given node index.
"""
if create and not idx in self.edges:
self.edges[idx] = set()
return self.edges[idx]
|
python
|
{
"resource": ""
}
|
q20587
|
Graph.add_node
|
train
|
def add_node(self, c):
""" Add a node without connections in this graph and returns its index.
"""
idx = len(self.nodes)
self.edges[idx] = set()
self.nodes.append(c)
return idx
|
python
|
{
"resource": ""
}
|
q20588
|
Graph.add_chain
|
train
|
def add_chain(self, *nodes, _input=BEGIN, _output=None, _name=None):
""" Add a chain in this graph.
"""
if len(nodes):
_input = self._resolve_index(_input)
_output = self._resolve_index(_output)
_first = None
_last = None
for i, node in enumerate(nodes):
_last = self.add_node(node)
if not i and _name:
if _name in self.named:
raise KeyError("Duplicate name {!r} in graph.".format(_name))
self.named[_name] = _last
if _first is None:
_first = _last
self.outputs_of(_input, create=True).add(_last)
_input = _last
if _output is not None:
self.outputs_of(_input, create=True).add(_output)
if hasattr(self, "_topologcally_sorted_indexes_cache"):
del self._topologcally_sorted_indexes_cache
return GraphRange(self, _first, _last)
return GraphRange(self, None, None)
|
python
|
{
"resource": ""
}
|
q20589
|
Graph._resolve_index
|
train
|
def _resolve_index(self, mixed):
"""
Find the index based on various strategies for a node, probably an input or output of a chain. Supported
inputs are indexes, node values or names.
"""
if mixed is None:
return None
if type(mixed) is int or mixed in self.edges:
return mixed
if isinstance(mixed, str) and mixed in self.named:
return self.named[mixed]
if mixed in self.nodes:
return self.nodes.index(mixed)
raise ValueError("Cannot find node matching {!r}.".format(mixed))
|
python
|
{
"resource": ""
}
|
q20590
|
get_path
|
train
|
def get_path():
"""
Shortcut for users whose theme is next to their conf.py.
"""
# Theme directory is defined as our parent directory
return os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
|
python
|
{
"resource": ""
}
|
q20591
|
create_or_update
|
train
|
def create_or_update(model, *, defaults=None, save=True, **kwargs):
"""
Create or update a django model instance.
:param model:
:param defaults:
:param kwargs:
:return: object, created, updated
"""
obj, created = model._default_manager.get_or_create(defaults=defaults, **kwargs)
updated = False
if not created:
if defaults:
for k, v in defaults.items():
if getattr(obj, k) != v:
setattr(obj, k, v)
updated = True
if updated and save:
obj.save()
return obj, created, updated
|
python
|
{
"resource": ""
}
|
q20592
|
_override_runner
|
train
|
def _override_runner(runner):
"""
Context manager that monkey patches `bonobo.run` function with our current command logic.
:param runner: the callable that will handle the `run()` logic.
"""
import bonobo
_get_argument_parser = bonobo.util.environ.get_argument_parser
_run = bonobo.run
try:
# Original get_argument_parser would create or update an argument parser with environment options, but here we
# already had them parsed so let's patch with something that creates an empty one instead.
def get_argument_parser(parser=None):
return parser or argparse.ArgumentParser()
bonobo.util.environ.get_argument_parser = get_argument_parser
bonobo.run = runner
yield runner
finally:
# Restore our saved values.
bonobo.util.environ.get_argument_parser = _get_argument_parser
bonobo.run = _run
|
python
|
{
"resource": ""
}
|
q20593
|
_install_requirements
|
train
|
def _install_requirements(requirements):
"""Install requirements given a path to requirements.txt file."""
import importlib
import pip
pip.main(["install", "-r", requirements])
# Some shenanigans to be sure everything is importable after this, especially .egg-link files which
# are referenced in *.pth files and apparently loaded by site.py at some magic bootstrap moment of the
# python interpreter.
pip.utils.pkg_resources = importlib.reload(pip.utils.pkg_resources)
import site
importlib.reload(site)
|
python
|
{
"resource": ""
}
|
q20594
|
ensure_tuple
|
train
|
def ensure_tuple(tuple_or_mixed, *, cls=None):
"""
If it's not a tuple, let's make a tuple of one item.
Otherwise, not changed.
:param tuple_or_mixed:
:return: tuple
"""
if cls is None:
cls = tuple
if isinstance(tuple_or_mixed, cls):
return tuple_or_mixed
if tuple_or_mixed is None:
return tuple.__new__(cls, ())
if isinstance(tuple_or_mixed, tuple):
return tuple.__new__(cls, tuple_or_mixed)
return tuple.__new__(cls, (tuple_or_mixed,))
|
python
|
{
"resource": ""
}
|
q20595
|
create_container
|
train
|
def create_container(services=None, factory=Container):
"""
Create a container with reasonable default service implementations for commonly used, standard-named services.
Services:
- `fs` defaults to a fs2 instance based on current working directory
- `http` defaults to requests
:param services:
:return:
"""
container = factory(services) if services else factory()
if not "fs" in container:
import bonobo
container.setdefault("fs", bonobo.open_fs())
if not "http" in container:
import requests
container.setdefault("http", requests)
return container
|
python
|
{
"resource": ""
}
|
q20596
|
Registry.get_reader_factory_for
|
train
|
def get_reader_factory_for(self, name, *, format=None):
"""
Returns a callable to build a reader for the provided filename, optionally forcing a format.
:param name: filename
:param format: format
:return: type
"""
return self.get_factory_for(READER, name, format=format)
|
python
|
{
"resource": ""
}
|
q20597
|
Registry.get_writer_factory_for
|
train
|
def get_writer_factory_for(self, name, *, format=None):
"""
Returns a callable to build a writer for the provided filename, optionally forcing a format.
:param name: filename
:param format: format
:return: type
"""
return self.get_factory_for(WRITER, name, format=format)
|
python
|
{
"resource": ""
}
|
q20598
|
Client.async_init
|
train
|
async def async_init(self) -> None:
"""Create a Tile session."""
if not self._client_established:
await self.request(
'put',
'clients/{0}'.format(self.client_uuid),
data={
'app_id': DEFAULT_APP_ID,
'app_version': DEFAULT_APP_VERSION,
'locale': self._locale
})
self._client_established = True
resp = await self.request(
'post',
'clients/{0}/sessions'.format(self.client_uuid),
data={
'email': self._email,
'password': self._password
})
if not self.user_uuid:
self.user_uuid = resp['result']['user']['user_uuid']
self._session_expiry = resp['result']['session_expiration_timestamp']
self.tiles = Tile(self.request, self.user_uuid)
|
python
|
{
"resource": ""
}
|
q20599
|
Tile.all
|
train
|
async def all(
self, whitelist: list = None, show_inactive: bool = False) -> list:
"""Get all Tiles for a user's account."""
list_data = await self._request(
'get', 'users/{0}/user_tiles'.format(self._user_uuid))
tile_uuid_list = [
tile['tile_uuid'] for tile in list_data['result']
if not whitelist or tile['tileType'] in whitelist
]
tile_data = await self._request(
'get',
'tiles',
params=[('tile_uuids', uuid) for uuid in tile_uuid_list])
return [
tile for tile in tile_data['result'].values()
if show_inactive or tile['visible'] is True
]
|
python
|
{
"resource": ""
}
|