sentence1 stringlengths 52 3.87M | sentence2 stringlengths 1 47.2k | label stringclasses 1 value |
|---|---|---|
def do_set_log_level(self, arg):
    """Set the log level.

    Usage:
        set_log_level i|v
    Parameters:
        log_level: i - info | v - verbose
    """
    if arg in ['i', 'v']:
        _LOGGING.info('Setting log level to %s', arg)
        if arg == 'i':
            _LOGGING.setLevel(logging.INFO)
            _INSTEONPLM_LOGGING.setLevel(logging.INFO)
        else:
            _LOGGING.setLevel(logging.DEBUG)
            _INSTEONPLM_LOGGING.setLevel(logging.DEBUG)
    else:
        # Unknown value: report the problem and show this command's help.
        _LOGGING.error('Log level value error.')
        self.do_help('set_log_level')
def do_set_device(self, args):
    """Set the PLM OS device.

    Device defaults to /dev/ttyUSB0

    Usage:
        set_device device
    Arguments:
        device: Required - INSTEON PLM device
    """
    params = args.split()
    device = None
    try:
        device = params[0]
    except IndexError:
        _LOGGING.error('Device name required.')
        self.do_help('set_device')
    if device:
        self.tools.device = device
def do_set_workdir(self, args):
    """Set the working directory.

    The working directory is used to load and save known devices
    to improve startup times. During startup the application
    loads and saves a file `insteon_plm_device_info.dat`. This file
    is saved in the working directory.

    The working directory has no default value. If the working directory is
    not set, the `insteon_plm_device_info.dat` file is not loaded or saved.

    Usage:
        set_workdir workdir
    Arguments:
        workdir: Required - Working directory to load and save device list
    """
    params = args.split()
    workdir = None
    try:
        workdir = params[0]
    except IndexError:
        # Fixed copy-paste error: this command needs a directory, not a
        # device name.
        _LOGGING.error('Working directory required.')
        self.do_help('set_workdir')
    if workdir:
        self.tools.workdir = workdir
def do_help(self, arg):
    """Help command.

    Usage:
        help [command]
    Parameters:
        command: Optional - command name to display detailed help
    """
    cmds = arg.split()
    if cmds:
        # Default of None keeps an unknown command from raising
        # AttributeError; the original getattr made the `if func`
        # check unreachable for bad names.
        func = getattr(self, 'do_{}'.format(cmds[0]), None)
        if func:
            _LOGGING.info(func.__doc__)
        else:
            _LOGGING.error('Command %s not found', cmds[0])
    else:
        _LOGGING.info("Available command list: ")
        for curr_cmd in dir(self.__class__):
            if curr_cmd.startswith("do_") and not curr_cmd == 'do_test':
                print(" - ", curr_cmd[3:])
        _LOGGING.info("For help with a command type `help command`")
def do_add_device_override(self, args):
    """Add a device override to the IM.

    Usage:
        add_device_override address cat subcat [firmware]
    Arguments:
        address: Insteon address of the device to override
        cat: Device category
        subcat: Device subcategory
        firmware: Optional - Device firmware

    The device address can be written with or without the dots and in
    upper or lower case, for example: 1a2b3c or 1A.2B.3C.

    The category, subcategory and firmware numbers are written in hex
    format, for example: 0x01 0x1b

    Example:
        add_device_override 1a2b3c 0x02 0x1a
    """
    params = args.split()
    addr = None
    cat = None
    subcat = None
    firmware = None
    error = None
    try:
        addr = Address(params[0])
        # Strip the '0x' prefix before converting the hex text to bytes.
        cat = binascii.unhexlify(params[1][2:])
        subcat = binascii.unhexlify(params[2][2:])
        firmware = binascii.unhexlify(params[3][2:])
    except IndexError:
        # Only firmware is optional; a missing firmware still leaves
        # addr/cat/subcat set, so the check below succeeds.
        error = 'missing'
    except ValueError:
        error = 'value'
    if addr and cat and subcat:
        self.tools.add_device_override(addr, cat, subcat, firmware)
    else:
        if error == 'missing':
            _LOGGING.error('Device address, category and subcategory are '
                           'required.')
        else:
            _LOGGING.error('Check the values for address, category and '
                           'subcategory.')
        self.do_help('add_device_override')
def do_add_x10_device(self, args):
    """Add an X10 device to the IM.

    Usage:
        add_x10_device housecode unitcode type
    Arguments:
        housecode: Device housecode (A - P)
        unitcode: Device unitcode (1 - 16)
        type: Device type

    Current device types are:
        - OnOff
        - Dimmable
        - Sensor

    Example:
        add_x10_device M 12 OnOff
    """
    params = args.split()
    housecode = None
    unitcode = None
    dev_type = None
    try:
        housecode = params[0]
        unitcode = int(params[1])
        if unitcode not in range(1, 17):
            # Out-of-range unit codes are reported via the ValueError
            # handler below.
            raise ValueError
        dev_type = params[2]
    except IndexError:
        # Missing arguments are reported by the generic message below.
        pass
    except ValueError:
        _LOGGING.error('X10 unit code must be an integer 1 - 16')
        unitcode = None
    if housecode and unitcode and dev_type:
        device = self.tools.add_x10_device(housecode, unitcode, dev_type)
        if not device:
            _LOGGING.error('Device not added. Please check the '
                           'information you provided.')
            self.do_help('add_x10_device')
    else:
        _LOGGING.error('Device housecode, unitcode and type are '
                       'required.')
        self.do_help('add_x10_device')
def do_kpl_status(self, args):
    """Get the status of a KeypadLinc button.

    Usage:
        kpl_status address group
    """
    params = args.split()
    address = None
    group = None
    try:
        address = params[0]
        group = int(params[1])
    except IndexError:
        _LOGGING.error("Address and group are required")
        self.do_help('kpl_status')
    except ValueError:
        # int() raises ValueError (not TypeError) for non-numeric text.
        _LOGGING.error("Group must be an integer")
        self.do_help('kpl_status')
    if address and group:
        self.tools.kpl_status(address, group)
def do_kpl_on(self, args):
    """Turn on a KeypadLinc button.

    Usage:
        kpl_on address group
    """
    params = args.split()
    address = None
    group = None
    try:
        address = params[0]
        group = int(params[1])
    except IndexError:
        _LOGGING.error("Address and group are required")
        # Show this command's help (original showed kpl_status by mistake).
        self.do_help('kpl_on')
    except ValueError:
        # int() raises ValueError (not TypeError) for non-numeric text.
        _LOGGING.error("Group must be an integer")
        self.do_help('kpl_on')
    if address and group:
        self.tools.kpl_on(address, group)
def do_kpl_off(self, args):
    """Turn off a KeypadLinc button.

    Usage:
        kpl_off address group
    """
    params = args.split()
    address = None
    group = None
    try:
        address = params[0]
        group = int(params[1])
    except IndexError:
        _LOGGING.error("Address and group are required")
        # Show this command's help (original showed kpl_status by mistake).
        self.do_help('kpl_off')
    except ValueError:
        # int() raises ValueError (not TypeError) for non-numeric text.
        _LOGGING.error("Group must be an integer")
        self.do_help('kpl_off')
    if address and group:
        self.tools.kpl_off(address, group)
def do_kpl_set_on_mask(self, args):
    """Set the on mask for a KeypadLinc button.

    Usage:
        kpl_set_on_mask address group mask
    """
    params = args.split()
    address = None
    group = None
    mask_string = None
    mask = None
    try:
        address = params[0]
        group = int(params[1])
        mask_string = params[2]
        if mask_string[0:2].lower() == '0x':
            # Hex form, e.g. 0x1f.
            mask = binascii.unhexlify(mask_string[2:])
        else:
            mask = int(mask_string)
    except IndexError:
        _LOGGING.error("Address, group and mask are required")
        self.do_help('kpl_set_on_mask')
    except ValueError:
        # int()/unhexlify raise ValueError for malformed numbers; the
        # original caught TypeError, which neither call raises here.
        _LOGGING.error("Group must be an integer")
        self.do_help('kpl_set_on_mask')
    if address and group and mask:
        self.tools.kpl_set_on_mask(address, group, mask)
def human(self):
    """Emit the address in human-readable format (AA.BB.CC)."""
    strout = ''
    first = True
    # Walk the hex string two characters at a time; iterating over the
    # actual length (instead of a fixed 28) avoids appending separator
    # dots past the end of a standard 6-hex-digit address.
    for i in range(0, len(self.hex), 2):
        if first:
            first = False
        else:
            strout = strout + '.'
        strout = strout + self.hex[i:i + 2]
    return strout
def bytes(self):
    """Emit the user data fields d1-d14 as a bytearray.

    A field whose value is None is emitted as 0x00.
    """
    byteout = bytearray()
    for i in range(1, 15):
        key = 'd' + str(i)
        if self._userdata[key] is not None:
            byteout.append(self._userdata[key])
        else:
            byteout.append(0x00)
    return byteout
def from_raw_message(cls, rawmessage):
    """Create a user data instance from a raw byte stream."""
    # Start from an all-0x00 record and overlay the raw bytes.
    empty = cls.create_empty(0x00)
    userdata_dict = cls.normalize(empty, rawmessage)
    return Userdata(userdata_dict)
def create_pattern(cls, userdata):
    """Create a user data pattern instance.

    Fields not present in `userdata` stay None so they act as wildcards
    during pattern matching.
    """
    empty = cls.create_empty(None)
    userdata_dict = cls.normalize(empty, userdata)
    return Userdata(userdata_dict)
def template(cls, userdata):
    """Create a template instance used for message callbacks."""
    # None-filled base so unspecified fields remain wildcards.
    ud = Userdata(cls.normalize(cls.create_empty(None), userdata))
    return ud
def matches_pattern(self, other):
    """Test if the current instance matches a template instance.

    A None value on either side acts as a wildcard for that field;
    otherwise the field values must be equal. Returns False for
    non-Userdata operands.
    """
    ismatch = False
    if isinstance(other, Userdata):
        for key in self._userdata:
            if self._userdata[key] is None or other[key] is None:
                ismatch = True
            elif self._userdata[key] == other[key]:
                ismatch = True
            else:
                ismatch = False
                break
    return ismatch
def create_empty(cls, val=0x00):
    """Create an empty user data dictionary.

    val: value to fill the empty user data fields with (default is 0x00)
    """
    # Fields are d1 through d14.
    userdata_dict = {}
    for i in range(1, 15):
        key = 'd{}'.format(i)
        userdata_dict.update({key: val})
    return userdata_dict
def normalize(cls, empty, userdata):
    """Return normalized user data as a dictionary.

    empty: an empty dictionary
    userdata: data in the form of Userdata, dict, bytes/bytearray or None

    Raises:
        ValueError: if userdata is of an unsupported type.
    """
    if isinstance(userdata, Userdata):
        return userdata.to_dict()
    if isinstance(userdata, dict):
        return cls._dict_to_dict(empty, userdata)
    if isinstance(userdata, (bytes, bytearray)):
        return cls._bytes_to_dict(empty, userdata)
    if userdata is None:
        return empty
    raise ValueError('Unsupported user data type: {}'.format(type(userdata)))
def hex(self):
    """Hexadecimal representation of the message in bytes."""
    props = self._message_properties()
    msg = bytearray([MESSAGE_START_CODE_0X02, self._code])
    for prop in props:
        # pylint: disable=unused-variable
        for key, val in prop.items():
            # Serialize each property according to its type; None (and
            # addresses without a value) contribute no bytes.
            if val is None:
                pass
            elif isinstance(val, int):
                msg.append(val)
            elif isinstance(val, Address):
                if val.addr is None:
                    pass
                else:
                    msg.extend(val.bytes)
            elif isinstance(val, MessageFlags):
                msg.extend(val.bytes)
            elif isinstance(val, bytearray):
                msg.extend(val)
            elif isinstance(val, bytes):
                msg.extend(val)
            elif isinstance(val, Userdata):
                msg.extend(val.bytes)
    return binascii.hexlify(msg).decode()
def matches_pattern(self, other):
    """Return if the current message matches a message template.

    Compare the current message to a template message to test matches
    to a pattern. The messages must share the same code and every
    property must match (per _test_match); the first mismatch stops
    the comparison.
    """
    properties = self._message_properties()
    ismatch = False
    if isinstance(other, Message) and self.code == other.code:
        for prop in properties:
            for key, prop_val in prop.items():
                if hasattr(other, key):
                    key_val = getattr(other, key)
                    ismatch = self._test_match(prop_val, key_val)
                else:
                    ismatch = False
                if not ismatch:
                    break
            if not ismatch:
                break
    return ismatch
def _register_messages(self):
    """Register messages to listen for.

    For each of the six light commands (on, fast on, off, fast off,
    manual on, manual off) two templates are registered:
      - an ALL-Link broadcast template (group encoded in the target), and
      - an ALL-Link cleanup template (group encoded in cmd2).
    """
    # (commandtuple, callback, pass_cmd2_none): the two ON commands did
    # not pass cmd2 for the broadcast template in the original code,
    # while the others passed an explicit cmd2=None — preserved here.
    specs = [
        (COMMAND_LIGHT_ON_0X11_NONE, self._on_message_received, False),
        (COMMAND_LIGHT_ON_FAST_0X12_NONE, self._on_message_received,
         False),
        (COMMAND_LIGHT_OFF_0X13_0X00, self._off_message_received, True),
        (COMMAND_LIGHT_OFF_FAST_0X14_0X00, self._off_message_received,
         True),
        (COMMAND_LIGHT_MANUALLY_TURNED_ON_0X23_0X00,
         self._manual_change_received, True),
        (COMMAND_LIGHT_MANUALLY_TURNED_OFF_0X22_0X00,
         self._manual_change_received, True),
    ]
    # Broadcast templates first, then cleanup templates, matching the
    # original registration order.
    for commandtuple, callback, explicit_cmd2 in specs:
        kwargs = {'cmd2': None} if explicit_cmd2 else {}
        template = StandardReceive.template(
            commandtuple=commandtuple,
            address=self._address,
            target=bytearray([0x00, 0x00, self._group]),
            flags=MessageFlags.template(MESSAGE_TYPE_ALL_LINK_BROADCAST,
                                        None),
            **kwargs)
        self._message_callbacks.add(template, callback)
    for commandtuple, callback, _unused in specs:
        template = StandardReceive.template(
            commandtuple=commandtuple,
            address=self._address,
            flags=MessageFlags.template(MESSAGE_TYPE_ALL_LINK_CLEANUP,
                                        None),
            cmd2=self._group)
        self._message_callbacks.add(template, callback)
def on(self):
    """Send ON command to device at full brightness (0xff)."""
    on_command = StandardSend(self._address,
                              COMMAND_LIGHT_ON_0X11_NONE, 0xff)
    self._send_method(on_command,
                      self._on_message_received)
def off(self):
    """Send OFF command to device."""
    off_command = StandardSend(self._address,
                               COMMAND_LIGHT_OFF_0X13_0X00)
    self._send_method(off_command, self._off_message_received)
def _send_status_0x01_request(self):
    """Send status request (0x19 0x01) to device."""
    status_command = StandardSend(self._address,
                                  COMMAND_LIGHT_STATUS_REQUEST_0X19_0X01)
    self._send_method(status_command, self._status_message_0x01_received)
def _status_message_0x01_received(self, msg):
"""Handle status received messages.
The following status values can be received:
0x00 = Both Outlets Off
0x01 = Only Top Outlet On
0x02 = Only Bottom Outlet On
0x03 = Both Outlets On
"""
if msg.cmd2 == 0x00 or msg.cmd2 == 0x02:
self._update_subscribers(0x00)
elif msg.cmd2 == 0x01 or msg.cmd2 == 0x03:
self._update_subscribers(0xff)
else:
raise ValueError | Handle status received messages.
The following status values can be received:
0x00 = Both Outlets Off
0x01 = Only Top Outlet On
0x02 = Only Bottom Outlet On
0x03 = Both Outlets On | entailment |
def on(self):
    """Send an ON message to device group."""
    on_command = ExtendedSend(self._address,
                              COMMAND_LIGHT_ON_0X11_NONE,
                              self._udata,
                              cmd2=0xff)
    # Extended messages carry a checksum in the last user data byte.
    on_command.set_checksum()
    self._send_method(on_command, self._on_message_received)
def off(self):
    """Send an OFF message to device group."""
    off_command = ExtendedSend(self._address,
                               COMMAND_LIGHT_OFF_0X13_0X00,
                               self._udata)
    # Extended messages carry a checksum in the last user data byte.
    off_command.set_checksum()
    self._send_method(off_command, self._off_message_received)
def _send_status_0x01_request(self):
    """Send a status request (0x19 0x01)."""
    status_command = StandardSend(self._address,
                                  COMMAND_LIGHT_STATUS_REQUEST_0X19_0X01)
    self._send_method(status_command, self._status_message_received)
def _status_message_received(self, msg):
"""Receive a status message.
The following status values can be received:
0x00 = Both Outlets Off
0x01 = Only Top Outlet On
0x02 = Only Bottom Outlet On
0x03 = Both Outlets On
"""
if msg.cmd2 == 0x00 or msg.cmd2 == 0x01:
self._update_subscribers(0x00)
elif msg.cmd2 == 0x02 or msg.cmd2 == 0x03:
self._update_subscribers(0xff)
else:
raise ValueError | Receive a status message.
The following status values can be received:
0x00 = Both Outlets Off
0x01 = Only Top Outlet On
0x02 = Only Bottom Outlet On
0x03 = Both Outlets On | entailment |
def close(self):
    """Send CLOSE command to device (implemented as light OFF)."""
    close_command = StandardSend(self._address,
                                 COMMAND_LIGHT_OFF_0X13_0X00)
    self._send_method(close_command, self._close_message_received)
def led_changed(self, addr, group, val):
    """Capture a change to the LED for this button.

    Subscribers are only notified when the on/off state actually
    changes, not on every raw value update.
    """
    _LOGGER.debug("Button %d LED changed from %d to %d",
                  self._group, self._value, val)
    led_on = bool(val)
    if led_on != bool(self._value):
        self._update_subscribers(int(led_on))
def set_on_mask(self, mask):
    """Set the on mask for the current group/button."""
    # Extended property 0x02 = on mask.
    set_cmd = self._create_set_property_msg('_on_mask', 0x02, mask)
    self._send_method(set_cmd, self._property_set)
def set_off_mask(self, mask):
    """Set the off mask for the current group/button."""
    # Extended property 0x03 = off mask.
    set_cmd = self._create_set_property_msg('_off_mask', 0x03, mask)
    self._send_method(set_cmd, self._property_set)
def set_x10_address(self, x10address):
    """Set the X10 address for the current group/button."""
    # Extended property 0x04 = X10 house code.
    set_cmd = self._create_set_property_msg('_x10_house_code', 0x04,
                                            x10address)
    self._send_method(set_cmd, self._property_set)
def set_ramp_rate(self, ramp_rate):
    """Set the ramp rate for the current group/button."""
    # Extended property 0x05 = ramp rate.
    set_cmd = self._create_set_property_msg('_ramp_rate', 0x05,
                                            ramp_rate)
    self._send_method(set_cmd, self._property_set)
def set_on_level(self, val):
    """Set on level for the button/group."""
    # Extended property 0x06 = on level.
    on_cmd = self._create_set_property_msg("_on_level", 0x06,
                                           val)
    # NOTE(review): the same message is sent twice with different
    # callbacks; confirm this double-send is intentional.
    self._send_method(on_cmd, self._property_set)
    self._send_method(on_cmd, self._on_message_received)
def set_led_brightness(self, brightness):
    """Set the LED brightness for the current group/button."""
    # Extended property 0x07 = LED brightness.
    set_cmd = self._create_set_property_msg("_led_brightness", 0x07,
                                            brightness)
    self._send_method(set_cmd, self._property_set)
def set_non_toggle_mask(self, non_toggle_mask):
    """Set the non_toggle_mask for the current group/button."""
    # Extended property 0x08 = non-toggle mask.
    set_cmd = self._create_set_property_msg("_non_toggle_mask", 0x08,
                                            non_toggle_mask)
    self._send_method(set_cmd, self._property_set)
def set_x10_all_bit_mask(self, x10_all_bit_mask):
    """Set the x10_all_bit_mask for the current group/button."""
    # Extended property 0x0a = X10 ALL bit mask.
    set_cmd = self._create_set_property_msg("_x10_all_bit_mask", 0x0a,
                                            x10_all_bit_mask)
    self._send_method(set_cmd, self._property_set)
def set_trigger_group_bit_mask(self, trigger_group_bit_mask):
    """Set the trigger_group_bit_mask for the current group/button."""
    # Extended property 0x0c = trigger group bit mask.
    set_cmd = self._create_set_property_msg("_trigger_group_bit_mask",
                                            0x0c, trigger_group_bit_mask)
    self._send_method(set_cmd, self._property_set)
def scene_on(self):
    """Trigger group/scene to ON level."""
    # d4=0x11 (light ON command), d5=0xff (full level).
    user_data = Userdata({'d1': self._group,
                          'd2': 0x00,
                          'd3': 0x00,
                          'd4': 0x11,
                          'd5': 0xff,
                          'd6': 0x00})
    self._set_sent_property(DIMMABLE_KEYPAD_SCENE_ON_LEVEL, 0xff)
    cmd = ExtendedSend(self._address,
                       COMMAND_EXTENDED_TRIGGER_ALL_LINK_0X30_0X00,
                       user_data)
    cmd.set_checksum()
    _LOGGER.debug('Calling scene_on and sending response to '
                  '_received_scene_triggered')
    self._send_method(cmd, self._received_scene_triggered)
def scene_off(self):
    """Trigger group/scene to OFF level."""
    # d4=0x13 (light OFF command), d5=0x00 (level zero).
    user_data = Userdata({'d1': self._group,
                          'd2': 0x00,
                          'd3': 0x00,
                          'd4': 0x13,
                          'd5': 0x00,
                          'd6': 0x00})
    self._set_sent_property(DIMMABLE_KEYPAD_SCENE_ON_LEVEL, 0x00)
    cmd = ExtendedSend(self._address,
                       COMMAND_EXTENDED_TRIGGER_ALL_LINK_0X30_0X00,
                       user_data)
    cmd.set_checksum()
    self._send_method(cmd, self._received_scene_triggered)
def extended_status_request(self):
    """Send status request for group/button."""
    self._status_received = False
    user_data = Userdata({'d1': self.group,
                          'd2': 0x00})
    cmd = ExtendedSend(self._address,
                       COMMAND_EXTENDED_GET_SET_0X2E_0X00,
                       userdata=user_data)
    cmd.set_checksum()
    self._send_method(cmd, self._status_message_received, True)
def _status_message_received(self, msg):
    """Receive confirmation that the status message is coming.

    The real status message is the extended direct message; this only
    schedules the confirmation wait once per request.
    """
    if not self._status_received:
        asyncio.ensure_future(self._confirm_status_received(),
                              loop=self._loop)
def _status_extended_message_received(self, msg):
    """Receive an extended status message.

    Status message received:
        cmd1: 0x2e
        cmd2: 0x00
        flags: Direct Extended
        d1: group
        d2: 0x01
        d3:  On Mask
        d4:  Off Mask
        d5:  X10 House Code
        d6:  X10 Unit
        d7:  Ramp Rate
        d8:  On-Level
        d9:  LED Brightness
        d10: Non-Toggle Mask
        d11: LED Bit Mask
        d12: X10 ALL Bit Mask
        d13: On/Off Bit Mask
        d14: Check sum
    """
    self._status_received = True
    self._status_retries = 0
    _LOGGER.debug("Extended status message received")
    # Unblock any waiter that was holding for this response.
    if self._status_response_lock.locked():
        self._status_response_lock.release()
    user_data = msg.userdata
    # self._update_subscribers(user_data['d8'])
    self._set_status_data(user_data)
def _property_set(self, msg):
"""Set command received and acknowledged."""
prop = self._sent_property.get('prop')
if prop and hasattr(self, prop):
setattr(self, prop, self._sent_property.get('val'))
self._sent_property = {} | Set command received and acknowledged. | entailment |
def _set_status_data(self, userdata):
"""Set status properties from userdata response.
Response values:
d3: On Mask
d4: Off Mask
d5: X10 House Code
d6: X10 Unit
d7: Ramp Rate
d8: On-Level
d9: LED Brightness
d10: Non-Toggle Mask
d11: LED Bit Mask
d12: X10 ALL Bit Mask
d13: On/Off Bit Mask
"""
self._on_mask = userdata['d3']
self._off_mask = userdata['d4']
self._x10_house_code = userdata['d5']
self._x10_unit = userdata['d6']
self._ramp_rate = userdata['d7']
self._on_level = userdata['d8']
self._led_brightness = userdata['d9']
self._non_toggle_mask = userdata['d10']
self._led_bit_mask = userdata['d11']
self._x10_all_bit_mask = userdata['d12']
self._on_off_bit_mask = userdata['d13']
self._trigger_group_bit_mask = userdata['d14'] | Set status properties from userdata response.
Response values:
d3: On Mask
d4: Off Mask
d5: X10 House Code
d6: X10 Unit
d7: Ramp Rate
d8: On-Level
d9: LED Brightness
d10: Non-Toggle Mask
d11: LED Bit Mask
d12: X10 ALL Bit Mask
d13: On/Off Bit Mask | entailment |
def _create_set_property_msg(self, prop, cmd, val):
    """Create an extended message to set a property.

    Create an extended message with:
        cmd1: 0x2e
        cmd2: 0x00
        flags: Direct Extended
        d1: group
        d2: cmd
        d3: val
        d4 - d14: 0x00

    Parameters:
        prop: Property name to update
        cmd: Command value
            0x02: on mask
            0x03: off mask
            0x04: x10 house code
            0x05: ramp rate
            0x06: on level
            0x07: LED brightness
            0x08: Non-Toggle mask
            0x09: LED bit mask (Do not use in this class. Use LED class)
            0x0a: X10 All bit mask
            0x0c: Trigger group bit mask
        val: New property value

    Returns:
        The checksummed ExtendedSend message, ready to send. The pending
        (prop, val) pair is recorded for _property_set to apply on ack.
    """
    user_data = Userdata({'d1': self.group,
                          'd2': cmd,
                          'd3': val})
    msg = ExtendedSend(self._address,
                       COMMAND_EXTENDED_GET_SET_0X2E_0X00,
                       user_data)
    msg.set_checksum()
    self._set_sent_property(prop, val)
    return msg
def on(self, group):
    """Turn the LED on for a group."""
    # The request is fire-and-forget on the event loop.
    asyncio.ensure_future(self._send_led_on_off_request(group, 1),
                          loop=self._loop)
def off(self, group):
    """Turn the LED off for a group."""
    # The request is fire-and-forget on the event loop.
    asyncio.ensure_future(self._send_led_on_off_request(group, 0),
                          loop=self._loop)
def register_led_updates(self, callback, button):
    """Register a callback when a specific button LED changes."""
    button_callbacks = self._button_observer_callbacks.get(button)
    if not button_callbacks:
        # First callback for this button: create its list.
        self._button_observer_callbacks[button] = []
        _LOGGER.debug('New callback for button %d', button)
    self._button_observer_callbacks[button].append(callback)
def _set_led_value(self, group, val):
    """Set the LED value and confirm with a status check."""
    # Flip only this group's bit in the current bitmask.
    new_bitmask = set_bit(self._value, group, bool(val))
    self._set_led_bitmask(new_bitmask)
def _bit_value(self, group, val):
"""Set the LED on/off value from the LED bitmap."""
bitshift = group - 1
if val:
new_value = self._value | (1 << bitshift)
else:
new_value = self._value & (0xff & ~(1 << bitshift))
return new_value | Set the LED on/off value from the LED bitmap. | entailment |
def set(self, mode):
    """Set the thermostat system mode.

    Mode options:
        OFF = 0x00,
        HEAT = 0x01,
        COOL = 0x02,
        AUTO = 0x03

    Fan modes (FAN_AUTO, FAN_ALWAYS_ON) are handled by the fan control
    class; unrecognized modes are silently ignored.
    """
    new_mode = None
    if mode == ThermostatMode.OFF:
        new_mode = COMMAND_THERMOSTAT_CONTROL_OFF_ALL_0X6B_0X09
    elif mode == ThermostatMode.HEAT:
        new_mode = COMMAND_THERMOSTAT_CONTROL_ON_HEAT_0X6B_0X04
    elif mode == ThermostatMode.COOL:
        new_mode = COMMAND_THERMOSTAT_CONTROL_ON_COOL_0X6B_0X05
    elif mode == ThermostatMode.AUTO:
        new_mode = COMMAND_THERMOSTAT_CONTROL_ON_AUTO_0X6B_0X06
    if new_mode:
        msg = ExtendedSend(address=self._address,
                           commandtuple=new_mode,
                           userdata=Userdata())
        msg.set_checksum()
        self._send_method(msg, self._mode_change_ack)
def set(self, mode):
    """Set the thermostat fan mode.

    Mode options:
        FAN_AUTO = 0x04,
        FAN_ALWAYS_ON = 0x8

    Other modes are silently ignored.
    """
    # Initialize so a non-fan mode does not raise UnboundLocalError
    # (the original left new_mode unbound in that case).
    new_mode = None
    if mode == ThermostatMode.FAN_AUTO:
        new_mode = COMMAND_THERMOSTAT_CONTROL_OFF_FAN_0X6B_0X08
    elif mode == ThermostatMode.FAN_ALWAYS_ON:
        new_mode = COMMAND_THERMOSTAT_CONTROL_ON_FAN_0X6B_0X07
    if new_mode:
        msg = ExtendedSend(address=self._address,
                           commandtuple=new_mode,
                           userdata=Userdata())
        msg.set_checksum()
        self._send_method(msg, self._mode_change_ack)
def set(self, val):
    """Set the cool set point.

    The device encodes the temperature in half-degree units, hence
    the factor of 2.
    """
    msg = ExtendedSend(
        address=self._address,
        commandtuple=COMMAND_THERMOSTAT_SET_COOL_SETPOINT_0X6C_NONE,
        cmd2=int(val * 2),
        userdata=Userdata())
    msg.set_checksum()
    self._send_method(msg, self._set_cool_point_ack)
def set(self, val):
    """Set the heat set point.

    The device encodes the temperature in half-degree units, hence
    the factor of 2.
    """
    msg = ExtendedSend(
        address=self._address,
        commandtuple=COMMAND_THERMOSTAT_SET_HEAT_SETPOINT_0X6D_NONE,
        cmd2=int(val * 2),
        userdata=Userdata())
    msg.set_checksum()
    self._send_method(msg, self._set_heat_point_ack)
def add_device_callback(self, callback):
    """Register a callback to be invoked when a new device appears."""
    _LOGGER.debug('Added new callback %s ', callback)
    self._cb_new_device.append(callback)
def add_override(self, addr, key, value):
    """Register an attribute override for a device.

    Overrides are keyed by the normalized address id; multiple keys
    may be overridden for the same device.
    """
    address = Address(str(addr)).id
    _LOGGER.debug('New override for %s %s is %s', address, key, value)
    device_override = self._overrides.get(address, {})
    device_override[key] = value
    self._overrides[address] = device_override
def create_device_from_category(self, plm, addr, cat, subcat,
                                product_key=0x00):
    """Create a new device from the cat, subcat and product_key data.

    Saved-device data takes precedence over the passed-in values, and
    user overrides take precedence over both; the legacy 'firmware'
    override key is honored before 'product_key'.
    """
    saved_device = self._saved_devices.get(Address(addr).id, {})
    cat = saved_device.get('cat', cat)
    subcat = saved_device.get('subcat', subcat)
    product_key = saved_device.get('product_key', product_key)
    device_override = self._overrides.get(Address(addr).id, {})
    cat = device_override.get('cat', cat)
    subcat = device_override.get('subcat', subcat)
    product_key = device_override.get('firmware', product_key)
    product_key = device_override.get('product_key', product_key)
    return insteonplm.devices.create(plm, addr, cat, subcat, product_key)
def has_saved(self, addr):
"""Test if device has data from the saved data file."""
saved = False
if self._saved_devices.get(addr, None) is not None:
saved = True
return saved | Test if device has data from the saved data file. | entailment |
def has_override(self, addr):
"""Test if device has data from a device override setting."""
override = False
if self._overrides.get(addr, None) is not None:
override = True
return override | Test if device has data from a device override setting. | entailment |
def add_known_devices(self, plm):
"""Add devices from the saved devices or from the device overrides."""
from insteonplm.devices import ALDBStatus
for addr in self._saved_devices:
if not self._devices.get(addr):
saved_device = self._saved_devices.get(Address(addr).id, {})
cat = saved_device.get('cat')
subcat = saved_device.get('subcat')
product_key = saved_device.get('firmware')
product_key = saved_device.get('product_key', product_key)
device = self.create_device_from_category(
plm, addr, cat, subcat, product_key)
if device:
_LOGGER.debug('Device with id %s added to device list '
'from saved device data.', addr)
aldb_status = saved_device.get('aldb_status', 0)
device.aldb.status = ALDBStatus(aldb_status)
aldb = saved_device.get('aldb', {})
device.aldb.load_saved_records(aldb_status, aldb)
self[addr] = device
for addr in self._overrides:
if not self._devices.get(addr):
device_override = self._overrides.get(Address(addr).id, {})
cat = device_override.get('cat')
subcat = device_override.get('subcat')
product_key = device_override.get('firmware')
product_key = device_override.get('product_key', product_key)
device = self.create_device_from_category(
plm, addr, cat, subcat, product_key)
if device:
_LOGGER.debug('Device with id %s added to device list '
'from device override data.', addr)
self[addr] = device | Add devices from the saved devices or from the device overrides. | entailment |
def save_device_info(self):
"""Save all device information to the device info file."""
if self._workdir is not None:
devices = []
for addr in self._devices:
device = self._devices.get(addr)
if not device.address.is_x10:
aldb = {}
for mem in device.aldb:
rec = device.aldb[mem]
if rec:
aldbRec = {'memory': mem,
'control_flags': rec.control_flags.byte,
'group': rec.group,
'address': rec.address.id,
'data1': rec.data1,
'data2': rec.data2,
'data3': rec.data3}
aldb[mem] = aldbRec
deviceInfo = {'address': device.address.id,
'cat': device.cat,
'subcat': device.subcat,
'product_key': device.product_key,
'aldb_status': device.aldb.status.value,
'aldb': aldb}
devices.append(deviceInfo)
asyncio.ensure_future(self._write_saved_device_info(devices),
loop=self._loop) | Save all device information to the device info file. | entailment |
def _add_saved_device_info(self, **kwarg):
"""Register device info from the saved data file."""
addr = kwarg.get('address')
_LOGGER.debug('Found saved device with address %s', addr)
self._saved_devices[addr] = kwarg | Register device info from the saved data file. | entailment |
async def load_saved_device_info(self):
"""Load device information from the device info file."""
_LOGGER.debug("Loading saved device info.")
deviceinfo = []
if self._workdir:
_LOGGER.debug("Really Loading saved device info.")
try:
device_file = '{}/{}'.format(self._workdir, DEVICE_INFO_FILE)
with open(device_file, 'r') as infile:
try:
deviceinfo = json.load(infile)
_LOGGER.debug("Saved device file loaded")
except json.decoder.JSONDecodeError:
_LOGGER.debug("Loading saved device file failed")
except FileNotFoundError:
_LOGGER.debug("Saved device file not found")
for device in deviceinfo:
self._add_saved_device_info(**device) | Load device information from the device info file. | entailment |
def unit_code_msg(housecode, unitcode):
"""Create an X10 message to send the house code and unit code."""
house_byte = 0
unit_byte = 0
if isinstance(housecode, str):
house_byte = insteonplm.utils.housecode_to_byte(housecode) << 4
unit_byte = insteonplm.utils.unitcode_to_byte(unitcode)
elif isinstance(housecode, int) and housecode < 16:
house_byte = housecode << 4
unit_byte = unitcode
else:
house_byte = housecode
unit_byte = unitcode
return X10Received(house_byte + unit_byte, 0x00) | Create an X10 message to send the house code and unit code. | entailment |
def command_msg(housecode, command):
"""Create an X10 message to send the house code and a command code."""
house_byte = 0
if isinstance(housecode, str):
house_byte = insteonplm.utils.housecode_to_byte(housecode) << 4
elif isinstance(housecode, int) and housecode < 16:
house_byte = housecode << 4
else:
house_byte = housecode
return X10Received(house_byte + command, 0x80) | Create an X10 message to send the house code and a command code. | entailment |
async def create_http_connection(loop, protocol_factory, host, port=25105,
auth=None):
"""Create an HTTP session used to connect to the Insteon Hub."""
protocol = protocol_factory()
transport = HttpTransport(loop, protocol, host, port, auth)
_LOGGER.debug("create_http_connection Finished creating connection")
return (transport, protocol) | Create an HTTP session used to connect to the Insteon Hub. | entailment |
async def create(cls, device='/dev/ttyUSB0', host=None,
username=None, password=None, port=25010, hub_version=2,
auto_reconnect=True, loop=None, workdir=None,
poll_devices=True, load_aldb=True):
"""Create a connection to a specific device.
Here is where we supply the device and callback callables we
expect for this PLM class object.
:param device:
Unix device where the PLM is attached
:param address:
IP Address of the Hub
:param username:
User name for connecting to the Hub
:param password:
Password for connecting to the Hub
:param auto_reconnect:
Should the Connection try to automatically reconnect if needed?
:param loop:
asyncio.loop for async operation
:param load_aldb:
Should the ALDB be loaded on connect
:type device:
str
:type auto_reconnect:
boolean
:type loop:
asyncio.loop
:type update_callback:
callable
"""
_LOGGER.debug("Starting Modified Connection.create")
conn = cls(device=device, host=host, username=username,
password=password, port=port, hub_version=hub_version,
loop=loop, retry_interval=1, auto_reconnect=auto_reconnect)
def connection_lost():
"""Respond to Protocol connection lost."""
if conn.auto_reconnect and not conn.closing:
_LOGGER.debug("Reconnecting to transport")
asyncio.ensure_future(conn.reconnect(), loop=conn.loop)
protocol_class = PLM
if conn.host and conn.hub_version == 2:
protocol_class = Hub
conn.protocol = protocol_class(
connection_lost_callback=connection_lost,
loop=conn.loop,
workdir=workdir,
poll_devices=poll_devices,
load_aldb=load_aldb)
await conn.reconnect()
_LOGGER.debug("Ending Connection.create")
return conn | Create a connection to a specific device.
Here is where we supply the device and callback callables we
expect for this PLM class object.
:param device:
Unix device where the PLM is attached
:param address:
IP Address of the Hub
:param username:
User name for connecting to the Hub
:param password:
Password for connecting to the Hub
:param auto_reconnect:
Should the Connection try to automatically reconnect if needed?
:param loop:
asyncio.loop for async operation
:param load_aldb:
Should the ALDB be loaded on connect
:type device:
str
:type auto_reconnect:
boolean
:type loop:
asyncio.loop
:type update_callback:
callable | entailment |
async def reconnect(self):
"""Reconnect to the modem."""
_LOGGER.debug('starting Connection.reconnect')
await self._connect()
while self._closed:
await self._retry_connection()
_LOGGER.debug('ending Connection.reconnect') | Reconnect to the modem. | entailment |
async def close(self, event):
"""Close the PLM device connection and don't try to reconnect."""
_LOGGER.info('Closing connection to Insteon Modem')
self._closing = True
self._auto_reconnect = False
await self.protocol.close()
if self.protocol.transport:
self.protocol.transport.close()
await asyncio.sleep(0, loop=self._loop)
_LOGGER.info('Insteon Modem connection closed') | Close the PLM device connection and don't try to reconnect. | entailment |
def dump_conndata(self):
"""Developer tool for debugging forensics."""
attrs = vars(self)
return ', '.join("%s: %s" % item for item in attrs.items()) | Developer tool for debugging forensics. | entailment |
def on(self):
"""Send the On command to an X10 device."""
msg = X10Send.unit_code_msg(self.address.x10_housecode,
self.address.x10_unitcode)
self._send_method(msg)
msg = X10Send.command_msg(self.address.x10_housecode,
X10_COMMAND_ON)
self._send_method(msg, False)
self._update_subscribers(0xff) | Send the On command to an X10 device. | entailment |
def off(self):
"""Send the Off command to an X10 device."""
msg = X10Send.unit_code_msg(self.address.x10_housecode,
self.address.x10_unitcode)
self._send_method(msg)
msg = X10Send.command_msg(self.address.x10_housecode,
X10_COMMAND_OFF)
self._send_method(msg, False)
self._update_subscribers(0x00) | Send the Off command to an X10 device. | entailment |
def set_level(self, val):
"""Set the device ON LEVEL."""
if val == 0:
self.off()
elif val == 255:
self.on()
else:
setlevel = 255
if val < 1:
setlevel = val * 255
elif val <= 0xff:
setlevel = val
change = setlevel - self._value
increment = 255 / self._steps
steps = round(abs(change) / increment)
print('Steps: ', steps)
if change > 0:
method = self.brighten
self._value += round(steps * increment)
self._value = min(255, self._value)
else:
method = self.dim
self._value -= round(steps * increment)
self._value = max(0, self._value)
# pylint: disable=unused-variable
for step in range(0, steps):
method(True)
self._update_subscribers(self._value) | Set the device ON LEVEL. | entailment |
def brighten(self, defer_update=False):
"""Brighten the device one step."""
msg = X10Send.unit_code_msg(self.address.x10_housecode,
self.address.x10_unitcode)
self._send_method(msg)
msg = X10Send.command_msg(self.address.x10_housecode,
X10_COMMAND_BRIGHT)
self._send_method(msg, False)
if not defer_update:
self._update_subscribers(self._value + 255 / self._steps) | Brighten the device one step. | entailment |
def dim(self, defer_update=False):
"""Dim the device one step."""
msg = X10Send.unit_code_msg(self.address.x10_housecode,
self.address.x10_unitcode)
self._send_method(msg)
msg = X10Send.command_msg(self.address.x10_housecode,
X10_COMMAND_DIM)
self._send_method(msg, False)
if not defer_update:
self._update_subscribers(self._value - 255 / self._steps) | Dim the device one step. | entailment |
def matches_pattern(self, other):
"""Test Address object matches the pattern of another object."""
matches = False
if hasattr(other, 'addr'):
if self.addr is None or other.addr is None:
matches = True
else:
matches = self.addr == other.addr
return matches | Test Address object matches the pattern of another object. | entailment |
def _normalize(self, addr):
"""Take any format of address and turn it into a hex string."""
normalize = None
if isinstance(addr, Address):
normalize = addr.addr
self._is_x10 = addr.is_x10
elif isinstance(addr, bytearray):
normalize = binascii.unhexlify(binascii.hexlify(addr).decode())
elif isinstance(addr, bytes):
normalize = addr
elif isinstance(addr, str):
addr = addr.replace('.', '')
addr = addr[0:6]
if addr[0:3].lower() == 'x10':
x10_addr = Address.x10(addr[3:4], int(addr[4:6]))
normalize = x10_addr.addr
self._is_x10 = True
else:
normalize = binascii.unhexlify(addr.lower())
elif addr is None:
normalize = None
else:
_LOGGER.warning('Address class init with unknown type %s: %r',
type(addr), addr)
return normalize | Take any format of address and turn it into a hex string. | entailment |
def human(self):
"""Emit the address in human-readible format (AA.BB.CC)."""
addrstr = '00.00.00'
if self.addr:
if self._is_x10:
housecode_byte = self.addr[1]
housecode = insteonplm.utils.byte_to_housecode(housecode_byte)
unitcode_byte = self.addr[2]
unitcode = insteonplm.utils.byte_to_unitcode(unitcode_byte)
addrstr = 'X10.{}.{:02d}'.format(housecode.upper(), unitcode)
else:
addrstr = '{}.{}.{}'.format(self.hex[0:2],
self.hex[2:4],
self.hex[4:6]).upper()
return addrstr | Emit the address in human-readible format (AA.BB.CC). | entailment |
def hex(self):
"""Emit the address in bare hex format (aabbcc)."""
addrstr = '000000'
if self.addr is not None:
addrstr = binascii.hexlify(self.addr).decode()
return addrstr | Emit the address in bare hex format (aabbcc). | entailment |
def bytes(self):
"""Emit the address in bytes format."""
addrbyte = b'\x00\x00\x00'
if self.addr is not None:
addrbyte = self.addr
return addrbyte | Emit the address in bytes format. | entailment |
def id(self):
"""Return the ID of the device address."""
dev_id = ''
if self._is_x10:
dev_id = 'x10{}{:02d}'.format(self.x10_housecode,
self.x10_unitcode)
else:
dev_id = self.hex
return dev_id | Return the ID of the device address. | entailment |
def x10_housecode(self):
"""Emit the X10 house code."""
housecode = None
if self.is_x10:
housecode = insteonplm.utils.byte_to_housecode(self.addr[1])
return housecode | Emit the X10 house code. | entailment |
def x10_unitcode(self):
"""Emit the X10 unit code."""
unitcode = None
if self.is_x10:
unitcode = insteonplm.utils.byte_to_unitcode(self.addr[2])
return unitcode | Emit the X10 unit code. | entailment |
def x10(cls, housecode, unitcode):
"""Create an X10 device address."""
if housecode.lower() in ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h',
'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p']:
byte_housecode = insteonplm.utils.housecode_to_byte(housecode)
else:
if isinstance(housecode, str):
_LOGGER.error('X10 house code error: %s', housecode)
else:
_LOGGER.error('X10 house code is not a string')
raise ValueError
# 20, 21 and 22 for All Units Off, All Lights On and All Lights Off
# 'fake' units
if unitcode in range(1, 17) or unitcode in range(20, 23):
byte_unitcode = insteonplm.utils.unitcode_to_byte(unitcode)
else:
if isinstance(unitcode, int):
_LOGGER.error('X10 unit code error: %d', unitcode)
else:
_LOGGER.error('X10 unit code is not an integer 1 - 16')
raise ValueError
addr = Address(bytearray([0x00, byte_housecode, byte_unitcode]))
addr.is_x10 = True
return addr | Create an X10 device address. | entailment |
def from_raw_message(cls, rawmessage):
"""Create message from raw byte stream."""
return ManageAllLinkRecord(rawmessage[2:3],
rawmessage[3:4],
rawmessage[4:7],
rawmessage[7:8],
rawmessage[8:9],
rawmessage[9:10],
rawmessage[10:11]) | Create message from raw byte stream. | entailment |
def _term_to_xapian_value(term, field_type):
"""
Converts a term to a serialized
Xapian value based on the field_type.
"""
assert field_type in FIELD_TYPES
def strf(dt):
"""
Equivalent to datetime.datetime.strptime(dt, DATETIME_FORMAT)
but accepts years below 1900 (see http://stackoverflow.com/q/10263956/931303)
"""
return '%04d%02d%02d%02d%02d%02d' % (
dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second)
if field_type == 'boolean':
assert isinstance(term, bool)
if term:
value = 't'
else:
value = 'f'
elif field_type == 'integer':
value = INTEGER_FORMAT % term
elif field_type == 'float':
value = xapian.sortable_serialise(term)
elif field_type == 'date' or field_type == 'datetime':
if field_type == 'date':
# http://stackoverflow.com/a/1937636/931303 and comments
term = datetime.datetime.combine(term, datetime.time())
value = strf(term)
else: # field_type == 'text'
value = _to_xapian_term(term)
return value | Converts a term to a serialized
Xapian value based on the field_type. | entailment |
def _from_xapian_value(value, field_type):
"""
Converts a serialized Xapian value
to Python equivalent based on the field_type.
Doesn't accept multivalued fields.
"""
assert field_type in FIELD_TYPES
if field_type == 'boolean':
if value == 't':
return True
elif value == 'f':
return False
else:
InvalidIndexError('Field type "%d" does not accept value "%s"' % (field_type, value))
elif field_type == 'integer':
return int(value)
elif field_type == 'float':
return xapian.sortable_unserialise(value)
elif field_type == 'date' or field_type == 'datetime':
datetime_value = datetime.datetime.strptime(value, DATETIME_FORMAT)
if field_type == 'datetime':
return datetime_value
else:
return datetime_value.date()
else: # field_type == 'text'
return value | Converts a serialized Xapian value
to Python equivalent based on the field_type.
Doesn't accept multivalued fields. | entailment |
def _update_cache(self):
"""
To avoid build_schema every time, we cache
some values: they only change when a SearchIndex
changes, which typically restarts the Python.
"""
fields = connections[self.connection_alias].get_unified_index().all_searchfields()
if self._fields != fields:
self._fields = fields
self._content_field_name, self._schema = self.build_schema(self._fields) | To avoid build_schema every time, we cache
some values: they only change when a SearchIndex
changes, which typically restarts the Python. | entailment |
def update(self, index, iterable, commit=True):
"""
Updates the `index` with any objects in `iterable` by adding/updating
the database as needed.
Required arguments:
`index` -- The `SearchIndex` to process
`iterable` -- An iterable of model instances to index
Optional arguments:
`commit` -- ignored
For each object in `iterable`, a document is created containing all
of the terms extracted from `index.full_prepare(obj)` with field prefixes,
and 'as-is' as needed. Also, if the field type is 'text' it will be
stemmed and stored with the 'Z' prefix as well.
eg. `content:Testing` ==> `testing, Ztest, ZXCONTENTtest, XCONTENTtest`
Each document also contains an extra term in the format:
`XCONTENTTYPE<app_name>.<model_name>`
As well as a unique identifier in the the format:
`Q<app_name>.<model_name>.<pk>`
eg.: foo.bar (pk=1) ==> `Qfoo.bar.1`, `XCONTENTTYPEfoo.bar`
This is useful for querying for a specific document corresponding to
a model instance.
The document also contains a pickled version of the object itself and
the document ID in the document data field.
Finally, we also store field values to be used for sorting data. We
store these in the document value slots (position zero is reserver
for the document ID). All values are stored as unicode strings with
conversion of float, int, double, values being done by Xapian itself
through the use of the :method:xapian.sortable_serialise method.
"""
database = self._database(writable=True)
try:
term_generator = xapian.TermGenerator()
term_generator.set_database(database)
term_generator.set_stemmer(xapian.Stem(self.language))
try:
term_generator.set_stemming_strategy(self.stemming_strategy)
except AttributeError:
# Versions before Xapian 1.2.11 do not support stemming strategies for TermGenerator
pass
if self.include_spelling is True:
term_generator.set_flags(xapian.TermGenerator.FLAG_SPELLING)
def _add_text(termpos, text, weight, prefix=''):
"""
indexes text appending 2 extra terms
to identify beginning and ending of the text.
"""
term_generator.set_termpos(termpos)
start_term = '%s^' % prefix
end_term = '%s$' % prefix
# add begin
document.add_posting(start_term, termpos, weight)
# add text
term_generator.index_text(text, weight, prefix)
termpos = term_generator.get_termpos()
# add ending
termpos += 1
document.add_posting(end_term, termpos, weight)
# increase termpos
term_generator.set_termpos(termpos)
term_generator.increase_termpos(TERMPOS_DISTANCE)
return term_generator.get_termpos()
def _add_literal_text(termpos, text, weight, prefix=''):
"""
Adds sentence to the document with positional information
but without processing.
The sentence is bounded by "^" "$" to allow exact matches.
"""
text = '^ %s $' % text
for word in text.split():
term = '%s%s' % (prefix, word)
document.add_posting(term, termpos, weight)
termpos += 1
termpos += TERMPOS_DISTANCE
return termpos
def add_text(termpos, prefix, text, weight):
"""
Adds text to the document with positional information
and processing (e.g. stemming).
"""
termpos = _add_text(termpos, text, weight, prefix=prefix)
termpos = _add_text(termpos, text, weight, prefix='')
termpos = _add_literal_text(termpos, text, weight, prefix=prefix)
termpos = _add_literal_text(termpos, text, weight, prefix='')
return termpos
def _get_ngram_lengths(value):
values = value.split()
for item in values:
for ngram_length in six.moves.range(NGRAM_MIN_LENGTH, NGRAM_MAX_LENGTH + 1):
yield item, ngram_length
for obj in iterable:
document = xapian.Document()
term_generator.set_document(document)
def ngram_terms(value):
for item, length in _get_ngram_lengths(value):
item_length = len(item)
for start in six.moves.range(0, item_length - length + 1):
for size in six.moves.range(length, length + 1):
end = start + size
if end > item_length:
continue
yield _to_xapian_term(item[start:end])
def edge_ngram_terms(value):
for item, length in _get_ngram_lengths(value):
yield _to_xapian_term(item[0:length])
def add_edge_ngram_to_document(prefix, value, weight):
"""
Splits the term in ngrams and adds each ngram to the index.
The minimum and maximum size of the ngram is respectively
NGRAM_MIN_LENGTH and NGRAM_MAX_LENGTH.
"""
for term in edge_ngram_terms(value):
document.add_term(term, weight)
document.add_term(prefix + term, weight)
def add_ngram_to_document(prefix, value, weight):
"""
Splits the term in ngrams and adds each ngram to the index.
The minimum and maximum size of the ngram is respectively
NGRAM_MIN_LENGTH and NGRAM_MAX_LENGTH.
"""
for term in ngram_terms(value):
document.add_term(term, weight)
document.add_term(prefix + term, weight)
def add_non_text_to_document(prefix, term, weight):
"""
Adds term to the document without positional information
and without processing.
If the term is alone, also adds it as "^<term>$"
to allow exact matches on single terms.
"""
document.add_term(term, weight)
document.add_term(prefix + term, weight)
def add_datetime_to_document(termpos, prefix, term, weight):
"""
Adds a datetime to document with positional order
to allow exact matches on it.
"""
date, time = term.split()
document.add_posting(date, termpos, weight)
termpos += 1
document.add_posting(time, termpos, weight)
termpos += 1
document.add_posting(prefix + date, termpos, weight)
termpos += 1
document.add_posting(prefix + time, termpos, weight)
termpos += TERMPOS_DISTANCE + 1
return termpos
data = index.full_prepare(obj)
weights = index.get_field_weights()
termpos = term_generator.get_termpos() # identifies the current position in the document.
for field in self.schema:
if field['field_name'] not in list(data.keys()):
# not supported fields are ignored.
continue
if field['field_name'] in weights:
weight = int(weights[field['field_name']])
else:
weight = 1
value = data[field['field_name']]
if field['field_name'] in (ID, DJANGO_ID, DJANGO_CT):
# Private fields are indexed in a different way:
# `django_id` is an int and `django_ct` is text;
# besides, they are indexed by their (unstemmed) value.
if field['field_name'] == DJANGO_ID:
value = int(value)
value = _term_to_xapian_value(value, field['type'])
document.add_term(TERM_PREFIXES[field['field_name']] + value, weight)
document.add_value(field['column'], value)
continue
else:
prefix = TERM_PREFIXES['field'] + field['field_name'].upper()
# if not multi_valued, we add as a document value
# for sorting and facets
if field['multi_valued'] == 'false':
document.add_value(field['column'], _term_to_xapian_value(value, field['type']))
else:
for t in value:
# add the exact match of each value
term = _to_xapian_term(t)
termpos = add_text(termpos, prefix, term, weight)
continue
term = _to_xapian_term(value)
if term == '':
continue
# from here on the term is a string;
# we now decide how it is indexed
if field['type'] == 'text':
# text is indexed with positional information
termpos = add_text(termpos, prefix, term, weight)
elif field['type'] == 'datetime':
termpos = add_datetime_to_document(termpos, prefix, term, weight)
elif field['type'] == 'ngram':
add_ngram_to_document(prefix, value, weight)
elif field['type'] == 'edge_ngram':
add_edge_ngram_to_document(prefix, value, weight)
else:
# all other terms are added without positional information
add_non_text_to_document(prefix, term, weight)
# store data without indexing it
document.set_data(pickle.dumps(
(obj._meta.app_label, obj._meta.model_name, obj.pk, data),
pickle.HIGHEST_PROTOCOL
))
# add the id of the document
document_id = TERM_PREFIXES[ID] + get_identifier(obj)
document.add_term(document_id)
# finally, replace or add the document to the database
database.replace_document(document_id, document)
except UnicodeDecodeError:
sys.stderr.write('Chunk failed.\n')
pass
finally:
database.close() | Updates the `index` with any objects in `iterable` by adding/updating
the database as needed.
Required arguments:
`index` -- The `SearchIndex` to process
`iterable` -- An iterable of model instances to index
Optional arguments:
`commit` -- ignored
For each object in `iterable`, a document is created containing all
of the terms extracted from `index.full_prepare(obj)` with field prefixes,
and 'as-is' as needed. Also, if the field type is 'text' it will be
stemmed and stored with the 'Z' prefix as well.
eg. `content:Testing` ==> `testing, Ztest, ZXCONTENTtest, XCONTENTtest`
Each document also contains an extra term in the format:
`XCONTENTTYPE<app_name>.<model_name>`
As well as a unique identifier in the the format:
`Q<app_name>.<model_name>.<pk>`
eg.: foo.bar (pk=1) ==> `Qfoo.bar.1`, `XCONTENTTYPEfoo.bar`
This is useful for querying for a specific document corresponding to
a model instance.
The document also contains a pickled version of the object itself and
the document ID in the document data field.
Finally, we also store field values to be used for sorting data. We
store these in the document value slots (position zero is reserver
for the document ID). All values are stored as unicode strings with
conversion of float, int, double, values being done by Xapian itself
through the use of the :method:xapian.sortable_serialise method. | entailment |
def remove(self, obj, commit=True):
"""
Remove indexes for `obj` from the database.
We delete all instances of `Q<app_name>.<model_name>.<pk>` which
should be unique to this object.
Optional arguments:
`commit` -- ignored
"""
database = self._database(writable=True)
database.delete_document(TERM_PREFIXES[ID] + get_identifier(obj))
database.close() | Remove indexes for `obj` from the database.
We delete all instances of `Q<app_name>.<model_name>.<pk>` which
should be unique to this object.
Optional arguments:
`commit` -- ignored | entailment |
def clear(self, models=(), commit=True):
"""
Clear all instances of `models` from the database or all models, if
not specified.
Optional Arguments:
`models` -- Models to clear from the database (default = [])
If `models` is empty, an empty query is executed which matches all
documents in the database. Afterwards, each match is deleted.
Otherwise, for each model, a `delete_document` call is issued with
the term `XCONTENTTYPE<app_name>.<model_name>`. This will delete
all documents with the specified model type.
"""
if not models:
# Because there does not appear to be a "clear all" method,
# it's much quicker to remove the contents of the `self.path`
# folder than it is to remove each document one at a time.
if os.path.exists(self.path):
shutil.rmtree(self.path)
else:
database = self._database(writable=True)
for model in models:
database.delete_document(TERM_PREFIXES[DJANGO_CT] + get_model_ct(model))
database.close() | Clear all instances of `models` from the database or all models, if
not specified.
Optional Arguments:
`models` -- Models to clear from the database (default = [])
If `models` is empty, an empty query is executed which matches all
documents in the database. Afterwards, each match is deleted.
Otherwise, for each model, a `delete_document` call is issued with
the term `XCONTENTTYPE<app_name>.<model_name>`. This will delete
all documents with the specified model type. | entailment |
def _build_models_query(self, query):
"""
Builds a query from `query` that filters to documents only from registered models.
"""
registered_models_ct = self.build_models_list()
if registered_models_ct:
restrictions = [xapian.Query('%s%s' % (TERM_PREFIXES[DJANGO_CT], model_ct))
for model_ct in registered_models_ct]
limit_query = xapian.Query(xapian.Query.OP_OR, restrictions)
query = xapian.Query(xapian.Query.OP_AND, query, limit_query)
return query | Builds a query from `query` that filters to documents only from registered models. | entailment |
def _check_field_names(self, field_names):
"""
Raises InvalidIndexError if any of a field_name in field_names is
not indexed.
"""
if field_names:
for field_name in field_names:
try:
self.column[field_name]
except KeyError:
raise InvalidIndexError('Trying to use non indexed field "%s"' % field_name) | Raises InvalidIndexError if any of a field_name in field_names is
not indexed. | entailment |
def search(self, query, sort_by=None, start_offset=0, end_offset=None,
fields='', highlight=False, facets=None, date_facets=None,
query_facets=None, narrow_queries=None, spelling_query=None,
limit_to_registered_models=None, result_class=None, **kwargs):
"""
Executes the Xapian::query as defined in `query`.
Required arguments:
`query` -- Search query to execute
Optional arguments:
`sort_by` -- Sort results by specified field (default = None)
`start_offset` -- Slice results from `start_offset` (default = 0)
`end_offset` -- Slice results at `end_offset` (default = None), if None, then all documents
`fields` -- Filter results on `fields` (default = '')
`highlight` -- Highlight terms in results (default = False)
`facets` -- Facet results on fields (default = None)
`date_facets` -- Facet results on date ranges (default = None)
`query_facets` -- Facet results on queries (default = None)
`narrow_queries` -- Narrow queries (default = None)
`spelling_query` -- An optional query to execute spelling suggestion on
`limit_to_registered_models` -- Limit returned results to models registered in
the current `SearchSite` (default = True)
Returns:
A dictionary with the following keys:
`results` -- A list of `SearchResult`
`hits` -- The total available results
`facets` - A dictionary of facets with the following keys:
`fields` -- A list of field facets
`dates` -- A list of date facets
`queries` -- A list of query facets
If faceting was not used, the `facets` key will not be present
If `query` is None, returns no results.
If `INCLUDE_SPELLING` was enabled in the connection options, the
extra flag `FLAG_SPELLING_CORRECTION` will be passed to the query parser
and any suggestions for spell correction will be returned as well as
the results.
"""
if xapian.Query.empty(query):
return {
'results': [],
'hits': 0,
}
self._check_field_names(facets)
self._check_field_names(date_facets)
self._check_field_names(query_facets)
database = self._database()
if limit_to_registered_models is None:
limit_to_registered_models = getattr(settings, 'HAYSTACK_LIMIT_TO_REGISTERED_MODELS', True)
if result_class is None:
result_class = SearchResult
if self.include_spelling is True:
spelling_suggestion = self._do_spelling_suggestion(database, query, spelling_query)
else:
spelling_suggestion = ''
if narrow_queries is not None:
query = xapian.Query(
xapian.Query.OP_AND, query, xapian.Query(
xapian.Query.OP_AND, [self.parse_query(narrow_query) for narrow_query in narrow_queries]
)
)
if limit_to_registered_models:
query = self._build_models_query(query)
enquire = xapian.Enquire(database)
if hasattr(settings, 'HAYSTACK_XAPIAN_WEIGHTING_SCHEME'):
enquire.set_weighting_scheme(xapian.BM25Weight(*settings.HAYSTACK_XAPIAN_WEIGHTING_SCHEME))
enquire.set_query(query)
if sort_by:
try:
_xapian_sort(enquire, sort_by, self.column)
except NotSupportedError:
_old_xapian_sort(enquire, sort_by, self.column)
results = []
facets_dict = {
'fields': {},
'dates': {},
'queries': {},
}
if not end_offset:
end_offset = database.get_doccount() - start_offset
## prepare spies in case of facets
if facets:
facets_spies = self._prepare_facet_field_spies(facets)
for spy in facets_spies:
enquire.add_matchspy(spy)
# print enquire.get_query()
matches = self._get_enquire_mset(database, enquire, start_offset, end_offset)
for match in matches:
app_label, model_name, pk, model_data = pickle.loads(self._get_document_data(database, match.document))
if highlight:
model_data['highlighted'] = {
self.content_field_name: self._do_highlight(
model_data.get(self.content_field_name), query
)
}
results.append(
result_class(app_label, model_name, pk, match.percent, **model_data)
)
if facets:
# pick single valued facets from spies
single_facets_dict = self._process_facet_field_spies(facets_spies)
# pick multivalued valued facets from results
multi_facets_dict = self._do_multivalued_field_facets(results, facets)
# merge both results (http://stackoverflow.com/a/38990/931303)
facets_dict['fields'] = dict(list(single_facets_dict.items()) + list(multi_facets_dict.items()))
if date_facets:
facets_dict['dates'] = self._do_date_facets(results, date_facets)
if query_facets:
facets_dict['queries'] = self._do_query_facets(results, query_facets)
return {
'results': results,
'hits': self._get_hit_count(database, enquire),
'facets': facets_dict,
'spelling_suggestion': spelling_suggestion,
} | Executes the Xapian::query as defined in `query`.
Required arguments:
`query` -- Search query to execute
Optional arguments:
`sort_by` -- Sort results by specified field (default = None)
`start_offset` -- Slice results from `start_offset` (default = 0)
`end_offset` -- Slice results at `end_offset` (default = None), if None, then all documents
`fields` -- Filter results on `fields` (default = '')
`highlight` -- Highlight terms in results (default = False)
`facets` -- Facet results on fields (default = None)
`date_facets` -- Facet results on date ranges (default = None)
`query_facets` -- Facet results on queries (default = None)
`narrow_queries` -- Narrow queries (default = None)
`spelling_query` -- An optional query to execute spelling suggestion on
`limit_to_registered_models` -- Limit returned results to models registered in
the current `SearchSite` (default = True)
Returns:
A dictionary with the following keys:
`results` -- A list of `SearchResult`
`hits` -- The total available results
`facets` - A dictionary of facets with the following keys:
`fields` -- A list of field facets
`dates` -- A list of date facets
`queries` -- A list of query facets
If faceting was not used, the `facets` key will not be present
If `query` is None, returns no results.
If `INCLUDE_SPELLING` was enabled in the connection options, the
extra flag `FLAG_SPELLING_CORRECTION` will be passed to the query parser
and any suggestions for spell correction will be returned as well as
the results. | entailment |
def more_like_this(self, model_instance, additional_query=None,
                   start_offset=0, end_offset=None,
                   limit_to_registered_models=True, result_class=None, **kwargs):
    """
    Given a model instance, return a result set of similar documents.

    Required arguments:
        `model_instance` -- The model instance to use as a basis for
                            retrieving similar documents.

    Optional arguments:
        `additional_query` -- An additional query to narrow results
        `start_offset` -- The starting offset (default=0)
        `end_offset` -- The ending offset (default=None); if None, all documents
        `limit_to_registered_models` -- Limit returned results to models
                                        registered in the search (default=True)
        `result_class` -- Class used to wrap each hit (default=SearchResult)

    Returns:
        A dictionary with the keys `results` (a list of `result_class`
        instances), `hits` (the total available results), and — for
        interface parity with `search` — empty `facets` and a None
        `spelling_suggestion`.

    Opens a database connection, builds a simple query from the
    `model_instance`'s unique identifier, adds the matched document to an
    RSet (relevance set), asks Xapian for an ESet (terms suggesting query
    expansions), and queries with those terms while excluding the original
    document.
    """
    database = self._database()

    if result_class is None:
        result_class = SearchResult

    # Locate the indexed document for this instance via its unique ID term.
    query = xapian.Query(TERM_PREFIXES[ID] + get_identifier(model_instance))

    enquire = xapian.Enquire(database)
    enquire.set_query(query)

    rset = xapian.RSet()

    if not end_offset:
        end_offset = database.get_doccount()

    match = None
    for match in self._get_enquire_mset(database, enquire, 0, end_offset):
        rset.add_document(match.docid)

    if match is None:
        if not self.silently_fail:
            raise InvalidIndexError('Instance %s with id "%d" not indexed' %
                                    (get_identifier(model_instance), model_instance.id))
        # Keep the return shape consistent with the success path so callers
        # can unconditionally read 'facets' and 'spelling_suggestion'.
        return {'results': [],
                'hits': 0,
                'facets': {'fields': {}, 'dates': {}, 'queries': {}},
                'spelling_suggestion': None}

    # Build an expanded query from the most relevant terms (ESet) of the
    # matched document, then exclude the original document itself.
    query = xapian.Query(
        xapian.Query.OP_ELITE_SET,
        [expand.term for expand in enquire.get_eset(match.document.termlist_count(), rset, XHExpandDecider())],
        match.document.termlist_count()
    )
    query = xapian.Query(
        xapian.Query.OP_AND_NOT, [query, TERM_PREFIXES[ID] + get_identifier(model_instance)]
    )
    if limit_to_registered_models:
        query = self._build_models_query(query)
    if additional_query:
        query = xapian.Query(
            xapian.Query.OP_AND, query, additional_query
        )

    enquire.set_query(query)

    results = []
    matches = self._get_enquire_mset(database, enquire, start_offset, end_offset)

    for match in matches:
        # NOTE(review): document data is unpickled — safe only because the
        # index is written by this backend, not by untrusted input.
        app_label, model_name, pk, model_data = pickle.loads(self._get_document_data(database, match.document))
        results.append(
            result_class(app_label, model_name, pk, match.percent, **model_data)
        )

    return {
        'results': results,
        'hits': self._get_hit_count(database, enquire),
        'facets': {
            'fields': {},
            'dates': {},
            'queries': {},
        },
        'spelling_suggestion': None,
    }
Required arguments:
`model_instance` -- The model instance to use as a basis for
retrieving similar documents.
Optional arguments:
`additional_query` -- An additional query to narrow results
`start_offset` -- The starting offset (default=0)
`end_offset` -- The ending offset (default=None), if None, then all documents
`limit_to_registered_models` -- Limit returned results to models registered in the search (default = True)
Returns:
A dictionary with the following keys:
`results` -- A list of `SearchResult`
`hits` -- The total available results
Opens a database connection, then builds a simple query using the
`model_instance` to build the unique identifier.
For each document retrieved (should always be one), adds an entry into
an RSet (relevance set) with the document id, then, uses the RSet
to query for an ESet (A set of terms that can be used to suggest
expansions to the original query), omitting any document that was in
the original query.
Finally, processes the resulting matches and returns. | entailment |
def parse_query(self, query_string):
    """
    Given a `query_string`, attempt to return a xapian.Query.

    Required arguments:
        ``query_string`` -- A query string to parse

    Returns a xapian.Query. The string '*' matches everything and the
    empty string matches nothing.
    """
    if query_string == '*':
        return xapian.Query('')  # Match everything
    elif query_string == '':
        return xapian.Query()  # Match nothing

    qp = xapian.QueryParser()
    qp.set_database(self._database())
    qp.set_stemmer(xapian.Stem(self.language))
    qp.set_stemming_strategy(self.stemming_strategy)
    qp.set_default_op(XAPIAN_OPTS[DEFAULT_OPERATOR])
    qp.add_boolean_prefix(DJANGO_CT, TERM_PREFIXES[DJANGO_CT])

    # Register per-field prefixes so field-qualified queries
    # ("title:foo") resolve to the right terms.
    for field_dict in self.schema:
        # 'django_ct' already has a boolean prefix above; skip it here.
        if field_dict['field_name'] == DJANGO_CT:
            continue
        qp.add_prefix(
            field_dict['field_name'],
            TERM_PREFIXES['field'] + field_dict['field_name'].upper()
        )

    # Support "field:start..end" range queries.
    vrp = XHValueRangeProcessor(self)
    qp.add_valuerangeprocessor(vrp)

    return qp.parse_query(query_string, self.flags)
Required arguments:
``query_string`` -- A query string to parse
Returns a xapian.Query | entailment |
def build_schema(self, fields):
    """
    Build the schema from fields.

    :param fields: A dict of field name -> field class in the index
    :returns: a ``(content_field_name, schema_fields)`` tuple, where
        ``schema_fields`` is a list of dictionaries with the keys:
        'field_name': the name of the field index
        'type': the type of the field
        'multi_valued': 'true'/'false' -- whether it allows more than one value
        'column': a number identifying the field's value slot

    Also populates ``self._columns`` with each field's column number.
    """
    # Mapping from haystack field_type to the schema 'type' entry;
    # unlisted types (plain text fields) fall back to 'text'.
    type_map = {
        'date': 'date',
        'datetime': 'datetime',
        'integer': 'integer',
        'float': 'float',
        'boolean': 'boolean',
        'ngram': 'ngram',
        'edge_ngram': 'edge_ngram',
    }

    content_field_name = ''

    # The first three columns are reserved for the identifier fields.
    schema_fields = [
        {'field_name': ID,
         'type': 'text',
         'multi_valued': 'false',
         'column': 0},
        {'field_name': DJANGO_ID,
         'type': 'integer',
         'multi_valued': 'false',
         'column': 1},
        {'field_name': DJANGO_CT,
         'type': 'text',
         'multi_valued': 'false',
         'column': 2},
    ]
    self._columns[ID] = 0
    self._columns[DJANGO_ID] = 1
    self._columns[DJANGO_CT] = 2

    column = len(schema_fields)

    # Sort by field name so column assignment is deterministic.
    for field_name, field_class in sorted(fields.items(), key=lambda n: n[0]):
        if field_class.document is True:
            content_field_name = field_class.index_fieldname

        if field_class.indexed is True:
            field_data = {
                'field_name': field_class.index_fieldname,
                'type': type_map.get(field_class.field_type, 'text'),
                'multi_valued': 'true' if field_class.is_multivalued else 'false',
                'column': column,
            }

            schema_fields.append(field_data)
            self._columns[field_data['field_name']] = column
            column += 1

    return content_field_name, schema_fields
:param fields: A list of fields in the index
:returns: list of dictionaries
Each dictionary has the keys
field_name: The name of the field index
type: what type of value it is
'multi_valued': if it allows more than one value
'column': a number identifying it
'type': the type of the field
'multi_valued': 'false', 'column': 0} | entailment |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.