INSTRUCTION
stringlengths
1
8.43k
RESPONSE
stringlengths
75
104k
Drops any existing work tables, as returned by :meth:`~giraffez.load.TeradataBulkLoad.tables`.
def cleanup(self):
    """
    Drops any existing work tables, as returned by
    :meth:`~giraffez.load.TeradataBulkLoad.tables`.

    :raises `giraffez.TeradataPTError`: if a Teradata error occurred
    """
    workers = []
    # Drop each existing work table concurrently to reduce wait time.
    for name in self.tables:
        if not self.mload.exists(name):
            continue
        log.info("BulkLoad", "Dropping table '{}'...".format(name))
        worker = threading.Thread(target=self.mload.drop_table, args=(name,))
        workers.append(worker)
        worker.start()
    for worker in workers:
        worker.join()
Finishes the load job. Called automatically when the connection closes.
def finish(self):
    """
    Finishes the load job. Called automatically when the connection closes.

    :return: The exit code returned when applying rows to the table
    :raises `giraffez.TeradataPTError`: if the job ends with a non-zero
        return code
    """
    if self.finished:
        return self.exit_code
    # Checkpoint flushes buffered rows before the exit code is read
    # (previous unused `checkpoint_status` local removed).
    self.checkpoint()
    self.exit_code = self._exit_code()
    if self.exit_code != 0:
        raise TeradataPTError("BulkLoad job finished with return code '{}'".format(self.exit_code))
    # TODO(chris): should this happen every time?
    if self.applied_count > 0:
        self._end_acquisition()
        self._apply_rows()
        # Applying rows can change the job status, so re-check the exit code.
        self.exit_code = self._exit_code()
        if self.exit_code != 0:
            raise TeradataPTError("BulkLoad job finished with return code '{}'".format(self.exit_code))
    self.finished = True
    return self.exit_code
Load from a file into the target table handling each step of the load process.
def from_file(self, filename, table=None, delimiter='|', null='NULL', panic=True,
              quotechar='"', parse_dates=False):
    """
    Load from a file into the target table, handling each step of the
    load process.

    Can load from text files, and properly formatted giraffez archive
    files. In both cases, if Gzip compression is detected the file will
    be decompressed while reading and handled appropriately. The encoding
    is determined automatically by the contents of the file.

    It is not necessary to set the columns in use prior to loading from a
    file. In the case of a text file, the header is used to determine
    column names and their order. Valid delimiters include '|', ',', and
    '\\t' (tab). When loading an archive file, the column information is
    decoded alongside the data.

    :param str filename: The location of the file to be loaded
    :param str table: The name of the target table, if it was not
        specified to the constructor for the instance
    :param str delimiter: Indicates that fields in the file are separated
        by this delimiter. Defaults to :code:`'|'`
    :param str null: The string that indicates a null value in the rows
        being inserted from a file. Defaults to 'NULL'
    :param bool panic: If :code:`True`, when an error is encountered it
        will be raised. Otherwise, the error will be logged and
        :code:`self.error_count` is incremented.
    :param str quotechar: The character used to quote fields containing
        special characters, like the delimiter.
    :param bool parse_dates: If :code:`True`, rows are preprocessed with
        a :code:`DateHandler` built from the file's columns
    :return: The output of the call to
        :meth:`~giraffez.load.TeradataBulkLoad.finish`
    :raises `giraffez.errors.GiraffeError`: if table was not set and
        :code:`table` is :code:`None`, or if a Teradata error occurred
        while retrieving table info.
    :raises `giraffez.errors.GiraffeEncodeError`: if :code:`panic` is
        :code:`True` and there are format errors in the row values.
    """
    if not self.table:
        if not table:
            raise GiraffeError("Table must be set or specified to load a file.")
        self.table = table
    if not isinstance(null, basestring):
        raise GiraffeError("Expected 'null' to be str, received {}".format(type(null)))
    with Reader(filename, delimiter=delimiter, quotechar=quotechar) as f:
        if not isinstance(f.delimiter, basestring):
            raise GiraffeError("Expected 'delimiter' to be str, received {}".format(type(delimiter)))
        self.columns = f.header
        if isinstance(f, ArchiveFileReader):
            # Archive files carry pre-encoded rows; pass them through raw.
            self.mload.set_encoding(ROW_ENCODING_RAW)
        self.preprocessor = lambda s: s
        if parse_dates:
            self.preprocessor = DateHandler(self.columns)
        self._initiate()
        self.mload.set_null(null)
        self.mload.set_delimiter(delimiter)
        # i stays 0 when the file is empty so the final log is still valid.
        i = 0
        for i, line in enumerate(f, 1):
            self.put(line, panic=panic)
            if i % self.checkpoint_interval == 1:
                log.info("\rBulkLoad", "Processed {} rows".format(i), console=True)
                # Checkpoint so a bad exit code can stop the load early
                # (previous unused `checkpoint_status` local removed).
                self.checkpoint()
                self.exit_code = self._exit_code()
                if self.exit_code != 0:
                    return self.exit_code
        log.info("\rBulkLoad", "Processed {} rows".format(i))
    return self.finish()
Load a single row into the target table.
def put(self, items, panic=True):
    """
    Load a single row into the target table.

    :param list items: A list of values in the row corresponding to the
        fields specified by :code:`self.columns`
    :param bool panic: If :code:`True`, when an error is encountered it
        will be raised. Otherwise, the error will be logged and
        :code:`self.error_count` is incremented.
    :raises `giraffez.errors.GiraffeEncodeError`: if :code:`panic` is
        :code:`True` and there are format errors in the row values.
    :raises `giraffez.errors.GiraffeError`: if table name is not set.
    :raises `giraffez.TeradataPTError`: if there is a problem connecting
        to Teradata.
    """
    if not self.initiated:
        self._initiate()
    try:
        # Unused `row_status` local removed.
        self.mload.put_row(self.preprocessor(items))
        self.applied_count += 1
    except (TeradataPTError, EncoderError) as error:
        self.error_count += 1
        if panic:
            # Bare raise preserves the original traceback.
            raise
        log.info("BulkLoad", error)
Attempt release of target mload table.
def release(self):
    """
    Attempt release of target mload table.

    :raises `giraffez.errors.GiraffeError`: if table was not set by the
        constructor, the :code:`TeradataBulkLoad.table`, or
        :meth:`~giraffez.load.TeradataBulkLoad.from_file`.
    """
    if self.table is None:
        raise GiraffeError("Cannot release. Target table has not been set.")
    log.info("BulkLoad", "Attempting release for table {}".format(self.table))
    self.mload.release(self.table)
The names of the work tables used for loading.
def tables(self):
    """
    The names of the work tables used for loading.

    :return: A list of four table names: the target table name with the
        suffixes "_wt", "_log", "_e1", and "_e2"
    :raises `giraffez.errors.GiraffeError`: if the target table has not
        been set by the constructor, :code:`TeradataBulkLoad.table`, or
        :meth:`~giraffez.load.TeradataBulkLoad.from_file`.
    """
    if self.table is None:
        raise GiraffeError("Target table has not been set.")
    return [fmt.format(self.table)
            for fmt in ("{}_wt", "{}_log", "{}_e1", "{}_e2")]
Monkey-patch compiler to allow for removal of default compiler flags.
def fix_compile(remove_flags):
    """
    Monkey-patch the distutils compiler so the default compiler flags
    listed in ``remove_flags`` are stripped before compilation.
    """
    import distutils.ccompiler

    def _patched_compile(self, sources, output_dir=None, macros=None,
                         include_dirs=None, debug=0, extra_preargs=None,
                         extra_postargs=None, depends=None):
        # Strip each unwanted default flag from the compiler invocation.
        for unwanted in remove_flags:
            if unwanted in self.compiler_so:
                self.compiler_so.remove(unwanted)
        macros, objects, extra_postargs, pp_opts, build = self._setup_compile(
            output_dir, macros, include_dirs, sources, depends, extra_postargs)
        cc_args = self._get_cc_args(pp_opts, debug, extra_preargs)
        for obj in objects:
            try:
                src, ext = build[obj]
            except KeyError:
                # Object not scheduled for rebuild; skip it.
                continue
            self._compile(obj, src, ext, cc_args, extra_postargs, pp_opts)
        return objects

    distutils.ccompiler.CCompiler.compile = _patched_compile
Attempts to find the Teradata install directory with the defaults for a given platform. Should always return None when the defaults are not present and the TERADATA_HOME environment variable wasn't explicitly set to the correct install location.
def find_teradata_home():
    """
    Attempts to find the Teradata install directory with the defaults
    for a given platform. Should always return `None` when the defaults
    are not present and the TERADATA_HOME environment variable wasn't
    explicitly set to the correct install location.
    """
    system = platform.system()
    if system == 'Windows':
        # The default installation path for Windows is split between the
        # Windows directories for 32-bit/64-bit applications. The installed
        # Teradata architecture should match the Python architecture in use
        # (i.e. TTU 32-bit is required with Python 32-bit and TTU 64-bit is
        # required for Python 64-bit).
        if is_64bit():
            return latest_teradata_version("C:/Program Files/Teradata/Client")
        return latest_teradata_version("C:/Program Files (x86)/Teradata/Client")
    if system == 'Linux':
        return latest_teradata_version("/opt/teradata/client")
    if system == 'Darwin':
        return latest_teradata_version("/Library/Application Support/teradata/client")
    # In the case nothing is found, the default for Linux is attempted
    # as a last effort to find the correct install directory.
    return latest_teradata_version("/opt/teradata/client")
Retrieve the decrypted value of a key in a giraffez configuration file.
def get(self, key):
    """
    Retrieve the decrypted value of a key in a giraffez configuration
    file.

    :param str key: The key used to look up the encrypted value
    """
    if not (key.startswith("secure.") or key.startswith("connections.")):
        key = "secure.{0}".format(key)
    value = self.config.get_value(key)
    # Non-string lookups (e.g. a missing key's dict default) normalize
    # to None.
    return value if isinstance(value, basestring) else None
Set a decrypted value by key in a giraffez configuration file.
def set(self, key, value):
    """
    Set a value by key in a giraffez configuration file, storing it
    under the "secure." namespace.

    :param str key: The key used to look up the value
    :param value: Value to set at the given key; can be any value that
        is YAML serializable.
    """
    if not key.startswith("secure."):
        key = "secure.{0}".format(key)
    self.config.set_value(key, value)
    self.config.write()
Display results in table format
def do_table(self, line):
    """Display results in table format"""
    if len(line) > 0:
        mode = line.strip().lower()
        if mode == "on":
            log.write("Table ON")
            self.table_output = True
            return
        if mode == "off":
            log.write("Table OFF")
            self.table_output = False
            return
    # No (recognized) argument: report the current setting.
    log.write("Table output: {}".format("ON" if self.table_output else "OFF"))
Sets the current encoder output to Python dict and returns the cursor. This makes it possible to set the output encoding and iterate over the results:
def to_dict(self):
    """
    Sets the current encoder output to Python `dict` and returns the
    cursor. This makes it possible to set the output encoding and
    iterate over the results:

    .. code-block:: python

        with giraffez.Cmd() as cmd:
            for row in cmd.execute(query).to_dict():
                print(row)

    Or it can be passed as a parameter to an object that consumes an
    iterator:

    .. code-block:: python

        result = cmd.execute(query)
        list(result.to_dict())
    """
    self.conn.set_encoding(ROW_ENCODING_DICT)
    # Rows are already dicts in this encoding; pass them through as-is.
    self.processor = lambda columns, row: row
    return self
Set the current encoder output to :class:`giraffez.Row` objects and return the cursor. This is the default value, so it is not necessary to select this unless the encoder settings have already been changed.
def to_list(self):
    """
    Set the current encoder output to :class:`giraffez.Row` objects and
    return the cursor. This is the default value, so it is not necessary
    to select this unless the encoder settings have already been
    changed.
    """
    self.conn.set_encoding(ROW_ENCODING_LIST)
    # Wrap each raw list row in a Row object alongside its columns.
    self.processor = lambda columns, row: Row(columns, row)
    return self
Execute commands using CLIv2.
def execute(self, command, coerce_floats=True, parse_dates=False, header=False,
            sanitize=True, silent=False, panic=None, multi_statement=False,
            prepare_only=False):
    """
    Execute commands using CLIv2.

    :param str command: The SQL command to be executed
    :param bool coerce_floats: Coerce Teradata decimal types into Python floats
    :param bool parse_dates: Parses Teradata datetime types into Python datetimes
    :param bool header: Include row header
    :param bool sanitize: Whether or not to call
        :func:`~giraffez.sql.prepare_statement` on the command
    :param bool silent: Silence console logging (within this function only)
    :param bool panic: If :code:`True`, when an error is encountered it
        will be raised.
    :param bool multi_statement: Execute in multi-statement mode
    :param bool prepare_only: Only prepare the command (no results)
    :return: a cursor over the results of each statement in the command
    :rtype: :class:`~giraffez.cmd.Cursor`
    :raises `giraffez.TeradataError`: if the query is invalid
    :raises `giraffez.errors.GiraffeError`: if the return data could not
        be decoded
    """
    if panic is None:
        panic = self.panic
    self.options("panic", panic)
    self.options("multi-statement mode", multi_statement, 3)
    if isfile(command):
        # Commands may be given as a path to a SQL script.
        self.options("file", command, 2)
        with open(command, 'r') as f:
            command = f.read()
    else:
        if log.level >= VERBOSE:
            self.options("query", command, 2)
        else:
            self.options("query", truncate(command), 2)
    if not (silent or self.silent):
        log.info("Command", "Executing ...")
        log.info(self.options)
    if sanitize:
        # prepare_statement accounts for comments and newlines
        command = prepare_statement(command)
        log.debug("Debug[2]", "Command (sanitized): {!r}".format(command))
    self.cmd.set_encoding(ENCODER_SETTINGS_DEFAULT)
    return Cursor(self.cmd, command, multi_statement=multi_statement, header=header,
        prepare_only=prepare_only, coerce_floats=coerce_floats,
        parse_dates=parse_dates, panic=panic)
Check that object (table or view) :code:`object_name` exists, by executing a :code:`show table object_name` query, followed by a :code:`show view object_name` query if :code:`object_name` is not a table.
def exists(self, object_name, silent=False):
    """
    Check that object (table or view) :code:`object_name` exists, by
    executing a :code:`show table object_name` query, followed by a
    :code:`show view object_name` query if :code:`object_name` is not a
    table.

    :param str object_name: The name of the object to check for existence.
    :param bool silent: Silence console logging (within this function only)
    :return: :code:`True` if the object exists, :code:`False` otherwise.
    :rtype: bool
    """
    try:
        self.execute("show table {}".format(object_name), silent=silent)
        return True
    except TeradataError as error:
        # Any error other than "not a table" means it cannot be a view
        # either, so stop here.
        if error.code != TD_ERROR_OBJECT_NOT_TABLE:
            return False
    try:
        self.execute("show view {}".format(object_name), silent=silent)
        return True
    except TeradataError as error:
        # NOTE(review): unexpected errors here are treated as "exists" —
        # presumably the object is present but cannot be shown; confirm.
        if error.code not in [TD_ERROR_OBJECT_NOT_VIEW, TD_ERROR_OBJECT_NOT_EXIST]:
            return True
    return False
Return the column information for :code:`table_name` by executing a :code:`select top 1 * from table_name` query.
def fetch_columns(self, table_name, silent=False):
    """
    Return the column information for :code:`table_name` by executing a
    :code:`select top 1 * from table_name` query.

    :param str table_name: The fully-qualified name of the table to
        retrieve the schema for
    :param bool silent: Silence console logging (within this function only)
    :return: the columns of the table
    :rtype: :class:`~giraffez.types.Columns`
    """
    result = self.execute("select top 1 * from {}".format(table_name),
        silent=silent, prepare_only=True)
    return result.columns
Load a text file into the specified :code:`table_name`, or insert Python :code:`list` rows into the specified :code:`table_name`.
def insert(self, table_name, rows, fields=None, delimiter=None, null='NULL',
           parse_dates=False, quotechar='"'):
    """
    Load a text file into the specified :code:`table_name` or insert
    Python :code:`list` rows into the specified :code:`table_name`.

    :param str table_name: The name of the destination table
    :param list/str rows: A list of rows **or** the name of an input
        file. Each row must be a :code:`list` of field values.
    :param list fields: The names of the target fields, in the order
        that the data will be presented (defaults to :code:`None` for
        all columns in the table).
    :param str delimiter: The delimiter used by the input file (or
        :code:`None` to infer it from the header).
    :param str null: The string used to indicate nulled values in the
        file (defaults to :code:`'NULL'`).
    :param bool parse_dates: If :code:`True`, attempts to coerce date
        fields into a standard format (defaults to :code:`False`).
    :param str quotechar: The character used to quote fields containing
        special characters, like the delimiter.
    :raises `giraffez.errors.GiraffeEncodeError`: if the number of
        values in a row does not match the length of :code:`fields`
    :raises `giraffez.errors.GiraffeError`: if :code:`panic` is set and
        the insert statement caused an error.
    :return: A dictionary containing counts of applied rows and errors
    :rtype: :class:`dict`

    For most insertions, this will be faster and produce less strain on
    Teradata than using :class:`~giraffez.load.TeradataBulkLoad`
    (:class:`giraffez.BulkLoad <giraffez.load.TeradataBulkLoad>`).
    Requires that any input file be a properly delimited text file with
    a header that corresponds to the target fields for insertion (valid
    delimiters include '|', ',', and <tab>), or a properly encoded JSON
    stream.
    """
    # `rows` may be a list of rows rather than a path; os.path.isfile
    # raises TypeError/ValueError for non-path arguments, so treat that
    # case as "not a file" instead of crashing.
    try:
        rows_is_file = isfile(rows)
    except (TypeError, ValueError):
        rows_is_file = False
    if not rows_is_file:
        return self._insert(table_name, rows, fields, parse_dates)
    with Reader(rows, delimiter=delimiter, quotechar=quotechar) as f:
        preprocessor = null_handler(null)
        rows = (preprocessor(line) for line in f)
        if isinstance(f, CSVReader):
            self.options("delimiter", unescape_string(f.reader.dialect.delimiter), 1)
            self.options("quote char", f.reader.dialect.quotechar, 2)
        elif isinstance(f, JSONReader):
            self.options("encoding", "json", 1)
        return self._insert(table_name, rows, f.header, parse_dates)
Return a :code:`dict` of connections from the configuration settings.
def connections(self):
    """
    Return a :code:`dict` of connections from the configuration
    settings.

    :raises `giraffez.errors.ConfigurationError`: if connections are
        not present
    """
    try:
        return self.settings["connections"]
    except KeyError:
        raise ConfigurationError("Could not retrieve connections from config file '{}'.".format(self._config_file))
Retrieve a connection by the given :code:`dsn`, or the default connection.
def get_connection(self, dsn=None):
    """
    Retrieve a connection by the given :code:`dsn`, or the default
    connection.

    :param str dsn: The name of the connection to retrieve. Defaults to
        :code:`None`, which retrieves the default connection.
    :return: A dict of connection settings
    :raises `giraffez.errors.ConfigurationError`: if :code:`dsn` does
        not exist, or no default is set
    :raises `giraffez.errors.ConnectionLock`: if the corresponding
        connection is currently locked
    """
    if dsn is None:
        dsn = self.connections.get("default", None)
    if dsn is None:
        raise ConfigurationError("No default DSN set")
    entry = self.connections.get(dsn, None)
    if entry is None:
        raise ConfigurationError("DSN '{}' does not exist".format(dsn))
    # Work on a copy so decryption never mutates the stored settings.
    entry = self.decrypt(entry.copy())
    if entry.get('lock', 0) > 1:
        raise ConnectionLock(dsn)
    entry['name'] = dsn
    return entry
Retrieve a value from the configuration based on its key. The key may be nested.
def get_value(self, key, default={}, nested=True, decrypt=True):
    """
    Retrieve a value from the configuration based on its key. The key
    may be nested.

    :param str key: A path to the value, with nested levels joined by '.'
    :param default: Value to return if the key does not exist (defaults
        to :code:`dict()`)
    :param bool nested: If :code:`True`, treat :code:`key` as a
        dot-separated path; otherwise look it up as a single flat key
    :param bool decrypt: If :code:`True`, decrypt an encrypted value
        before returning (if encrypted). Defaults to :code:`True`.
    """
    key = key.lstrip()
    if key.endswith("."):
        key = key[:-1]
    if not nested:
        return self.settings.get(key, default)
    path = key.split(".")
    node = self.settings
    for part in path[:-1]:
        node = node.get(part, {})
    try:
        value = node[path[-1]]
    except KeyError:
        return default
    # Honor the `decrypt` flag; previously it was accepted but ignored.
    if decrypt:
        value = self.decrypt(value, path)
    return value
Return the contents of the configuration as YAML text. Depending on the structure of the YAML settings, the parsed value may contain nested :code:`dict` objects.
def list_value(self, decrypt=False):
    """
    Return the contents of the configuration as YAML text. Depending on
    the structure of the settings, the parsed result may contain nested
    :code:`dict` objects.

    :param bool decrypt: If :code:`True`, decrypt the contents before
        returning.
    :return: a YAML string of the (potentially nested) keys and values
        parsed from the configuration file contents.
    """
    contents = self.decrypt(self.settings) if decrypt else self.settings
    return yaml.dump(contents, default_flow_style=False)
A class method to lock a connection (given by :code:`dsn`) in the specified configuration file. Automatically opens the file and writes to it before closing.
def lock_connection(cls, conf, dsn, key=None):
    """
    A class method to lock a connection (given by :code:`dsn`) in the
    specified configuration file. Automatically opens the file and
    writes to it before closing.

    :param str conf: The configuration file to modify
    :param str dsn: The name of the connection to lock
    :raises `giraffez.errors.ConfigurationError`: if the connection
        does not exist
    :raises `giraffez.errors.ConnectionLock`: if the connection is
        already locked twice
    """
    with Config(conf, "w", key) as c:
        if not c.get_connection(dsn):
            raise ConfigurationError("Unable to lock connection")
        if dsn is None:
            # Resolve the default DSN so the lock key targets a real entry.
            dsn = c.settings["connections"]["default"]
        lock_key = "connections.{}.lock".format(dsn)
        lock = c.get_value(lock_key, default=0)
        if lock >= 2:
            raise ConnectionLock(dsn)
        c.set_value(lock_key, lock + 1)
        c.write()
Set a value within the configuration based on its key. The key may be nested; any nested levels that do not exist prior to the final segment of the key path will be created. *Note*: In order to write changes to the file, ensure that :meth:`~giraffez.config.Config.write` is called prior to exit.
def set_value(self, key, value):
    """
    Set a value within the configuration based on its key. The key may
    be nested; any nested levels that do not exist prior to the final
    segment of the key path will be created.

    *Note*: In order to write changes to the file, ensure that
    :meth:`~giraffez.config.Config.write` is called prior to exit.

    :param str key: A path to the value destination, with nested levels
        joined by '.'
    :param value: Value to set at the given key; can be any value that
        is YAML serializable.
    :raises `giraffez.errors.ConfigurationError`: if the destination is
        not a dictionary
    """
    if key.endswith("."):
        key = key[:-1]
    path = key.split(".")
    curr = self.settings
    for p in path[:-1]:
        if p not in curr:
            curr[p] = {}
        curr = curr[p]
    if not isinstance(curr, dict):
        raise ConfigurationError("Cannot set nested key '{}' in configuration value '{}' (destination is not a dictionary).".format(path[-1], key))
    value = self.encrypt(value, path)
    # Use tuple membership (which compares with ==) rather than a set
    # literal: unhashable values such as dicts raised TypeError before.
    if value in ('true', 'True'):
        value = True
    if value in ('false', 'False'):
        value = False
    curr[path[-1]] = value
A class method to unlock a connection (given by :code:`dsn`) in the specified configuration file. Automatically opens the file and writes to it before closing.
def unlock_connection(cls, conf, dsn, key=None):
    """
    A class method to unlock a connection (given by :code:`dsn`) in the
    specified configuration file. Automatically opens the file and
    writes to it before closing.

    :param str conf: The configuration file to modify
    :param str dsn: The name of the connection to unlock, or
        :code:`None` for the default connection
    :raises `giraffez.errors.ConfigurationError`: if the connection
        does not exist
    """
    with Config(conf, "w", key) as c:
        # Resolve the default DSN first; previously a None dsn was looked
        # up directly and always failed before the default was applied.
        if dsn is None:
            dsn = c.settings["connections"]["default"]
        connection = c.connections.get(dsn, None)
        if not connection:
            raise ConfigurationError("Unable to unlock connection")
        if connection.get("lock", None) is None:
            raise GiraffeError("Connection '{}' is not locked.".format(dsn))
        c.unset_value("connections.{}.lock".format(dsn))
        c.write()
Remove a value at the given key -- and any nested values -- from the configuration. *Note*: In order to write changes to the file, ensure that :meth:`~giraffez.config.Config.write` is called prior to exit.
def unset_value(self, key):
    """
    Remove a value at the given key -- and any nested values -- from
    the configuration.

    *Note*: In order to write changes to the file, ensure that
    :meth:`~giraffez.config.Config.write` is called prior to exit.

    :param str key: A path to the value destination, with nested levels
        joined by '.'
    :raises `giraffez.errors.ConfigurationError`: if the key specifies
        an invalid path, or does not exist
    """
    if key.endswith("."):
        key = key[:-1]
    path = key.split(".")
    node = self.settings
    # Walk down to the parent of the final segment, validating each hop.
    for part in path[:-1]:
        if part not in node:
            raise ConfigurationError("Cannot unset '{}', nested key '{}' does not exist.".format(key, part))
        node = node[part]
    if not isinstance(node, dict):
        raise ConfigurationError("Cannot unset nested key '{}' in configuration value '{}'.".format(path[-1], key))
    if path[-1] not in node:
        raise ConfigurationError("Cannot unset '{}', nested key '{}' does not exist.".format(key, path[-1]))
    del node[path[-1]]
Save the current configuration to its file (as given by :code:`self._config_file`). Optionally, settings may be passed in to override the current settings before writing. Returns :code:`None` if the file could not be written to, either due to permissions or if the :class:`~giraffez.config.Config` object has the :code:`mode` 'r'.
def write(self, settings=None):
    """
    Save the current configuration to its file (as given by
    :code:`self._config_file`). Optionally, settings may be passed in to
    override the current settings before writing. Returns :code:`None`
    if the file could not be written due to permissions.

    :param dict settings: Defaults to :code:`None`; if not :code:`None`
        this will replace `self.settings` prior to writing to the file
    :raises `giraffez.errors.ConfigReadOnly`: if the Config was opened
        in 'r' mode
    """
    if "r" in self.mode:
        raise ConfigReadOnly("Cannot write Config while in 'r' mode")
    try:
        if settings:
            self.settings = settings
        # repr(self) renders the configuration file contents.
        with open(self._config_file, "w") as f:
            f.write(repr(self))
        return repr(self)
    except OSError:
        return None
A class method to write a default configuration file structure to a file. Note that the contents of the file will be overwritten if it already exists.
def write_default(self, conf=None):
    """
    Write a default configuration file structure to a file. Note that
    the contents of the file will be overwritten if it already exists.

    :param str conf: The name of the file to write to. Defaults to
        :code:`None`, for ~/.girafferc
    :return: The content written to the file
    :rtype: str
    """
    target = home_file(".girafferc") if conf is None else conf
    contents = yaml.dump(default_config, default_flow_style=False)
    with open(target, "w") as f:
        f.write(contents)
    # Config files may contain credentials; restrict to owner read/write.
    os.chmod(target, 0o600)
    return contents
Retrieve a column from the list with name value :code:`column_name`.
def get(self, column_name):
    """
    Retrieve a column from the list with name value :code:`column_name`.

    :param str column_name: The name of the column to get
    :return: :class:`~giraffez.types.Column` with the specified name, or
        :code:`None` if it does not exist.
    """
    target = column_name.lower()
    return next((c for c in self.columns if c.name == target), None)
Set the names of columns to be used when iterating through the list retrieving names etc.
def set_filter(self, names=None):
    """
    Set the names of columns to be used when iterating through the
    list, retrieving names, etc.

    :param list names: A list of names to be used, or :code:`None` for all
    :raises `giraffez.errors.GiraffeTypeError`: if a name does not match
        any column
    """
    selected = []
    if names:
        for name in names:
            canonical = safe_name(name)
            if canonical not in self._column_map:
                raise GiraffeTypeError("Column '{}' does not exist".format(name))
            # Skip duplicates while preserving first-seen order.
            if canonical in selected:
                continue
            selected.append(canonical)
    self._filtered_columns = selected
Serializes the columns into the giraffez archive header binary format::
def serialize(self):
    """
    Serializes the columns into the giraffez archive header binary
    format::

        Header Length: 2 bytes -- full length of the archive header
        Header Data: variable -- N packed columns, each encoded as:
            Type (2 bytes), Length (2 bytes), Precision (2 bytes),
            Scale (2 bytes), Name Length (2 bytes), Name (variable)

    :return: Packed binary data, representing the serialized
        :class:`~giraffez.types.Columns`
    :rtype: str
    """
    parts = []
    for column in self:
        parts.append(struct.pack("5H", column.type, column.length,
            column.precision, column.scale, len(column.name)))
        parts.append(ensure_bytes(column.name))
    body = b"".join(parts)
    return struct.pack("H", len(body)) + body
Deserializes a giraffez archive header. See :meth:`~giraffez.types.Columns.serialize` for more information.
def deserialize(cls, data):
    """
    Deserializes a giraffez archive header. See
    :meth:`~giraffez.types.Columns.serialize` for more information.

    :param str data: data in giraffez archive format, to be deserialized
    :return: :class:`~giraffez.types.Columns` object decoded from data
    :raises `giraffez.errors.GiraffeEncodeError`: if a column cannot be
        constructed from the decoded fields
    """
    columns = cls()
    remaining = data
    while remaining:
        # Fixed 10-byte header: type, length, precision, scale, name length.
        header, remaining = remaining[:10], remaining[10:]
        column_type, length, prec, scale, name_len = struct.unpack("5H", header)
        name, remaining = remaining[:name_len], remaining[name_len:]
        try:
            columns.append((name, column_type, length, prec, scale))
        except GiraffeTypeError as error:
            raise GiraffeEncodeError(error)
    return columns
Represents the contents of the row as a :code:`dict` with the column names as keys and the row's fields as values.
def items(self):
    """
    Represents the contents of the row as a :code:`dict` with the
    column names as keys, and the row's fields as values.

    :rtype: dict
    """
    return dict(zip((column.name for column in self.columns), self))
Set the query to be run and initiate the connection with Teradata. Only necessary if the query/table name was not specified as an argument to the constructor of the instance.
def query(self, query):
    """
    Set the query to be run and initiate the connection with Teradata.
    Only necessary if the query/table name was not specified as an
    argument to the constructor of the instance.

    :param str query: Valid SQL query to be executed
    """
    if query is None:
        return
    if log.level >= VERBOSE:
        self.options("query", query, 6)
    else:
        self.options("query", truncate(query), 6)
    statements = parse_statement(remove_curly_quotes(query))
    if not statements:
        raise GiraffeError("Unable to parse SQL statement")
    if len(statements) > 1:
        show_warning(("MORE THAN ONE STATEMENT RECEIVED, EXPORT OPERATIONS ALLOW ONE "
            "STATEMENT - ONLY THE FIRST STATEMENT WILL BE USED."), RuntimeWarning)
    statement = statements[0]
    log.debug("Debug[2]", "Statement (sanitized): {!r}".format(statement))
    # Bare table names are expanded into a full select.
    if not (statement.startswith("select ") or statement.startswith("sel ")):
        statement = "select * from {}".format(statement)
    if statement == self.query:
        return
    self._query = statement
    self.initiated = False
    # Since CLIv2 is used in set_query (instead of relying on the
    # columns from the TPT Export driver) and set_query will always
    # happen before calls to initiate, set_query will always fail
    # with InvalidCredentialsError before initiate despite initiate
    # presumably failing after this point as well.
    try:
        self.export.set_query(statement)
    except InvalidCredentialsError as error:
        if self.protect:
            Config.lock_connection(self.config, self.dsn, self.key_file)
        raise error
Writes export archive files in the giraffez archive format. This takes a :class:`giraffez.io.Writer` and writes archive chunks to file until all rows for a given statement have been exhausted.
def to_archive(self, writer):
    """
    Writes export archive files in the Giraffez archive format. This takes
    a `giraffez.io.Writer` and writes archive chunks to file until all rows
    for a given statement have been exhausted.

    .. code-block:: python

       with giraffez.BulkExport("database.table_name") as export:
           with giraffez.Writer("database.table_name.tar.gz", 'wb', use_gzip=True) as out:
               for n in export.to_archive(out):
                   print("Rows: {}".format(n))

    :param `giraffez.io.Writer` writer: A writer handling the archive output
    :rtype: iterator (yields ``int``)
    :raises `giraffez.errors.GiraffeError`: if the writer is not in binary mode
    """
    if 'b' not in writer.mode:
        raise GiraffeError("Archive writer must be in binary mode")
    # Archive layout: magic bytes, serialized column metadata, raw row chunks.
    writer.write(GIRAFFE_MAGIC)
    writer.write(self.columns.serialize())
    # Fixed: removed dead local `i = 0` and the unused enumerate() index the
    # original looped over but never read.
    for chunk in self._fetchall(ROW_ENCODING_RAW):
        writer.write(chunk)
        yield TeradataEncoder.count(chunk)
Sets the current encoder output to Python str and returns a row iterator.
def to_str(self, delimiter='|', null='NULL'):
    """
    Sets the current encoder output to Python `str` and returns a row iterator.

    :param str null: The string representation of null values
    :param str delimiter: The string delimiting values in the output string
    :rtype: iterator (yields ``str``)
    """
    # Configure the underlying export driver before announcing the settings.
    self.export.set_null(null)
    self.export.set_delimiter(delimiter)
    self.options("delimiter", escape_string(delimiter), 2)
    self.options("null", null, 3)
    return self._fetchall(ENCODER_SETTINGS_STRING, coerce_floats=False)
Convert a string with an optional k, M, G, or T multiplier to a float
def float_with_multiplier(string):
    """Convert string with optional k, M, G, T multiplier to float"""
    match = re_float_with_multiplier.search(string)
    if not match or not match.group('num'):
        raise ValueError('String "{}" is not numeric!'.format(string))

    value = float(match.group('num'))
    suffix = match.group('multi')
    if suffix:
        try:
            value *= multipliers[suffix]
        except KeyError:
            raise ValueError('Unknown multiplier: {}'.format(suffix))
    return value
Convert string with gains of individual amplification elements to dict
def specific_gains(string):
    """Convert string with gains of individual amplification elements to dict"""
    if not string:
        return {}

    pairs = (item.split('=') for item in string.split(','))
    return {name.strip(): float(value.strip()) for name, value in pairs}
Convert string with SoapySDR device settings to dict
def device_settings(string):
    """Convert string with SoapySDR device settings to dict"""
    if not string:
        return {}

    pairs = (item.split('=') for item in string.split(','))
    return {name.strip(): value.strip() for name, value in pairs}
Wrap text to terminal width with default indentation
def wrap(text, indent=' '):
    """Wrap text to terminal width with default indentation

    :param str text: text to reflow
    :param str indent: prefix applied to the first and all subsequent lines
    :return: wrapped lines joined with newlines
    """
    # COLUMNS is normally set by setup_argument_parser(); the int 80 fallback
    # presumably only applies when wrap() runs before that — confirm.
    wrapper = textwrap.TextWrapper(
        width=int(os.environ.get('COLUMNS', 80)),
        initial_indent=indent, subsequent_indent=indent
    )
    return '\n'.join(wrapper.wrap(text))
Returns detected SoapySDR devices
def detect_devices(soapy_args=''):
    """Returns detected SoapySDR devices

    :param str soapy_args: SoapySDR device selection string
    :return: tuple (device list, human-readable summary text)
    """
    devices = simplesoapy.detect_devices(soapy_args, as_string=True)
    text = []
    text.append('Detected SoapySDR devices:')
    if devices:
        # Fixed: the original enumerate() index was never used.
        for d in devices:
            text.append(' {}'.format(d))
    else:
        text.append(' No devices found!')
    return (devices, '\n'.join(text))
Returns info about selected SoapySDR device
def device_info(soapy_args=''):
    """Returns info about selected SoapySDR device

    :param str soapy_args: SoapySDR device selection string
    :return: tuple (device or None, human-readable report)
    """
    text = []
    try:
        device = simplesoapy.SoapyDevice(soapy_args)
        text.append('Selected device: {}'.format(device.hardware))
        text.append(' Available RX channels:')
        text.append(' {}'.format(', '.join(str(x) for x in device.list_channels())))
        text.append(' Available antennas:')
        text.append(' {}'.format(', '.join(device.list_antennas())))
        text.append(' Available tunable elements:')
        text.append(' {}'.format(', '.join(device.list_frequencies())))
        text.append(' Available amplification elements:')
        text.append(' {}'.format(', '.join(device.list_gains())))
        text.append(' Available device settings:')
        for key, s in device.list_settings().items():
            text.append(wrap('{} ... {} - {} (default: {})'.format(key, s['name'], s['description'], s['value'])))
        text.append(' Available stream arguments:')
        for key, s in device.list_stream_args().items():
            text.append(wrap('{} ... {} - {} (default: {})'.format(key, s['name'], s['description'], s['value'])))
        text.append(' Allowed gain range [dB]:')
        text.append(' {:.2f} - {:.2f}'.format(*device.get_gain_range()))
        text.append(' Allowed frequency range [MHz]:')
        text.append(' {:.2f} - {:.2f}'.format(*[x / 1e6 for x in device.get_frequency_range()]))
        text.append(' Allowed sample rates [MHz]:')
        # Rates/bandwidths come as (min, max) ranges; a degenerate range is
        # printed as a single value.
        rates = []
        for r in device.list_sample_rates():
            if r[0] == r[1]:
                rates.append('{:.2f}'.format(r[0] / 1e6))
            else:
                rates.append('{:.2f} - {:.2f}'.format(r[0] / 1e6, r[1] / 1e6))
        text.append(wrap(', '.join(rates)))
        text.append(' Allowed bandwidths [MHz]:')
        bandwidths = []
        for b in device.list_bandwidths():
            if b[0] == b[1]:
                bandwidths.append('{:.2f}'.format(b[0] / 1e6))
            else:
                bandwidths.append('{:.2f} - {:.2f}'.format(b[0] / 1e6, b[1] / 1e6))
        if bandwidths:
            text.append(wrap(', '.join(bandwidths)))
        else:
            text.append(' N/A')
    except RuntimeError:
        # SoapySDR raises RuntimeError when no device matches soapy_args.
        device = None
        text.append('No devices found!')
    return (device, '\n'.join(text))
Setup command line parser
def setup_argument_parser():
    """Setup command line parser

    :return: configured :class:`argparse.ArgumentParser`
    """
    # Fix help formatter width
    if 'COLUMNS' not in os.environ:
        os.environ['COLUMNS'] = str(shutil.get_terminal_size().columns)

    parser = argparse.ArgumentParser(
        prog='soapy_power',
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description='Obtain a power spectrum from SoapySDR devices',
        add_help=False
    )

    # Fix recognition of optional arguments of type float_with_multiplier
    parser._negative_number_matcher = re_float_with_multiplier_negative

    main_title = parser.add_argument_group('Main options')
    main_title.add_argument('-h', '--help', action='help',
                            help='show this help message and exit')
    main_title.add_argument('-f', '--freq', metavar='Hz|Hz:Hz', type=freq_or_freq_range, default='1420405752',
                            help='center frequency or frequency range to scan, number '
                                 'can be followed by a k, M or G multiplier (default: %(default)s)')

    output_group = main_title.add_mutually_exclusive_group()
    output_group.add_argument('-O', '--output', metavar='FILE', type=argparse.FileType('w'), default=sys.stdout,
                              help='output to file (incompatible with --output-fd, default is stdout)')
    output_group.add_argument('--output-fd', metavar='NUM', type=int, default=None,
                              help='output to existing file descriptor (incompatible with -O)')

    main_title.add_argument('-F', '--format', choices=sorted(writer.formats.keys()), default='rtl_power',
                            help='output format (default: %(default)s)')
    main_title.add_argument('-q', '--quiet', action='store_true',
                            help='limit verbosity')
    main_title.add_argument('--debug', action='store_true',
                            help='detailed debugging messages')
    main_title.add_argument('--detect', action='store_true',
                            help='detect connected SoapySDR devices and exit')
    main_title.add_argument('--info', action='store_true',
                            help='show info about selected SoapySDR device and exit')
    main_title.add_argument('--version', action='version',
                            version='%(prog)s {}'.format(__version__))

    bins_title = parser.add_argument_group('FFT bins')
    bins_group = bins_title.add_mutually_exclusive_group()
    bins_group.add_argument('-b', '--bins', type=int, default=512,
                            help='number of FFT bins (incompatible with -B, default: %(default)s)')
    bins_group.add_argument('-B', '--bin-size', metavar='Hz', type=float_with_multiplier,
                            help='bin size in Hz (incompatible with -b)')

    spectra_title = parser.add_argument_group('Averaging')
    spectra_group = spectra_title.add_mutually_exclusive_group()
    spectra_group.add_argument('-n', '--repeats', type=int, default=1600,
                               help='number of spectra to average (incompatible with -t and -T, default: %(default)s)')
    spectra_group.add_argument('-t', '--time', metavar='SECONDS', type=float,
                               help='integration time (incompatible with -T and -n)')
    spectra_group.add_argument('-T', '--total-time', metavar='SECONDS', type=float,
                               help='total integration time of all hops (incompatible with -t and -n)')

    runs_title = parser.add_argument_group('Measurements')
    runs_group = runs_title.add_mutually_exclusive_group()
    runs_group.add_argument('-c', '--continue', dest='endless', action='store_true',
                            help='repeat the measurement endlessly (incompatible with -u and -e)')
    runs_group.add_argument('-u', '--runs', type=int, default=1,
                            help='number of measurements (incompatible with -c and -e, default: %(default)s)')
    runs_group.add_argument('-e', '--elapsed', metavar='SECONDS', type=float,
                            help='scan session duration (time limit in seconds, incompatible with -c and -u)')

    device_title = parser.add_argument_group('Device settings')
    device_title.add_argument('-d', '--device', default='',
                              help='SoapySDR device to use')
    device_title.add_argument('-C', '--channel', type=int, default=0,
                              help='SoapySDR RX channel (default: %(default)s)')
    device_title.add_argument('-A', '--antenna', default='',
                              help='SoapySDR selected antenna')
    device_title.add_argument('-r', '--rate', metavar='Hz', type=float_with_multiplier, default=2e6,
                              help='sample rate (default: %(default)s)')
    device_title.add_argument('-w', '--bandwidth', metavar='Hz', type=float_with_multiplier, default=0,
                              help='filter bandwidth (default: %(default)s)')
    device_title.add_argument('-p', '--ppm', type=int, default=0,
                              help='frequency correction in ppm')

    gain_group = device_title.add_mutually_exclusive_group()
    gain_group.add_argument('-g', '--gain', metavar='dB', type=float, default=37.2,
                            help='total gain (incompatible with -G and -a, default: %(default)s)')
    # Fixed: closing parenthesis was missing at the end of this help text.
    gain_group.add_argument('-G', '--specific-gains', metavar='STRING', type=specific_gains, default='',
                            help='specific gains of individual amplification elements '
                                 '(incompatible with -g and -a, example: LNA=28,VGA=12,AMP=0)')
    gain_group.add_argument('-a', '--agc', action='store_true',
                            help='enable Automatic Gain Control (incompatible with -g and -G)')

    device_title.add_argument('--lnb-lo', metavar='Hz', type=float_with_multiplier, default=0,
                              help='LNB LO frequency, negative for upconverters (default: %(default)s)')
    device_title.add_argument('--device-settings', metavar='STRING', type=device_settings, default='',
                              help='SoapySDR device settings (example: biastee=true)')
    device_title.add_argument('--force-rate', action='store_true',
                              help='ignore list of sample rates provided by device and allow any value')
    device_title.add_argument('--force-bandwidth', action='store_true',
                              help='ignore list of filter bandwidths provided by device and allow any value')
    device_title.add_argument('--tune-delay', metavar='SECONDS', type=float, default=0,
                              help='time to delay measurement after changing frequency (to avoid artifacts)')
    device_title.add_argument('--reset-stream', action='store_true',
                              help='reset streaming after changing frequency (to avoid artifacts)')

    crop_title = parser.add_argument_group('Crop')
    crop_group = crop_title.add_mutually_exclusive_group()
    crop_group.add_argument('-o', '--overlap', metavar='PERCENT', type=float, default=0,
                            help='percent of overlap when frequency hopping (incompatible with -k)')
    crop_group.add_argument('-k', '--crop', metavar='PERCENT', type=float, default=0,
                            help='percent of crop when frequency hopping (incompatible with -o)')

    perf_title = parser.add_argument_group('Performance options')
    perf_title.add_argument('-s', '--buffer-size', type=int, default=0,
                            help='base buffer size (number of samples, 0 = auto, default: %(default)s)')
    perf_title.add_argument('-S', '--max-buffer-size', type=int, default=0,
                            help='maximum buffer size (number of samples, -1 = unlimited, 0 = auto, default: %(default)s)')

    fft_rules_group = perf_title.add_mutually_exclusive_group()
    fft_rules_group.add_argument('--even', action='store_true',
                                 help='use only even numbers of FFT bins')
    fft_rules_group.add_argument('--pow2', action='store_true',
                                 help='use only powers of 2 as number of FFT bins')

    perf_title.add_argument('--max-threads', metavar='NUM', type=int, default=0,
                            help='maximum number of PSD threads (0 = auto, default: %(default)s)')
    perf_title.add_argument('--max-queue-size', metavar='NUM', type=int, default=0,
                            help='maximum size of PSD work queue (-1 = unlimited, 0 = auto, default: %(default)s)')
    perf_title.add_argument('--no-pyfftw', action='store_true',
                            help='don\'t use pyfftw library even if it is available (use scipy.fftpack or numpy.fft)')

    other_title = parser.add_argument_group('Other options')
    other_title.add_argument('-l', '--linear', action='store_true',
                             help='linear power values instead of logarithmic')
    other_title.add_argument('-R', '--remove-dc', action='store_true',
                             help='interpolate central point to cancel DC bias (useful only with boxcar window)')
    other_title.add_argument('-D', '--detrend', choices=['none', 'constant'], default='none',
                             help='remove mean value from data to cancel DC bias (default: %(default)s)')
    other_title.add_argument('--fft-window', choices=['boxcar', 'hann', 'hamming', 'blackman', 'bartlett',
                                                      'kaiser', 'tukey'], default='hann',
                             help='Welch\'s method window function (default: %(default)s)')
    other_title.add_argument('--fft-window-param', metavar='FLOAT', type=float, default=None,
                             help='shape parameter of window function (required for kaiser and tukey windows)')
    other_title.add_argument('--fft-overlap', metavar='PERCENT', type=float, default=50,
                             help='Welch\'s method overlap between segments (default: %(default)s)')

    return parser
Set center frequency and clear averaged PSD data
def set_center_freq(self, center_freq):
    """Set center frequency and clear averaged PSD data"""
    # Fresh accumulator state for one frequency hop.
    return {
        'repeats': 0,
        'freq_array': self._base_freq_array + self._lnb_lo + center_freq,
        'pwr_array': None,
        'update_lock': threading.Lock(),
        'futures': [],
    }
Return freqs and averaged PSD for given center frequency
def result(self, psd_state):
    """Return freqs and averaged PSD for given center frequency"""
    freqs = numpy.fft.fftshift(psd_state['freq_array'])
    powers = numpy.fft.fftshift(psd_state['pwr_array'])

    # Drop the overlapped edges of the hop, if cropping is enabled.
    if self._crop_factor:
        half_crop = round((self._crop_factor * self._bins) / 2)
        freqs = freqs[half_crop:-half_crop]
        powers = powers[half_crop:-half_crop]

    # Accumulated sum -> mean over repeats.
    if psd_state['repeats'] > 1:
        powers = powers / psd_state['repeats']

    if self._log_scale:
        powers = 10 * numpy.log10(powers)

    return (freqs, powers)
Wait for all PSD threads to finish and return result
def wait_for_result(self, psd_state):
    """Wait for all PSD threads to finish and return result"""
    pending = psd_state['futures']
    if len(pending) > 1:
        concurrent.futures.wait(pending)
    elif pending:
        # A single future: propagate its exception (if any) by resolving it.
        pending[0].result()
    return self.result(psd_state)
Compute PSD from samples and update average for given center frequency
def update(self, psd_state, samples_array):
    """Compute PSD from samples and update average for given center frequency"""
    _freqs, power = simplespectral.welch(samples_array, self._sample_rate, nperseg=self._bins,
                                         window=self._fft_window, noverlap=self._fft_overlap_bins,
                                         detrend=self._detrend)

    if self._remove_dc:
        # Replace the DC bin with the mean of its two neighbours.
        power[0] = (power[1] + power[-1]) / 2

    # Accumulate under the lock; several worker threads share psd_state.
    with psd_state['update_lock']:
        psd_state['repeats'] += 1
        if psd_state['pwr_array'] is None:
            psd_state['pwr_array'] = power
        else:
            psd_state['pwr_array'] += power
Compute PSD from samples and update the average for the given center frequency (asynchronously in another thread)
def update_async(self, psd_state, samples_array):
    """Compute PSD from samples and update average for given center frequency
    (asynchronously in another thread)"""
    task = self._executor.submit(self.update, psd_state, samples_array)
    # Free the samples buffer as soon as the computation completes.
    task.add_done_callback(self._release_future_memory)
    psd_state['futures'].append(task)
    return task
Write PSD of one frequency hop (asynchronously in another thread)
def write_async(self, psd_data_or_future, time_start, time_stop, samples):
    """Write PSD of one frequency hop (asynchronously in another thread)"""
    # Hand the (possibly still pending) PSD result to the writer thread.
    return self._executor.submit(self.write, psd_data_or_future,
                                 time_start, time_stop, samples)
Read data from file - like object
def read(self, f):
    """Read data from file-like object

    :return: tuple (header, pwr_array), or None on EOF
    :raises ValueError: when the magic bytes do not match
    """
    magic = f.read(len(self.magic))
    if not magic:
        return None
    if magic != self.magic:
        raise ValueError('Magic bytes not found! Read data: {}'.format(magic))

    header = self.header._make(
        self.header_struct.unpack(f.read(self.header_struct.size))
    )
    # numpy.fromstring is deprecated (removed in NumPy 2.x) for binary input;
    # frombuffer is the replacement.  .copy() keeps the array writable, as
    # fromstring's result was.
    pwr_array = numpy.frombuffer(f.read(header.size), dtype='float32').copy()
    return (header, pwr_array)
Write data to file - like object
def write(self, f, time_start, time_stop, start, stop, step, samples, pwr_array):
    """Write data to file-like object"""
    # Record = magic bytes + packed header + raw float32 payload.
    header = self.header_struct.pack(self.version, time_start, time_stop,
                                     start, stop, step, samples, pwr_array.nbytes)
    f.write(self.magic)
    f.write(header)
    f.write(pwr_array.tobytes())
    f.flush()
Write PSD of one frequency hop
def write(self, psd_data_or_future, time_start, time_stop, samples):
    """Write PSD of one frequency hop"""
    try:
        # Wait for result of future
        f_array, pwr_array = psd_data_or_future.result()
    except AttributeError:
        # Already a plain (freqs, powers) tuple.
        f_array, pwr_array = psd_data_or_future

    try:
        step = f_array[1] - f_array[0]
        start, stop = f_array[0], f_array[-1] + step
        self.formatter.write(self.output, time_start.timestamp(), time_stop.timestamp(),
                             start, stop, step, samples, pwr_array)
    except Exception as e:
        logging.exception('Error writing to output file: {}'.format(e))
Write PSD of one frequency hop
def write(self, psd_data_or_future, time_start, time_stop, samples):
    """Write PSD of one frequency hop"""
    try:
        # Wait for result of future
        f_array, pwr_array = psd_data_or_future.result()
    except AttributeError:
        f_array, pwr_array = psd_data_or_future

    header = ('# soapy_power output\n'
              '# Acquisition start: {}\n'
              '# Acquisition end: {}\n'
              '#\n'
              '# frequency [Hz] power spectral density [dB/Hz]\n').format(time_start, time_stop)
    body = ''.join('{} {}\n'.format(f, pwr) for f, pwr in zip(f_array, pwr_array))
    self.output.write(header + body + '\n')
    self.output.flush()
Write PSD of one frequency hop
def write(self, psd_data_or_future, time_start, time_stop, samples):
    """Write PSD of one frequency hop"""
    try:
        # Wait for result of future
        f_array, pwr_array = psd_data_or_future.result()
    except AttributeError:
        f_array, pwr_array = psd_data_or_future

    try:
        step = f_array[1] - f_array[0]
        # rtl_power-style CSV row: date, time, start, stop, step, samples, powers...
        fields = [time_stop.strftime('%Y-%m-%d'), time_stop.strftime('%H:%M:%S'),
                  f_array[0], f_array[-1] + step, step, samples]
        fields.extend(pwr_array)
        self.output.write('{}\n'.format(', '.join(str(x) for x in fields)))
        self.output.flush()
    except Exception:
        logging.exception('Error writing to output file:')
Submits a callable to be executed with the given arguments.
def submit(self, fn, *args, **kwargs):
    """Submits a callable to be executed with the given arguments.

    Tracks the high-water mark of the work queue in
    ThreadPoolExecutor.max_queue_size_reached.
    """
    fut = super().submit(fn, *args, **kwargs)
    self.max_queue_size_reached = max(self.max_queue_size_reached,
                                      self._work_queue.qsize())
    return fut
Return nearest number of FFT bins ( even or power of two )
def nearest_bins(self, bins, even=False, pow2=False):
    """Return nearest number of FFT bins (even or power of two)"""
    if pow2:
        exponent = math.log(bins, 2)
        # Non-integer exponent -> round up to the next power of two.
        if exponent % 1 != 0:
            bins = 2**math.ceil(exponent)
            logger.warning('number of FFT bins should be power of two, changing to {}'.format(bins))
    elif even and bins % 2 != 0:
        bins = math.ceil(bins / 2) * 2
        logger.warning('number of FFT bins should be even, changing to {}'.format(bins))
    return bins
Return the nearest overlap/crop factor based on the number of bins
def nearest_overlap(self, overlap, bins):
    """Return nearest overlap/crop factor based on number of bins"""
    overlapped_bins = overlap * bins
    # Cropping removes the same number of bins from both edges, so the
    # overlapped bin count must be even.
    if overlapped_bins % 2 != 0:
        overlapped_bins = math.ceil(overlapped_bins / 2) * 2
        overlap = overlapped_bins / bins
        logger.warning('number of overlapping FFT bins should be even, '
                       'changing overlap/crop factor to {:.5f}'.format(overlap))
    return overlap
Convert integration time to number of repeats
def time_to_repeats(self, bins, integration_time):
    """Convert integration time to number of repeats"""
    total_samples = self.device.sample_rate * integration_time
    return math.ceil(total_samples / bins)
Returns list of frequencies for frequency hopping
def freq_plan(self, min_freq, max_freq, bins, overlap=0, quiet=False):
    """Returns list of frequencies for frequency hopping

    :param float min_freq: lower edge of the scanned range [Hz]
    :param float max_freq: upper edge of the scanned range [Hz]
    :param int bins: number of FFT bins per hop
    :param float overlap: fraction of each hop shared with its neighbour
    :param bool quiet: suppress the INFO-level summary
    :return: list of hop center frequencies [Hz]
    """
    bin_size = self.bins_to_bin_size(bins)
    # Usable (non-overlapped) portion of each hop.
    bins_crop = round((1 - overlap) * bins)
    sample_rate_crop = (1 - overlap) * self.device.sample_rate
    freq_range = max_freq - min_freq
    # Hopping is only needed when the range exceeds one cropped hop.
    hopping = True if freq_range >= sample_rate_crop else False
    hop_size = self.nearest_freq(sample_rate_crop, bin_size)
    hops = math.ceil(freq_range / hop_size) if hopping else 1
    # Without hopping, the single hop is centered on the requested range.
    min_center_freq = min_freq + (hop_size / 2) if hopping else min_freq + (freq_range / 2)
    max_center_freq = min_center_freq + ((hops - 1) * hop_size)
    freq_list = [min_center_freq + (i * hop_size) for i in range(hops)]
    if not quiet:
        logger.info('overlap: {:.5f}'.format(overlap))
        logger.info('bin_size: {:.2f} Hz'.format(bin_size))
        logger.info('bins: {}'.format(bins))
        logger.info('bins (after crop): {}'.format(bins_crop))
        logger.info('sample_rate: {:.3f} MHz'.format(self.device.sample_rate / 1e6))
        logger.info('sample_rate (after crop): {:.3f} MHz'.format(sample_rate_crop / 1e6))
        logger.info('freq_range: {:.3f} MHz'.format(freq_range / 1e6))
        logger.info('hopping: {}'.format('YES' if hopping else 'NO'))
        logger.info('hop_size: {:.3f} MHz'.format(hop_size / 1e6))
        logger.info('hops: {}'.format(hops))
        logger.info('min_center_freq: {:.3f} MHz'.format(min_center_freq / 1e6))
        logger.info('max_center_freq: {:.3f} MHz'.format(max_center_freq / 1e6))
        logger.info('min_freq (after crop): {:.3f} MHz'.format((min_center_freq - (hop_size / 2)) / 1e6))
        logger.info('max_freq (after crop): {:.3f} MHz'.format((max_center_freq + (hop_size / 2)) / 1e6))
        logger.debug('Frequency hops table:')
        logger.debug(' {:8s} {:8s} {:8s}'.format('Min:', 'Center:', 'Max:'))
        for f in freq_list:
            logger.debug(' {:8.3f} MHz {:8.3f} MHz {:8.3f} MHz'.format(
                (f - (self.device.sample_rate / 2)) / 1e6,
                f / 1e6,
                (f + (self.device.sample_rate / 2)) / 1e6,
            ))
    return freq_list
Create buffer for reading samples
def create_buffer(self, bins, repeats, base_buffer_size, max_buffer_size=0):
    """Create buffer for reading samples

    :param int bins: FFT bins per spectrum
    :param int repeats: number of spectra to average
    :param int base_buffer_size: device buffer granularity (samples)
    :param int max_buffer_size: cap (0 = auto ~100 MB, negative = unlimited)
    :return: tuple (buffer_repeats, complex64 sample buffer)
    """
    samples = bins * repeats
    buffer_repeats = 1
    # Round the needed sample count up to a whole number of base buffers.
    buffer_size = math.ceil(samples / base_buffer_size) * base_buffer_size
    if not max_buffer_size:
        # Max buffer size about 100 MB
        max_buffer_size = (100 * 1024**2) / 8
    if max_buffer_size > 0:
        max_buffer_size = math.ceil(max_buffer_size / base_buffer_size) * base_buffer_size
        if buffer_size > max_buffer_size:
            # Fixed typo in warning text ("shrinked" -> "shrunk").
            logger.warning('Required buffer size ({}) will be shrunk to max_buffer_size ({})!'.format(
                buffer_size, max_buffer_size
            ))
            buffer_repeats = math.ceil(buffer_size / max_buffer_size)
            buffer_size = max_buffer_size
    logger.info('repeats: {}'.format(repeats))
    logger.info('samples: {} (time: {:.5f} s)'.format(samples, samples / self.device.sample_rate))
    if max_buffer_size > 0:
        logger.info('max_buffer_size (samples): {} (repeats: {:.2f}, time: {:.5f} s)'.format(
            max_buffer_size, max_buffer_size / bins, max_buffer_size / self.device.sample_rate
        ))
    else:
        logger.info('max_buffer_size (samples): UNLIMITED')
    logger.info('buffer_size (samples): {} (repeats: {:.2f}, time: {:.5f} s)'.format(
        buffer_size, buffer_size / bins, buffer_size / self.device.sample_rate
    ))
    logger.info('buffer_repeats: {}'.format(buffer_repeats))
    # `zeros` is a module-level alias (presumably numpy.zeros or an aligned
    # allocator) — confirm at module top.
    return (buffer_repeats, zeros(buffer_size, numpy.complex64))
Prepare samples buffer and start streaming samples from device
def setup(self, bins, repeats, base_buffer_size=0, max_buffer_size=0, fft_window='hann', fft_overlap=0.5,
          crop_factor=0, log_scale=True, remove_dc=False, detrend=None, lnb_lo=0, tune_delay=0,
          reset_stream=False, max_threads=0, max_queue_size=0):
    """Prepare samples buffer and start streaming samples from device

    Restarts any active stream, sizes the sample buffer from the device's
    actual base buffer, and builds the PSD helper and output writer.
    """
    if self.device.is_streaming:
        self.device.stop_stream()
    base_buffer = self.device.start_stream(buffer_size=base_buffer_size)
    self._bins = bins
    self._repeats = repeats
    # The device decides the real base buffer size; use what it returned.
    self._base_buffer_size = len(base_buffer)
    self._max_buffer_size = max_buffer_size
    self._buffer_repeats, self._buffer = self.create_buffer(
        bins, repeats, self._base_buffer_size, self._max_buffer_size
    )
    self._tune_delay = tune_delay
    self._reset_stream = reset_stream
    self._psd = psd.PSD(bins, self.device.sample_rate, fft_window=fft_window, fft_overlap=fft_overlap,
                        crop_factor=crop_factor, log_scale=log_scale, remove_dc=remove_dc, detrend=detrend,
                        lnb_lo=lnb_lo, max_threads=max_threads, max_queue_size=max_queue_size)
    self._writer = writer.formats[self._output_format](self._output)
Stop streaming samples from device and delete samples buffer
def stop(self):
    """Stop streaming samples from device and delete samples buffer"""
    if not self.device.is_streaming:
        return

    self.device.stop_stream()
    self._writer.close()

    # Drop all per-measurement state so a stale buffer cannot be reused.
    for attr in ('_bins', '_repeats', '_base_buffer_size', '_max_buffer_size',
                 '_buffer_repeats', '_buffer', '_tune_delay', '_reset_stream',
                 '_psd', '_writer'):
        setattr(self, attr, None)
Tune to specified center frequency and compute Power Spectral Density
def psd(self, freq):
    """Tune to specified center frequency and compute Power Spectral Density

    :param float freq: center frequency [Hz]
    :return: tuple (PSD future, acquisition start time, acquisition stop time)
    :raises RuntimeError: when setup() has not been called first
    """
    if not self.device.is_streaming:
        raise RuntimeError('Streaming is not initialized, you must run setup() first!')

    # Tune to new frequency in main thread
    logger.debug(' Frequency hop: {:.2f} Hz'.format(freq))
    t_freq = time.time()
    if self.device.freq != freq:
        # Deactivate streaming before tuning
        if self._reset_stream:
            self.device.device.deactivateStream(self.device.stream)

        # Actually tune to new center frequency
        self.device.freq = freq

        # Reactivate streaming after tuning
        if self._reset_stream:
            self.device.device.activateStream(self.device.stream)

        # Delay reading samples after tuning: discard samples until the
        # requested settle time has elapsed.
        if self._tune_delay:
            t_delay = time.time()
            while True:
                self.device.read_stream()
                t_delay_end = time.time()
                if t_delay_end - t_delay >= self._tune_delay:
                    break
            logger.debug(' Tune delay: {:.3f} s'.format(t_delay_end - t_delay))
    else:
        logger.debug(' Same frequency as before, tuning skipped')

    psd_state = self._psd.set_center_freq(freq)
    t_freq_end = time.time()
    logger.debug(' Tune time: {:.3f} s'.format(t_freq_end - t_freq))

    # NOTE(review): loop/tail indentation reconstructed from the collapsed
    # source — confirm against upstream soapy_power.
    for repeat in range(self._buffer_repeats):
        logger.debug(' Repeat: {}'.format(repeat + 1))
        # Read samples from SDR in main thread
        t_acq = time.time()
        acq_time_start = datetime.datetime.utcnow()
        self.device.read_stream_into_buffer(self._buffer)
        acq_time_stop = datetime.datetime.utcnow()
        t_acq_end = time.time()
        logger.debug(' Acquisition time: {:.3f} s'.format(t_acq_end - t_acq))

        # Start FFT computation in another thread; copy the buffer so the
        # next acquisition does not overwrite the data being processed.
        self._psd.update_async(psd_state, numpy.copy(self._buffer))
        t_final = time.time()

        # _shutdown is a module-level flag, presumably set by a signal
        # handler — confirm.
        if _shutdown:
            break

    psd_future = self._psd.result_async(psd_state)
    logger.debug(' Total hop time: {:.3f} s'.format(t_final - t_freq))

    return (psd_future, acq_time_start, acq_time_stop)
Sweep spectrum using frequency hopping
def sweep(self, min_freq, max_freq, bins, repeats, runs=0, time_limit=0, overlap=0, fft_window='hann',
          fft_overlap=0.5, crop=False, log_scale=True, remove_dc=False, detrend=None, lnb_lo=0,
          tune_delay=0, reset_stream=False, base_buffer_size=0, max_buffer_size=0, max_threads=0,
          max_queue_size=0):
    """Sweep spectrum using frequency hopping

    Runs repeated measurement passes over the frequency plan until the run
    count, the time limit, or a shutdown request stops the loop.
    """
    self.setup(
        bins, repeats, base_buffer_size, max_buffer_size,
        fft_window=fft_window, fft_overlap=fft_overlap,
        crop_factor=overlap if crop else 0,
        log_scale=log_scale, remove_dc=remove_dc, detrend=detrend, lnb_lo=lnb_lo,
        tune_delay=tune_delay, reset_stream=reset_stream,
        max_threads=max_threads, max_queue_size=max_queue_size
    )
    try:
        # LNB LO offset is applied to the plan; psd() tunes the hardware.
        freq_list = self.freq_plan(min_freq - lnb_lo, max_freq - lnb_lo, bins, overlap)
        t_start = time.time()
        run = 0
        # runs == 0 means endless measurement (until _shutdown is set).
        while not _shutdown and (runs == 0 or run < runs):
            run += 1
            t_run_start = time.time()
            logger.debug('Run: {}'.format(run))

            for freq in freq_list:
                # Tune to new frequency, acquire samples and compute Power Spectral Density
                psd_future, acq_time_start, acq_time_stop = self.psd(freq)

                # Write PSD to stdout (in another thread)
                self._writer.write_async(psd_future, acq_time_start, acq_time_stop,
                                         len(self._buffer) * self._buffer_repeats)

                if _shutdown:
                    break

            # Write end of measurement marker (in another thread)
            write_next_future = self._writer.write_next_async()
            t_run = time.time()
            logger.debug(' Total run time: {:.3f} s'.format(t_run - t_run_start))

            # End measurement if time limit is exceeded
            if time_limit and (time.time() - t_start) >= time_limit:
                logger.info('Time limit of {} s exceeded, completed {} runs'.format(time_limit, run))
                break

        # Wait for last write to be finished
        write_next_future.result()

        # Debug thread pool queues
        logging.debug('Number of USB buffer overflow errors: {}'.format(self.device.buffer_overflow_count))
        logging.debug('PSD worker threads: {}'.format(self._psd._executor._max_workers))
        logging.debug('Max. PSD queue size: {} / {}'.format(self._psd._executor.max_queue_size_reached,
                                                            self._psd._executor.max_queue_size))
        logging.debug('Writer worker threads: {}'.format(self._writer._executor._max_workers))
        logging.debug('Max. Writer queue size: {} / {}'.format(self._writer._executor.max_queue_size_reached,
                                                               self._writer._executor.max_queue_size))
    finally:
        # Shutdown SDR
        self.stop()

    t_stop = time.time()
    logger.info('Total time: {:.3f} s'.format(t_stop - t_start))
close ()
def close(self):
    """close()

    Disconnects the object from the bus.
    """
    os.close(self._fd)
    # Reset bookkeeping to the "not connected" sentinel values.
    self._fd, self._addr, self._pec = -1, -1, 0
open(bus)
def open(self, bus):
    """open(bus)

    Connects the object to the specified SMBus.

    :raises OverflowError: when the bus number makes the device path too long
    :raises IOError: when the device node cannot be opened
    """
    bus = int(bus)
    path = "/dev/i2c-%d" % (bus,)
    if len(path) >= MAXPATH:
        raise OverflowError("Bus number is invalid.")
    try:
        self._fd = os.open(path, os.O_RDWR, 0)
    except OSError as e:
        # Fixed: IOError(e.errno) set only args, leaving .errno unset and
        # losing the human-readable reason; pass errno, strerror and path.
        raise IOError(e.errno, e.strerror, path)
private helper method
def _set_addr(self, addr): """private helper method""" if self._addr != addr: ioctl(self._fd, SMBUS.I2C_SLAVE, addr) self._addr = addr
write_quick ( addr )
def write_quick(self, addr):
    """write_quick(addr)

    Perform SMBus Quick transaction.
    """
    self._set_addr(addr)
    status = SMBUS.i2c_smbus_write_quick(self._fd, SMBUS.I2C_SMBUS_WRITE)
    if status != 0:
        raise IOError(ffi.errno)
read_byte ( addr ) - > result
def read_byte(self, addr):
    """read_byte(addr) -> result

    Perform SMBus Read Byte transaction.
    """
    self._set_addr(addr)
    value = SMBUS.i2c_smbus_read_byte(self._fd)
    if value == -1:
        raise IOError(ffi.errno)
    return value
write_byte ( addr val )
def write_byte(self, addr, val):
    """write_byte(addr, val)

    Perform SMBus Write Byte transaction.
    """
    self._set_addr(addr)
    status = SMBUS.i2c_smbus_write_byte(self._fd, ffi.cast("__u8", val))
    if status == -1:
        raise IOError(ffi.errno)
read_byte_data ( addr cmd ) - > result
def read_byte_data(self, addr, cmd):
    """read_byte_data(addr, cmd) -> result

    Perform SMBus Read Byte Data transaction.
    """
    self._set_addr(addr)
    value = SMBUS.i2c_smbus_read_byte_data(self._fd, ffi.cast("__u8", cmd))
    if value == -1:
        raise IOError(ffi.errno)
    return value
write_byte_data ( addr cmd val )
def write_byte_data(self, addr, cmd, val):
    """write_byte_data(addr, cmd, val)

    Perform SMBus Write Byte Data transaction.
    """
    self._set_addr(addr)
    status = SMBUS.i2c_smbus_write_byte_data(self._fd, ffi.cast("__u8", cmd),
                                             ffi.cast("__u8", val))
    if status == -1:
        raise IOError(ffi.errno)
read_word_data ( addr cmd ) - > result
def read_word_data(self, addr, cmd):
    """read_word_data(addr, cmd) -> result

    Perform SMBus Read Word Data transaction.
    """
    self._set_addr(addr)
    value = SMBUS.i2c_smbus_read_word_data(self._fd, ffi.cast("__u8", cmd))
    if value == -1:
        raise IOError(ffi.errno)
    return value
write_word_data ( addr cmd val )
def write_word_data(self, addr, cmd, val):
    """write_word_data(addr, cmd, val)

    Perform SMBus Write Word Data transaction.

    :raises IOError: if the underlying SMBus call fails.
    """
    self._set_addr(addr)
    status = SMBUS.i2c_smbus_write_word_data(
        self._fd, ffi.cast("__u8", cmd), ffi.cast("__u16", val))
    if status == -1:
        raise IOError(ffi.errno)
process_call(addr, cmd, val)
def process_call(self, addr, cmd, val):
    """process_call(addr, cmd, val)

    Perform SMBus Process Call transaction.

    Note: although i2c_smbus_process_call returns a value, according to
    smbusmodule.c this method does not return a value by default. Set
    _compat = False on the SMBus instance to get a return value.

    :raises IOError: if the underlying SMBus call fails.
    """
    self._set_addr(addr)
    response = SMBUS.i2c_smbus_process_call(
        self._fd, ffi.cast("__u8", cmd), ffi.cast("__u16", val))
    if response == -1:
        raise IOError(ffi.errno)
    # NOTE(review): the docstring says _compat = False yields a return
    # value, but the code returns when _compat is truthy -- confirm the
    # intended semantics against smbusmodule.c.
    if self._compat:
        return response
read_block_data(addr, cmd) -> results
def read_block_data(self, addr, cmd):
    """read_block_data(addr, cmd) -> results

    Perform SMBus Read Block Data transaction.

    :returns: list of bytes read from the device.
    :raises IOError: if the underlying SMBus call fails.
    """
    # XXX untested, the raspberry pi i2c driver does not support this
    # command
    self._set_addr(addr)
    data = ffi.new("union i2c_smbus_data *")
    status = SMBUS.i2c_smbus_access(
        self._fd, int2byte(SMBUS.I2C_SMBUS_READ), ffi.cast("__u8", cmd),
        SMBUS.I2C_SMBUS_BLOCK_DATA, data)
    if status:
        raise IOError(ffi.errno)
    return smbus_data_to_list(data)
write_block_data(addr, cmd, vals)
def write_block_data(self, addr, cmd, vals):
    """write_block_data(addr, cmd, vals)

    Perform SMBus Write Block Data transaction.

    :raises IOError: if the underlying SMBus call fails.
    """
    self._set_addr(addr)
    data = ffi.new("union i2c_smbus_data *")
    list_to_smbus_data(data, vals)
    status = SMBUS.i2c_smbus_access(
        self._fd, int2byte(SMBUS.I2C_SMBUS_WRITE), ffi.cast("__u8", cmd),
        SMBUS.I2C_SMBUS_BLOCK_DATA, data)
    if status:
        raise IOError(ffi.errno)
block_process_call(addr, cmd, vals) -> results
def block_process_call(self, addr, cmd, vals):
    """block_process_call(addr, cmd, vals) -> results

    Perform SMBus Block Process Call transaction.

    :returns: list of bytes returned by the device.
    :raises IOError: if the underlying SMBus call fails.
    """
    self._set_addr(addr)
    data = ffi.new("union i2c_smbus_data *")
    list_to_smbus_data(data, vals)
    # Fix: wrap the read/write flag in int2byte() like every other
    # i2c_smbus_access() caller here (read_block_data, write_block_data,
    # read_i2c_block_data); the C parameter is a single char.
    if SMBUS.i2c_smbus_access(self._fd, int2byte(SMBUS.I2C_SMBUS_WRITE),
                              ffi.cast("__u8", cmd),
                              SMBUS.I2C_SMBUS_BLOCK_PROC_CALL, data):
        raise IOError(ffi.errno)
    return smbus_data_to_list(data)
read_i2c_block_data(addr, cmd, len=32) -> results
def read_i2c_block_data(self, addr, cmd, len=32):
    """read_i2c_block_data(addr, cmd, len=32) -> results

    Perform I2C Block Read transaction.

    :returns: list of bytes read from the device.
    :raises IOError: if the underlying SMBus call fails.
    """
    self._set_addr(addr)
    data = ffi.new("union i2c_smbus_data *")
    # The kernel reads back at most data.block[0] bytes.
    data.block[0] = len
    size = (SMBUS.I2C_SMBUS_I2C_BLOCK_BROKEN if len == 32
            else SMBUS.I2C_SMBUS_I2C_BLOCK_DATA)
    status = SMBUS.i2c_smbus_access(
        self._fd, int2byte(SMBUS.I2C_SMBUS_READ), ffi.cast("__u8", cmd),
        size, data)
    if status:
        raise IOError(ffi.errno)
    return smbus_data_to_list(data)
True if Packet Error Codes ( PEC ) are enabled
def pec(self, value):
    """True if Packet Error Codes (PEC) are enabled"""
    enabled = bool(value)
    # Only touch the device when the setting actually changes.
    if enabled == self._pec:
        return
    if ioctl(self._fd, SMBUS.I2C_PEC, enabled):
        raise IOError(ffi.errno)
    self._pec = enabled
Force a CMake configure-and-build to run.
def run_cmake(arg=""):
    """Run CMake to configure and build zql.

    :param arg: extra command-line arguments passed through to ``cmake``
        (whitespace-separated string).

    Exits the process with status -1 if cmake is not installed or the
    build fails.
    """
    if ds.find_executable('cmake') is None:
        # Fix: py2-only ``print`` statements were syntax errors on py3;
        # use the function form, valid on both.
        print("CMake is required to build zql")
        print("Please install cmake version >= 2.8 and re-run setup")
        sys.exit(-1)
    print("Configuring zql build with CMake.... ")
    try:
        # Fix: build in the 'build' directory next to this file (the one
        # that is mkpath'd), not a 'build' relative to the caller's cwd.
        build_dir = op.join(op.split(__file__)[0], 'build')
        dd.mkpath(build_dir)
        original_cwd = os.getcwd()
        os.chdir(build_dir)
        try:
            ds.spawn(['cmake', '..'] + arg.split())
            ds.spawn(['make', 'clean'])
            ds.spawn(['make'])
        finally:
            # Fix: restore the working directory even when a spawn fails.
            os.chdir(original_cwd)
    except ds.DistutilsExecError:
        print("Error while running cmake")
        print("run 'setup.py build --help' for build options")
        print("You may also try editing the settings in CMakeLists.txt file and re-running setup")
        sys.exit(-1)
Return the starting datetime: number of units before now.
def start(cls, now, number, **options):
    """
    Return the starting datetime: ``number`` of units before ``now``.
    """
    # The timedelta keyword (e.g. "days") is derived from the class name.
    unit = cls.__name__.lower()
    return cls.mask(now, **options) - timedelta(**{unit: number - 1})
Return a set of datetimes, after filtering the given datetimes.
def filter(cls, datetimes, number, now=None, **options):
    """Return a set of datetimes, after filtering ``datetimes``.

    The result will be the ``datetimes`` which are ``number`` of units
    before ``now``, until ``now``, with approximately one unit between
    each of them. The first datetime for any unit is kept, later
    duplicates are removed.

    If there are ``datetimes`` after ``now``, they will be returned
    unfiltered.

    :param datetimes: iterable of datetime objects to filter.
    :param number: how many units back from ``now`` to keep (int >= 0).
    :param now: reference point; a datetime, a date (converted to end of
        day), or None (uses the current time).
    :raises ValueError: if ``number`` is not a non-negative int.
    """
    if not isinstance(number, int) or number < 0:
        raise ValueError('Invalid number: %s' % number)
    # Materialize so we can iterate more than once below.
    datetimes = tuple(datetimes)
    # Sample the first datetime to see if it is timezone-aware
    tzinfo = None
    if datetimes and datetimes[0].tzinfo is not None:
        tzinfo = UTC()
    if now is None:
        now = datetime.now(tzinfo)
    if not hasattr(now, 'second'):
        # now looks like a date, so convert it into a datetime
        now = datetime.combine(now, time(23, 59, 59, 999999, tzinfo=tzinfo))
    # Always keep datetimes from the future
    future = set(dt for dt in datetimes if dt > now)
    if number == 0:
        return future
    # Don't consider datetimes from before the start
    start = cls.start(now, number, **options)
    valid = (dt for dt in datetimes if start <= dt <= now)
    # Deduplicate datetimes with the same mask() value by keeping
    # the oldest.
    kept = {}
    # sorted() + setdefault keeps the first (oldest) datetime per bucket.
    for dt in sorted(valid):
        kept.setdefault(cls.mask(dt, **options), dt)
    return set(kept.values()) | future
Return a datetime with the same value as dt to a resolution of days.
def mask(cls, dt, **options):
    """
    Return a datetime with the same value as ``dt``, truncated to a
    resolution of days (time-of-day fields zeroed; tzinfo preserved).
    """
    zeroed = {field: 0 for field in ("hour", "minute", "second", "microsecond")}
    return dt.replace(**zeroed)
Return the starting datetime: number of weeks before now.
def start(cls, now, number, firstweekday=calendar.SATURDAY, **options):
    """
    Return the starting datetime: ``number`` of weeks before ``now``.

    ``firstweekday`` determines when the week starts. It defaults to
    Saturday.
    """
    current_week = cls.mask(now, firstweekday=firstweekday, **options)
    return current_week - timedelta(days=(number - 1) * cls.DAYS_IN_WEEK)
Return a datetime with the same value as dt to a resolution of weeks.
def mask(cls, dt, firstweekday=calendar.SATURDAY, **options):
    """
    Return a datetime with the same value as ``dt``, truncated to a
    resolution of weeks.

    ``firstweekday`` determines when the week starts. It defaults to
    Saturday.
    """
    # Number of days since the most recent `firstweekday`.
    days_back = (dt.weekday() - firstweekday) % cls.DAYS_IN_WEEK
    week_start = dt - timedelta(days=days_back)
    return week_start.replace(hour=0, minute=0, second=0, microsecond=0)
Return the starting datetime: number of months before now.
def start(cls, now, number, **options):
    """
    Return the starting datetime: ``number`` of months before ``now``.
    """
    year, month = now.year, now.month - number + 1
    # Floor-divide negative month counts back into previous years.
    if month < 0:
        year += month // cls.MONTHS_IN_YEAR
        month %= cls.MONTHS_IN_YEAR
    # A month of 0 means December of the previous year.
    if month == 0:
        year -= 1
        month = 12
    return cls.mask(now, **options).replace(year=year, month=month)
Return the starting datetime: number of years before now.
def start(cls, now, number, **options):
    """
    Return the starting datetime: ``number`` of years before ``now``.
    """
    first_year = now.year - number + 1
    return cls.mask(now).replace(year=first_year)
Return a set of datetimes that should be kept, out of the given datetimes.
def to_keep(datetimes, years=0, months=0, weeks=0, days=0,
            hours=0, minutes=0, seconds=0, firstweekday=SATURDAY, now=None):
    """
    Return a set of datetimes that should be kept, out of ``datetimes``.

    Keeps up to ``years``, ``months``, ``weeks``, ``days``, ``hours``,
    ``minutes``, and ``seconds`` in the past. When keeping weeks, it
    prefers to keep ``firstweekday``, which defaults to Saturday.

    If ``now`` is None, it will base its calculations on
    ``datetime.datetime.now()``. Datetimes after this point will always
    be kept.
    """
    datetimes = set(datetimes)
    # Union of every per-unit filter; a datetime survives if any unit
    # wants to keep it.
    keep = filters.Years.filter(datetimes, number=years, now=now)
    keep |= filters.Months.filter(datetimes, number=months, now=now)
    keep |= filters.Weeks.filter(datetimes, number=weeks,
                                 firstweekday=firstweekday, now=now)
    keep |= filters.Days.filter(datetimes, number=days, now=now)
    keep |= filters.Hours.filter(datetimes, number=hours, now=now)
    keep |= filters.Minutes.filter(datetimes, number=minutes, now=now)
    keep |= filters.Seconds.filter(datetimes, number=seconds, now=now)
    return keep
Return a set of datetimes that should be deleted, out of the given datetimes.
def to_delete(datetimes, years=0, months=0, weeks=0, days=0,
              hours=0, minutes=0, seconds=0, firstweekday=SATURDAY, now=None):
    """
    Return a set of datetimes that should be deleted, out of
    ``datetimes``.

    See ``to_keep`` for a description of arguments.
    """
    datetimes = set(datetimes)
    kept = to_keep(datetimes, years=years, months=months, weeks=weeks,
                   days=days, hours=hours, minutes=minutes, seconds=seconds,
                   firstweekday=firstweekday, now=now)
    # Delete everything that no retention rule wants to keep.
    return datetimes - kept
Return a set of dates that should be kept, out of the given dates.
def dates_to_keep(dates, years=0, months=0, weeks=0, days=0,
                  firstweekday=SATURDAY, now=None):
    """
    Return a set of dates that should be kept, out of ``dates``.

    See ``to_keep`` for a description of arguments.
    """
    # Promote each date to a midnight datetime, filter, then demote back.
    midnights = (datetime.combine(d, time()) for d in dates)
    kept = to_keep(midnights, years=years, months=months, weeks=weeks,
                   days=days, hours=0, minutes=0, seconds=0,
                   firstweekday=firstweekday, now=now)
    return {dt.date() for dt in kept}
Return a set of dates that should be deleted, out of the given dates.
def dates_to_delete(dates, years=0, months=0, weeks=0, days=0,
                    firstweekday=SATURDAY, now=None):
    """
    Return a set of dates that should be deleted, out of ``dates``.

    See ``to_keep`` for a description of arguments.
    """
    all_dates = set(dates)
    kept = dates_to_keep(all_dates, years=years, months=months, weeks=weeks,
                         days=days, firstweekday=firstweekday, now=now)
    return all_dates - kept
Returns an SPI control byte.
def _get_spi_control_byte(self, read_write_cmd): """Returns an SPI control byte. The MCP23S17 is a slave SPI device. The slave address contains four fixed bits and three user-defined hardware address bits (if enabled via IOCON.HAEN) (pins A2, A1 and A0) with the read/write bit filling out the control byte:: +--------------------+ |0|1|0|0|A2|A1|A0|R/W| +--------------------+ 7 6 5 4 3 2 1 0 :param read_write_cmd: Read or write command. :type read_write_cmd: int """ # board_addr_pattern = (self.hardware_addr & 0b111) << 1 board_addr_pattern = (self.hardware_addr << 1) & 0xE rw_cmd_pattern = read_write_cmd & 1 # make sure it's just 1 bit long return 0x40 | board_addr_pattern | rw_cmd_pattern
Returns the bit specified from the address.
def read_bit(self, bit_num, address):
    """Returns the bit specified from the address.

    :param bit_num: The bit number to read from.
    :type bit_num: int
    :param address: The address to read from.
    :type address: int
    :returns: int -- the bit value from the address
    """
    register_value = self.read(address)
    return int(bool(register_value & get_bit_mask(bit_num)))
Writes the value given to the bit in the address specified.
def write_bit(self, value, bit_num, address):
    """Writes the value given to the bit in the address specified.

    :param value: The value to write.
    :type value: int
    :param bit_num: The bit number to write to.
    :type bit_num: int
    :param address: The address to write to.
    :type address: int
    """
    mask = get_bit_mask(bit_num)
    current = self.read(address)
    # Set or clear the single bit, leaving the rest untouched.
    updated = (current | mask) if value else (current & ~mask)
    self.write(updated, address)
Returns the lowest bit num from a given bit pattern. Returns None if no bits set.
def get_bit_num(bit_pattern):
    """Returns the lowest bit num from a given bit pattern. Returns None
    if no bits set.

    :param bit_pattern: The bit pattern.
    :type bit_pattern: int
    :returns: int -- the bit number
    :returns: None -- no bits set

    >>> pifacecommon.core.get_bit_num(0)
    None
    >>> pifacecommon.core.get_bit_num(0b1)
    0
    >>> pifacecommon.core.get_bit_num(0b11000)
    3
    """
    if not bit_pattern:
        return None
    bit_num = 0
    while not (bit_pattern >> bit_num) & 1:
        bit_num += 1
        # Original behavior: patterns whose lowest set bit is above
        # bit 7 (outside an 8-bit port) fall back to 0.
        if bit_num > 7:
            return 0
    return bit_num
Waits for a port event. When a port event occurs it is placed onto the event queue.
def watch_port_events(port, chip, pin_function_maps, event_queue, return_after_kbdint=False): """Waits for a port event. When a port event occurs it is placed onto the event queue. :param port: The port we are waiting for interrupts on (GPIOA/GPIOB). :type port: int :param chip: The chip we are waiting for interrupts on. :type chip: :class:`pifacecommon.mcp23s17.MCP23S17` :param pin_function_maps: A list of classes that have inheritted from :class:`FunctionMap`\ s describing what to do with events. :type pin_function_maps: list :param event_queue: A queue to put events on. :type event_queue: :py:class:`multiprocessing.Queue` """ # set up epoll gpio25 = open(GPIO_INTERRUPT_DEVICE_VALUE, 'r') # change to use 'with'? epoll = select.epoll() epoll.register(gpio25, select.EPOLLIN | select.EPOLLET) while True: # wait here until input try: events = epoll.poll() except KeyboardInterrupt as e: if return_after_kbdint: return else: raise e except IOError as e: # ignore "Interrupted system call" error. # I don't really like this solution. Ignoring problems is bad! if e.errno != errno.EINTR: raise # find out where the interrupt came from and put it on the event queue if port == pifacecommon.mcp23s17.GPIOA: interrupt_flag = chip.intfa.value else: interrupt_flag = chip.intfb.value if interrupt_flag == 0: continue # The interrupt has not been flagged on this board else: if port == pifacecommon.mcp23s17.GPIOA: interrupt_capture = chip.intcapa.value else: interrupt_capture = chip.intcapb.value event_queue.add_event(InterruptEvent( interrupt_flag, interrupt_capture, chip, time.time())) epoll.close()
Waits for events on the event queue and calls the registered functions.
def handle_events(
        function_maps, event_queue, event_matches_function_map,
        terminate_signal):
    """Waits for events on the event queue and calls the registered
    functions.

    :param function_maps: A list of classes that have inheritted from
        :class:`FunctionMap`\ s describing what to do with events.
    :type function_maps: list
    :param event_queue: A queue to put events on.
    :type event_queue: :py:class:`multiprocessing.Queue`
    :param event_matches_function_map: A function that determines if the
        given event and :class:`FunctionMap` match.
    :type event_matches_function_map: function
    :param terminate_signal: The signal that, when placed on the event
        queue, causes this function to exit.
    """
    while True:
        event = event_queue.get()
        if event == terminate_signal:
            return
        # Call the callback of every function map that matches the event,
        # in registration order.
        for function_map in function_maps:
            if event_matches_function_map(event, function_map):
                function_map.callback(event)
Bring the interrupt pin on the GPIO into Linux userspace.
def bring_gpio_interrupt_into_userspace():  # activate gpio interrupt
    """Bring the interrupt pin on the GPIO into Linux userspace.

    Exports the interrupt pin through the sysfs GPIO interface if it is
    not already exported, then waits for the value file to appear.
    """
    try:
        # is it already there?
        with open(GPIO_INTERRUPT_DEVICE_VALUE):
            return
    except IOError:
        # no, bring it into userspace
        with open(GPIO_EXPORT_FILE, 'w') as export_file:
            export_file.write(str(GPIO_INTERRUPT_PIN))
        # the kernel creates the value file asynchronously after export
        wait_until_file_exists(GPIO_INTERRUPT_DEVICE_VALUE)
Set the interrupt edge on the userspace GPIO pin.
def set_gpio_interrupt_edge(edge='falling'): """Set the interrupt edge on the userspace GPIO pin. :param edge: The interrupt edge ('none', 'falling', 'rising'). :type edge: string """ # we're only interested in the falling edge (1 -> 0) start_time = time.time() time_limit = start_time + FILE_IO_TIMEOUT while time.time() < time_limit: try: with open(GPIO_INTERRUPT_DEVICE_EDGE, 'w') as gpio_edge: gpio_edge.write(edge) return except IOError: pass
Wait until a file exists.
def wait_until_file_exists(filename):
    """Wait until a file exists.

    :param filename: The name of the file to wait for.
    :type filename: string
    :raises Timeout: if the file does not appear within FILE_IO_TIMEOUT
        seconds.
    """
    deadline = time.time() + FILE_IO_TIMEOUT
    # Poll by attempting to open until it succeeds or we time out.
    while time.time() < deadline:
        try:
            with open(filename):
                return
        except IOError:
            pass
    raise Timeout("Waiting too long for %s." % filename)