Dataset columns:
  _id: string (2-7 chars)
  title: string (1-88 chars)
  partition: string (3 classes)
  text: string (75-19.8k chars)
  language: string (1 class)
  meta_information: dict
q18500
Nipap._parse_vrf_query
train
def _parse_vrf_query(self, query_str):
    """ Parse a smart search query for VRFs

        This is a helper function to smart_search_vrf for easier unit
        testing of the parser.
    """
    sp = smart_parsing.VrfSmartParser()
    query = sp.parse(query_str)
    return query
python
{ "resource": "" }
q18501
Nipap.add_pool
train
def add_pool(self, auth, attr):
    """ Create a pool according to `attr`.

        * `auth` [BaseAuth]
            AAA options.
        * `attr` [pool_attr]
            A dict containing the attributes the new pool should have.

        Returns a dict describing the pool which was added.

        This is the documentation of the internal backend function. It's
        exposed over XML-RPC, please also see the XML-RPC documentation for
        :py:func:`nipap.xmlrpc.NipapXMLRPC.add_pool` for full understanding.
    """

    self._logger.debug("add_pool called; attrs: %s" % unicode(attr))

    # sanity check - do we have all attributes?
    req_attr = ['name', 'description', 'default_type']
    self._check_pool_attr(attr, req_attr)

    insert, params = self._sql_expand_insert(attr)
    sql = "INSERT INTO ip_net_pool " + insert

    self._execute(sql, params)
    pool_id = self._lastrowid()
    pool = self.list_pool(auth, { 'id': pool_id })[0]

    # write to audit table
    audit_params = {
        'pool_id': pool['id'],
        'pool_name': pool['name'],
        'username': auth.username,
        'authenticated_as': auth.authenticated_as,
        'full_name': auth.full_name,
        'authoritative_source': auth.authoritative_source,
        'description': 'Added pool %s with attr: %s' % (pool['name'], unicode(attr))
    }
    sql, params = self._sql_expand_insert(audit_params)
    self._execute('INSERT INTO ip_net_log %s' % sql, params)

    return pool
python
{ "resource": "" }
q18502
Nipap.list_pool
train
def list_pool(self, auth, spec=None):
    """ Return a list of pools.

        * `auth` [BaseAuth]
            AAA options.
        * `spec` [pool_spec]
            Specifies what pool(s) to list. If omitted, all will be listed.

        Returns a list of dicts.

        This is the documentation of the internal backend function. It's
        exposed over XML-RPC, please also see the XML-RPC documentation for
        :py:func:`nipap.xmlrpc.NipapXMLRPC.list_pool` for full understanding.
    """
    if spec is None:
        spec = {}

    self._logger.debug("list_pool called; spec: %s" % unicode(spec))

    sql = """SELECT DISTINCT (po.id),
                po.id,
                po.name,
                po.description,
                po.default_type,
                po.ipv4_default_prefix_length,
                po.ipv6_default_prefix_length,
                po.member_prefixes_v4,
                po.member_prefixes_v6,
                po.used_prefixes_v4,
                po.used_prefixes_v6,
                po.free_prefixes_v4,
                po.free_prefixes_v6,
                po.total_prefixes_v4,
                po.total_prefixes_v6,
                po.total_addresses_v4,
                po.total_addresses_v6,
                po.used_addresses_v4,
                po.used_addresses_v6,
                po.free_addresses_v4,
                po.free_addresses_v6,
                po.tags,
                po.avps,
                vrf.id AS vrf_id,
                vrf.rt AS vrf_rt,
                vrf.name AS vrf_name,
                (SELECT array_agg(prefix::text)
                 FROM (SELECT prefix
                       FROM ip_net_plan
                       WHERE pool_id=po.id
                       ORDER BY prefix) AS a) AS prefixes
            FROM ip_net_pool AS po
            LEFT OUTER JOIN ip_net_plan AS inp ON (inp.pool_id = po.id)
            LEFT OUTER JOIN ip_net_vrf AS vrf ON (vrf.id = inp.vrf_id)"""
    params = list()

    # expand spec
    where, params = self._expand_pool_spec(spec)
    if len(where) > 0:
        sql += " WHERE " + where

    sql += " ORDER BY name"

    self._execute(sql, params)

    res = list()
    for row in self._curs_pg:
        p = dict(row)
        # Make sure that prefixes is a list, even if there are no prefixes
        if p['prefixes'] is None:
            p['prefixes'] = []
        res.append(p)

    return res
python
{ "resource": "" }
q18503
Nipap._check_pool_attr
train
def _check_pool_attr(self, attr, req_attr=None):
    """ Check pool attributes.
    """
    if req_attr is None:
        req_attr = []

    # check attribute names
    self._check_attr(attr, req_attr, _pool_attrs)

    # validate IPv4 prefix length
    if attr.get('ipv4_default_prefix_length') is not None:
        try:
            attr['ipv4_default_prefix_length'] = \
                int(attr['ipv4_default_prefix_length'])

            if (attr['ipv4_default_prefix_length'] > 32 or
                    attr['ipv4_default_prefix_length'] < 1):
                raise ValueError()
        except ValueError:
            raise NipapValueError('Default IPv4 prefix length must be an integer between 1 and 32.')

    # validate IPv6 prefix length
    if attr.get('ipv6_default_prefix_length'):
        try:
            attr['ipv6_default_prefix_length'] = \
                int(attr['ipv6_default_prefix_length'])

            if (attr['ipv6_default_prefix_length'] > 128 or
                    attr['ipv6_default_prefix_length'] < 1):
                raise ValueError()
        except ValueError:
            raise NipapValueError('Default IPv6 prefix length must be an integer between 1 and 128.')
python
{ "resource": "" }
q18504
Nipap._get_pool
train
def _get_pool(self, auth, spec):
    """ Get a pool.

        Shorthand function to reduce code in the functions below, since
        more or less all of them need to perform the actions that are
        specified here.

        The major difference to :func:`list_pool` is that an exception
        is raised if no pool matching the spec is found.
    """
    pool = self.list_pool(auth, spec)
    if len(pool) == 0:
        raise NipapInputError("non-existing pool specified")
    return pool[0]
python
{ "resource": "" }
q18505
Nipap.edit_pool
train
def edit_pool(self, auth, spec, attr):
    """ Update pool given by `spec` with attributes `attr`.

        * `auth` [BaseAuth]
            AAA options.
        * `spec` [pool_spec]
            Specifies what pool to edit.
        * `attr` [pool_attr]
            Attributes to update and their new values.

        This is the documentation of the internal backend function. It's
        exposed over XML-RPC, please also see the XML-RPC documentation for
        :py:func:`nipap.xmlrpc.NipapXMLRPC.edit_pool` for full understanding.
    """

    self._logger.debug("edit_pool called; spec: %s attr: %s" %
            (unicode(spec), unicode(attr)))

    if ('id' not in spec and 'name' not in spec) or ('id' in spec and 'name' in spec):
        raise NipapMissingInputError('''pool spec must contain either 'id' or 'name' ''')

    self._check_pool_attr(attr)

    where, params1 = self._expand_pool_spec(spec)
    update, params2 = self._sql_expand_update(attr)
    params = dict(params2.items() + params1.items())

    pools = self.list_pool(auth, spec)

    sql = "UPDATE ip_net_pool SET " + update
    sql += " FROM ip_net_pool AS po WHERE ip_net_pool.id = po.id AND " + where
    sql += " RETURNING po.id AS id"

    self._execute(sql, params)

    updated_pools = self._get_updated_rows(auth, self.search_pool)

    # write to audit table
    audit_params = {
        'username': auth.username,
        'authenticated_as': auth.authenticated_as,
        'full_name': auth.full_name,
        'authoritative_source': auth.authoritative_source
    }
    for p in pools:
        audit_params['pool_id'] = p['id']
        audit_params['pool_name'] = p['name']
        audit_params['description'] = 'Edited pool %s attr: %s' % (p['name'], unicode(attr))

        sql, params = self._sql_expand_insert(audit_params)
        self._execute('INSERT INTO ip_net_log %s' % sql, params)

    return updated_pools
python
{ "resource": "" }
q18506
Nipap.smart_search_pool
train
def smart_search_pool(self, auth, query_str, search_options=None, extra_query=None):
    """ Perform a smart search on pool list.

        * `auth` [BaseAuth]
            AAA options.
        * `query_str` [string]
            Search string
        * `search_options` [options_dict]
            Search options. See :func:`search_pool`.
        * `extra_query` [dict_to_sql]
            Extra search terms, will be AND:ed together with what is
            extracted from the query string.

        Return a dict with three elements:

        * :attr:`interpretation` - How the query string was interpreted.
        * :attr:`search_options` - Various search_options.
        * :attr:`result` - The search result.

        The :attr:`interpretation` is given as a list of dicts, each
        explaining how a part of the search key was interpreted (ie. what
        pool attribute the search operation was performed on).

        The :attr:`result` is a list of dicts containing the search result.

        The smart search function tries to convert the query from a text
        string to a `query` dict which is passed to the :func:`search_pool`
        function. If multiple search keys are detected, they are combined
        with a logical AND.

        It will basically just take each search term and try to match it
        against the name or description column with regex match.

        See the :func:`search_pool` function for an explanation of the
        `search_options` argument.

        This is the documentation of the internal backend function. It's
        exposed over XML-RPC, please also see the XML-RPC documentation for
        :py:func:`nipap.xmlrpc.NipapXMLRPC.smart_search_pool` for full
        understanding.
    """
    if search_options is None:
        search_options = {}

    self._logger.debug("smart_search_pool query string: %s" % query_str)

    success, query = self._parse_pool_query(query_str)
    if not success:
        return {
            'interpretation': query,
            'search_options': search_options,
            'result': [],
            'error': True,
            'error_message': 'query interpretation failed'
        }

    if extra_query is not None:
        query = {
            'operator': 'and',
            'val1': query,
            'val2': extra_query
        }

    self._logger.debug("smart_search_pool; query expanded to: %s" % unicode(query))

    search_result = self.search_pool(auth, query, search_options)
    search_result['interpretation'] = query
    search_result['error'] = False

    return search_result
python
{ "resource": "" }
q18507
Nipap._parse_pool_query
train
def _parse_pool_query(self, query_str):
    """ Parse a smart search query for pools

        This is a helper function to smart_search_pool for easier unit
        testing of the parser.
    """
    sp = smart_parsing.PoolSmartParser()
    query = sp.parse(query_str)
    return query
python
{ "resource": "" }
q18508
Nipap._expand_prefix_spec
train
def _expand_prefix_spec(self, spec, prefix = ''):
    """ Expand prefix specification to SQL.
    """

    # sanity checks
    if type(spec) is not dict:
        raise NipapInputError('invalid prefix specification')

    for key in spec.keys():
        if key not in _prefix_spec:
            raise NipapExtraneousInputError("Key '" + key + "' not allowed in prefix spec.")

    where = ""
    params = {}

    # if we have id, no other input is needed
    if 'id' in spec:
        if spec != {'id': spec['id']}:
            raise NipapExtraneousInputError("If 'id' specified, no other keys are allowed.")

    family = None
    if 'family' in spec:
        family = spec['family']
        del(spec['family'])

    # rename prefix columns
    spec2 = {}
    for k in spec:
        spec2[prefix + k] = spec[k]
    spec = spec2

    # handle keys which refer to external keys
    if prefix + 'vrf_id' in spec:
        # "translate" vrf id None to id = 0
        if spec[prefix + 'vrf_id'] is None:
            spec[prefix + 'vrf_id'] = 0

    if prefix + 'vrf_name' in spec:
        spec['vrf.name'] = spec[prefix + 'vrf_name']
        del(spec[prefix + 'vrf_name'])

    if prefix + 'vrf_rt' in spec:
        spec['vrf.rt'] = spec[prefix + 'vrf_rt']
        del(spec[prefix + 'vrf_rt'])

    if prefix + 'pool_name' in spec:
        spec['pool.name'] = spec[prefix + 'pool_name']
        del(spec[prefix + 'pool_name'])

    where, params = self._sql_expand_where(spec)

    # prefix family needs to be handled separately as it's not stored
    # explicitly in the database
    if family:
        if len(params) == 0:
            where = "family(" + prefix + "prefix) = %(family)s"
        else:
            where += " AND family(" + prefix + "prefix) = %(family)s"
        params['family'] = family

    self._logger.debug("_expand_prefix_spec; where: %s params: %s" % (where, unicode(params)))
    return where, params
python
{ "resource": "" }
q18509
Nipap._expand_prefix_query
train
def _expand_prefix_query(self, query, table_name = None):
    """ Expand prefix query dict into a WHERE-clause.

        If you need to prefix each column reference with a table
        name, that can be supplied via the table_name argument.
    """
    where = unicode()
    opt = list()

    # handle table name, can be None
    if table_name is None:
        col_prefix = ""
    else:
        col_prefix = table_name + "."

    if 'val1' not in query:
        raise NipapMissingInputError("'val1' must be specified")
    if 'val2' not in query:
        raise NipapMissingInputError("'val2' must be specified")

    if type(query['val1']) == dict and type(query['val2']) == dict:
        # Sub expression, recurse! This is used for boolean operators: AND OR
        # add parentheses

        sub_where1, opt1 = self._expand_prefix_query(query['val1'], table_name)
        sub_where2, opt2 = self._expand_prefix_query(query['val2'], table_name)
        try:
            where += unicode(" (%s %s %s) " % (sub_where1, _operation_map[query['operator']], sub_where2) )
        except KeyError:
            raise NipapNoSuchOperatorError("No such operator %s" % unicode(query['operator']))

        opt += opt1
        opt += opt2

    else:
        # TODO: raise exception if someone passes one dict and one "something else"?

        # val1 is key, val2 is value.
        if query['val1'] not in _prefix_spec:
            raise NipapInputError('Search variable \'%s\' unknown' % unicode(query['val1']))

        # build where clause
        if query['operator'] not in _operation_map:
            raise NipapNoSuchOperatorError("No such operator %s" % query['operator'])

        if query['val1'] == 'vrf_id' and query['val2'] is None:
            query['val2'] = 0

        # workaround for handling equal matches of NULL-values
        if query['operator'] == 'equals' and query['val2'] is None:
            query['operator'] = 'is'
        elif query['operator'] == 'not_equals' and query['val2'] is None:
            query['operator'] = 'is_not'

        if query['operator'] in (
                'contains',
                'contains_equals',
                'contained_within',
                'contained_within_equals'):

            where = " iprange(prefix) %(operator)s %%s " % {
                'col_prefix': col_prefix,
                'operator': _operation_map[query['operator']]
            }

        elif query['operator'] in ('equals_any',):
            where = unicode(" %%s = ANY (%s%s::citext[]) " %
                    ( col_prefix, _prefix_spec[query['val1']]['column'])
                    )

        elif query['operator'] in (
                'like',
                'regex_match',
                'regex_not_match'):
            # we COALESCE column with '' to allow for example a regexp
            # search on '.*' to match columns which are NULL in the
            # database
            where = unicode(" COALESCE(%s%s, '') %s %%s " %
                    ( col_prefix, _prefix_spec[query['val1']]['column'],
                    _operation_map[query['operator']] )
                    )

        else:
            where = unicode(" %s%s %s %%s " %
                    ( col_prefix, _prefix_spec[query['val1']]['column'],
                    _operation_map[query['operator']] )
                    )

        opt.append(query['val2'])

    return where, opt
python
{ "resource": "" }
q18510
Nipap._db_remove_prefix
train
def _db_remove_prefix(self, spec, recursive = False):
    """ Do the underlying database operations to delete a prefix
    """
    if recursive:
        prefix = spec['prefix']
        del spec['prefix']
        where, params = self._expand_prefix_spec(spec)
        spec['prefix'] = prefix
        params['prefix'] = prefix
        where = 'prefix <<= %(prefix)s AND ' + where
    else:
        where, params = self._expand_prefix_spec(spec)

    sql = "DELETE FROM ip_net_plan AS p WHERE %s" % where
    self._execute(sql, params)
python
{ "resource": "" }
q18511
Nipap.remove_prefix
train
def remove_prefix(self, auth, spec, recursive = False):
    """ Remove prefix matching `spec`.

        * `auth` [BaseAuth]
            AAA options.
        * `spec` [prefix_spec]
            Specifies prefix to remove.
        * `recursive` [bool]
            When set to True, also remove child prefixes.

        This is the documentation of the internal backend function. It's
        exposed over XML-RPC, please also see the XML-RPC documentation for
        :py:func:`nipap.xmlrpc.NipapXMLRPC.remove_prefix` for full
        understanding.
    """

    self._logger.debug("remove_prefix called; spec: %s" % unicode(spec))

    # sanity check - do we have all attributes?
    if 'id' in spec:
        # recursive requires a prefix, so translate id to prefix
        p = self.list_prefix(auth, spec)[0]
        del spec['id']
        spec['prefix'] = p['prefix']
        spec['vrf_id'] = p['vrf_id']
    elif 'prefix' in spec:
        pass
    else:
        raise NipapMissingInputError('missing prefix or id of prefix')

    prefixes = self.list_prefix(auth, spec)

    if recursive:
        spec['type'] = 'host'
        self._db_remove_prefix(spec, recursive)
        del spec['type']
        self._db_remove_prefix(spec, recursive)
    else:
        self._db_remove_prefix(spec)

    # write to audit table
    audit_params = {
        'username': auth.username,
        'authenticated_as': auth.authenticated_as,
        'full_name': auth.full_name,
        'authoritative_source': auth.authoritative_source
    }
    for p in prefixes:
        audit_params['prefix_id'] = p['id']
        audit_params['prefix_prefix'] = p['prefix']
        audit_params['description'] = 'Removed prefix %s' % p['prefix']
        audit_params['vrf_id'] = p['vrf_id']
        audit_params['vrf_rt'] = p['vrf_rt']
        audit_params['vrf_name'] = p['vrf_name']
        sql, params = self._sql_expand_insert(audit_params)
        self._execute('INSERT INTO ip_net_log %s' % sql, params)

        if p['pool_id'] is not None:
            pool = self._get_pool(auth, { 'id': p['pool_id'] })
            audit_params2 = {
                'pool_id': pool['id'],
                'pool_name': pool['name'],
                'prefix_id': p['id'],
                'prefix_prefix': p['prefix'],
                'description': 'Prefix %s removed from pool %s' % (p['prefix'], pool['name']),
                'username': auth.username,
                'authenticated_as': auth.authenticated_as,
                'full_name': auth.full_name,
                'authoritative_source': auth.authoritative_source
            }
            sql, params = self._sql_expand_insert(audit_params2)
            self._execute('INSERT INTO ip_net_log %s' % sql, params)
python
{ "resource": "" }
q18512
Nipap.smart_search_prefix
train
def smart_search_prefix(self, auth, query_str, search_options=None, extra_query=None):
    """ Perform a smart search on prefix list.

        * `auth` [BaseAuth]
            AAA options.
        * `query_str` [string]
            Search string
        * `search_options` [options_dict]
            Search options. See :func:`search_prefix`.
        * `extra_query` [dict_to_sql]
            Extra search terms, will be AND:ed together with what is
            extracted from the query string.

        Return a dict with three elements:

        * :attr:`interpretation` - How the query string was interpreted.
        * :attr:`search_options` - Various search_options.
        * :attr:`result` - The search result.

        The :attr:`interpretation` is given as a list of dicts, each
        explaining how a part of the search key was interpreted (ie. what
        prefix attribute the search operation was performed on).

        The :attr:`result` is a list of dicts containing the search result.

        The smart search function tries to convert the query from a text
        string to a `query` dict which is passed to the
        :func:`search_prefix` function. If multiple search keys are
        detected, they are combined with a logical AND.

        It tries to automatically detect IP addresses and prefixes and put
        these into the `query` dict with "contains_within" operators and so
        forth.

        See the :func:`search_prefix` function for an explanation of the
        `search_options` argument.

        This is the documentation of the internal backend function. It's
        exposed over XML-RPC, please also see the XML-RPC documentation for
        :py:func:`nipap.xmlrpc.NipapXMLRPC.smart_search_prefix` for full
        understanding.
    """
    if search_options is None:
        search_options = {}

    self._logger.debug("smart_search_prefix query string: %s" % query_str)

    success, query = self._parse_prefix_query(query_str)
    if not success:
        return {
            'interpretation': query,
            'search_options': search_options,
            'result': [],
            'error': True,
            'error_message': 'query interpretation failed'
        }

    if extra_query is not None:
        query = {
            'operator': 'and',
            'val1': query,
            'val2': extra_query
        }

    self._logger.debug("smart_search_prefix: query expanded to: %s" % unicode(query))

    search_result = self.search_prefix(auth, query, search_options)
    search_result['interpretation'] = query
    search_result['error'] = False

    return search_result
python
{ "resource": "" }
q18513
Nipap._parse_prefix_query
train
def _parse_prefix_query(self, query_str):
    """ Parse a smart search query for prefixes

        This is a helper function to smart_search_prefix for easier unit
        testing of the parser.
    """
    sp = smart_parsing.PrefixSmartParser()
    query = sp.parse(query_str)
    return query
python
{ "resource": "" }
q18514
Nipap.list_asn
train
def list_asn(self, auth, asn=None):
    """ List AS numbers matching `spec`.

        * `auth` [BaseAuth]
            AAA options.
        * `spec` [asn_spec]
            An autonomous system number specification. If omitted, all
            ASNs are returned.

        Returns a list of dicts.

        This is the documentation of the internal backend function. It's
        exposed over XML-RPC, please also see the XML-RPC documentation for
        :py:func:`nipap.xmlrpc.NipapXMLRPC.list_asn` for full understanding.
    """
    if asn is None:
        asn = {}

    self._logger.debug("list_asn called; asn: %s" % unicode(asn))

    sql = "SELECT * FROM ip_net_asn"
    params = list()

    where, params = self._expand_asn_spec(asn)
    if len(params) > 0:
        sql += " WHERE " + where

    sql += " ORDER BY asn ASC"

    self._execute(sql, params)

    res = list()
    for row in self._curs_pg:
        res.append(dict(row))

    return res
python
{ "resource": "" }
q18515
Nipap.add_asn
train
def add_asn(self, auth, attr):
    """ Add AS number to NIPAP.

        * `auth` [BaseAuth]
            AAA options.
        * `attr` [asn_attr]
            ASN attributes.

        Returns a dict describing the ASN which was added.

        This is the documentation of the internal backend function. It's
        exposed over XML-RPC, please also see the XML-RPC documentation for
        :py:func:`nipap.xmlrpc.NipapXMLRPC.add_asn` for full understanding.
    """

    self._logger.debug("add_asn called; attr: %s" % unicode(attr))

    # sanity check - do we have all attributes?
    req_attr = [ 'asn', ]
    allowed_attr = [ 'asn', 'name' ]
    self._check_attr(attr, req_attr, allowed_attr)

    insert, params = self._sql_expand_insert(attr)
    sql = "INSERT INTO ip_net_asn " + insert

    self._execute(sql, params)

    asn = self.list_asn(auth, { 'asn': attr['asn'] })[0]

    # write to audit table
    audit_params = {
        'username': auth.username,
        'authenticated_as': auth.authenticated_as,
        'full_name': auth.full_name,
        'authoritative_source': auth.authoritative_source,
        'description': 'Added ASN %s with attr: %s' % (attr['asn'], unicode(attr))
    }

    sql, params = self._sql_expand_insert(audit_params)
    self._execute('INSERT INTO ip_net_log %s' % sql, params)

    return asn
python
{ "resource": "" }
q18516
Nipap.edit_asn
train
def edit_asn(self, auth, asn, attr):
    """ Edit AS number

        * `auth` [BaseAuth]
            AAA options.
        * `asn` [integer]
            AS number to edit.
        * `attr` [asn_attr]
            New AS attributes.

        This is the documentation of the internal backend function. It's
        exposed over XML-RPC, please also see the XML-RPC documentation for
        :py:func:`nipap.xmlrpc.NipapXMLRPC.edit_asn` for full understanding.
    """

    self._logger.debug("edit_asn called; asn: %s attr: %s" %
            (unicode(asn), unicode(attr)))

    # sanity check - do we have all attributes?
    req_attr = [ ]
    allowed_attr = [ 'name', ]
    self._check_attr(attr, req_attr, allowed_attr)

    asns = self.list_asn(auth, asn)

    where, params1 = self._expand_asn_spec(asn)
    update, params2 = self._sql_expand_update(attr)
    params = dict(params2.items() + params1.items())

    sql = "UPDATE ip_net_asn SET " + update + " WHERE " + where
    sql += " RETURNING *"

    self._execute(sql, params)
    updated_asns = []
    for row in self._curs_pg:
        updated_asns.append(dict(row))

    # write to audit table
    for a in asns:
        audit_params = {
            'username': auth.username,
            'authenticated_as': auth.authenticated_as,
            'full_name': auth.full_name,
            'authoritative_source': auth.authoritative_source
        }
        audit_params['description'] = 'Edited ASN %s attr: %s' % (unicode(a['asn']), unicode(attr))

        sql, params = self._sql_expand_insert(audit_params)
        self._execute('INSERT INTO ip_net_log %s' % sql, params)

    return updated_asns
python
{ "resource": "" }
q18517
Nipap.remove_asn
train
def remove_asn(self, auth, asn):
    """ Remove an AS number.

        * `auth` [BaseAuth]
            AAA options.
        * `spec` [asn]
            An ASN specification.

        Remove ASNs matching the `asn` argument.

        This is the documentation of the internal backend function. It's
        exposed over XML-RPC, please also see the XML-RPC documentation for
        :py:func:`nipap.xmlrpc.NipapXMLRPC.remove_asn` for full
        understanding.
    """

    self._logger.debug("remove_asn called; asn: %s" % unicode(asn))

    # get list of ASNs to remove before removing them
    asns = self.list_asn(auth, asn)

    # remove
    where, params = self._expand_asn_spec(asn)
    sql = "DELETE FROM ip_net_asn WHERE " + where
    self._execute(sql, params)

    # write to audit table
    for a in asns:
        audit_params = {
            'username': auth.username,
            'authenticated_as': auth.authenticated_as,
            'full_name': auth.full_name,
            'authoritative_source': auth.authoritative_source,
            'description': 'Removed ASN %s' % unicode(a['asn'])
        }
        sql, params = self._sql_expand_insert(audit_params)
        self._execute('INSERT INTO ip_net_log %s' % sql, params)
python
{ "resource": "" }
q18518
Nipap.smart_search_asn
train
def smart_search_asn(self, auth, query_str, search_options=None, extra_query=None):
    """ Perform a smart search operation among AS numbers

        * `auth` [BaseAuth]
            AAA options.
        * `query_str` [string]
            Search string
        * `search_options` [options_dict]
            Search options. See :func:`search_asn`.
        * `extra_query` [dict_to_sql]
            Extra search terms, will be AND:ed together with what is
            extracted from the query string.

        Return a dict with three elements:

        * :attr:`interpretation` - How the query string was interpreted.
        * :attr:`search_options` - Various search_options.
        * :attr:`result` - The search result.

        The :attr:`interpretation` is given as a list of dicts, each
        explaining how a part of the search key was interpreted (ie. what
        ASN attribute the search operation was performed on).

        The :attr:`result` is a list of dicts containing the search result.

        The smart search function tries to convert the query from a text
        string to a `query` dict which is passed to the :func:`search_asn`
        function. If multiple search keys are detected, they are combined
        with a logical AND.

        See the :func:`search_asn` function for an explanation of the
        `search_options` argument.

        This is the documentation of the internal backend function. It's
        exposed over XML-RPC, please also see the XML-RPC documentation for
        :py:func:`nipap.xmlrpc.NipapXMLRPC.smart_search_asn` for full
        understanding.
    """
    if search_options is None:
        search_options = {}

    self._logger.debug("smart_search_asn called; query_str: %s" % query_str)

    success, query = self._parse_asn_query(query_str)
    if not success:
        return {
            'interpretation': query,
            'search_options': search_options,
            'result': [],
            'error': True,
            'error_message': 'query interpretation failed'
        }

    if extra_query is not None:
        query = {
            'operator': 'and',
            'val1': query,
            'val2': extra_query
        }

    self._logger.debug("smart_search_asn; query expanded to: %s" % unicode(query))

    search_result = self.search_asn(auth, query, search_options)
    search_result['interpretation'] = query
    search_result['error'] = False

    return search_result
python
{ "resource": "" }
q18519
Nipap._parse_asn_query
train
def _parse_asn_query(self, query_str):
    """ Parse a smart search query for ASNs

        This is a helper function to smart_search_asn for easier unit
        testing of the parser.
    """
    # find query parts
    query_str_parts = self._get_query_parts(query_str)

    # go through parts and add to query_parts list
    query_parts = list()
    for query_str_part in query_str_parts:

        is_int = True
        try:
            int(query_str_part['string'])
        except ValueError:
            is_int = False

        if is_int:
            self._logger.debug("Query part '" + query_str_part['string'] + "' interpreted as integer (ASN)")
            query_parts.append({
                'interpretation': {
                    'string': query_str_part['string'],
                    'interpretation': 'asn',
                    'attribute': 'asn',
                    'operator': 'equals',
                },
                'operator': 'equals',
                'val1': 'asn',
                'val2': query_str_part['string']
            })

        else:
            self._logger.debug("Query part '" + query_str_part['string'] + "' interpreted as text")
            query_parts.append({
                'interpretation': {
                    'string': query_str_part['string'],
                    'interpretation': 'text',
                    'attribute': 'name',
                    'operator': 'regex',
                },
                'operator': 'regex_match',
                'val1': 'name',
                'val2': query_str_part['string']
            })

    # Sum all query parts to one query
    query = {}
    if len(query_parts) > 0:
        query = query_parts[0]

    if len(query_parts) > 1:
        for query_part in query_parts[1:]:
            query = {
                'interpretation': {
                    'interpretation': 'and',
                    'operator': 'and',
                },
                'operator': 'and',
                'val1': query_part,
                'val2': query
            }

    return True, query
python
{ "resource": "" }
q18520
Nipap._expand_tag_query
train
def _expand_tag_query(self, query, table_name = None):
    """ Expand Tag query dict into a WHERE-clause.

        If you need to prefix each column reference with a table
        name, that can be supplied via the table_name argument.
    """
    where = unicode()
    opt = list()

    # handle table name, can be None
    if table_name is None:
        col_prefix = ""
    else:
        col_prefix = table_name + "."

    if type(query['val1']) == dict and type(query['val2']) == dict:
        # Sub expression, recurse! This is used for boolean operators: AND OR
        # add parentheses

        sub_where1, opt1 = self._expand_tag_query(query['val1'], table_name)
        sub_where2, opt2 = self._expand_tag_query(query['val2'], table_name)
        try:
            where += unicode(" (%s %s %s) " % (sub_where1, _operation_map[query['operator']], sub_where2) )
        except KeyError:
            raise NipapNoSuchOperatorError("No such operator %s" % unicode(query['operator']))

        opt += opt1
        opt += opt2

    else:
        # TODO: raise exception if someone passes one dict and one "something else"?

        # val1 is variable, val2 is string.
        tag_attr = dict()
        tag_attr['name'] = 'name'

        if query['val1'] not in tag_attr:
            raise NipapInputError('Search variable \'%s\' unknown' % unicode(query['val1']))

        # workaround for handling equal matches of NULL-values
        if query['operator'] == 'equals' and query['val2'] is None:
            query['operator'] = 'is'
        elif query['operator'] == 'not_equals' and query['val2'] is None:
            query['operator'] = 'is_not'

        # build where clause
        if query['operator'] not in _operation_map:
            raise NipapNoSuchOperatorError("No such operator %s" % query['operator'])

        where = unicode(" %s%s %s %%s " %
                ( col_prefix, tag_attr[query['val1']],
                _operation_map[query['operator']] )
                )

        opt.append(query['val2'])

    return where, opt
python
{ "resource": "" }
q18521
setup_connection
train
def setup_connection():
    """ Set up the global pynipap connection object
    """

    # get connection parameters, first from environment variables if they are
    # defined, otherwise from .nipaprc
    try:
        con_params = {
            'username': os.getenv('NIPAP_USERNAME') or cfg.get('global', 'username'),
            'password': os.getenv('NIPAP_PASSWORD') or cfg.get('global', 'password'),
            'hostname': os.getenv('NIPAP_HOST') or cfg.get('global', 'hostname'),
            'port': os.getenv('NIPAP_PORT') or cfg.get('global', 'port')
        }
    except (configparser.NoOptionError, configparser.NoSectionError) as exc:
        print("ERROR:", str(exc), file=sys.stderr)
        print("HINT: Please define the username, password, hostname and port in your .nipaprc under the section 'global' or provide them through the environment variables NIPAP_HOST, NIPAP_PORT, NIPAP_USERNAME and NIPAP_PASSWORD.", file=sys.stderr)
        sys.exit(1)

    # if we haven't got a password (from env var or config) we interactively
    # prompt for one
    if con_params['password'] is None:
        import getpass
        con_params['password'] = getpass.getpass()

    # build XML-RPC URI
    pynipap.xmlrpc_uri = "http://%(username)s:%(password)s@%(hostname)s:%(port)s" % con_params

    ao = pynipap.AuthOptions({
        'authoritative_source': 'nipap',
        'username': os.getenv('NIPAP_IMPERSONATE_USERNAME') or con_params['username'],
        'full_name': os.getenv('NIPAP_IMPERSONATE_FULL_NAME'),
    })
python
{ "resource": "" }
q18522
get_pool
train
def get_pool(arg = None, opts = None, abort = False):
    """ Returns pool to work with

        Returns a pynipap.Pool object representing the pool we are working
        with.
    """
    # yep, global variables are evil
    global pool

    try:
        pool = Pool.list({ 'name': arg })[0]
    except IndexError:
        if abort:
            print("Pool '%s' not found." % str(arg), file=sys.stderr)
            sys.exit(1)
        else:
            pool = None

    return pool
python
{ "resource": "" }
q18523
get_vrf
train
def get_vrf(arg = None, default_var = 'default_vrf_rt', abort = False):
    """ Returns VRF to work in

        Returns a pynipap.VRF object representing the VRF we are working
        in. If there is a VRF set globally, return this. If not, fetch the
        VRF named 'arg'. If 'arg' is None, fetch the default_vrf attribute
        from the config file and return this VRF.
    """
    # yep, global variables are evil
    global vrf

    # if there is a VRF set, return it
    if vrf is not None:
        return vrf

    if arg is None:
        # fetch default vrf
        try:
            vrf_rt = cfg.get('global', default_var)
        except configparser.NoOptionError:
            # default to all VRFs
            vrf_rt = 'all'
    else:
        vrf_rt = arg

    if vrf_rt.lower() == 'all':
        vrf = VRF()
        vrf.rt = 'all'
    else:
        if vrf_rt.lower() in ('-', 'none'):
            vrf_rt = None

        try:
            vrf = VRF.search({
                'val1': 'rt',
                'operator': 'equals',
                'val2': vrf_rt
            })['result'][0]
        except (KeyError, IndexError):
            if abort:
                print("VRF with [RT: %s] not found." % str(vrf_rt), file=sys.stderr)
                sys.exit(1)
            else:
                vrf = False

    return vrf
python
{ "resource": "" }
q18524
list_pool
train
def list_pool(arg, opts, shell_opts):
    """ List pools matching a search criteria
    """

    search_string = ''
    if type(arg) == list or type(arg) == tuple:
        search_string = ' '.join(arg)

    v = get_vrf(opts.get('vrf_rt'), default_var='default_list_vrf_rt', abort=True)

    if v.rt == 'all':
        vrf_q = None
    else:
        vrf_q = {
            'operator': 'equals',
            'val1': 'vrf_rt',
            'val2': v.rt
        }

    offset = 0
    limit = 100
    while True:
        res = Pool.smart_search(search_string, { 'offset': offset, 'max_result': limit }, vrf_q)

        if offset == 0: # first time in loop?
            if shell_opts.show_interpretation:
                print("Query interpretation:")
                _parse_interp_pool(res['interpretation'])

            if res['error']:
                print("Query failed: %s" % res['error_message'])
                return

            if len(res['result']) == 0:
                print("No matching pools found")
                return

            print("%-19s %-2s %-39s %-13s %-8s %s" % (
                "Name", "#", "Description", "Default type", "4 / 6", "Implied VRF"
            ))
            print("------------------------------------------------------------------------------------------------")

        for p in res['result']:
            if len(str(p.description)) > 38:
                desc = p.description[0:34] + "..."
            else:
                desc = p.description

            vrf_rt = '-'
            vrf_name = '-'
            if p.vrf is not None:
                vrf_rt = p.vrf.rt or '-'
                vrf_name = p.vrf.name

            tags = '-'
            if len(p.tags) > 0:
                tags = "#%d" % (len(p.tags))

            print("%-19s %-2s %-39s %-13s %-2s / %-3s [RT: %s] %s" % (
                p.name, tags, desc, p.default_type,
                str(p.ipv4_default_prefix_length or '-'),
                str(p.ipv6_default_prefix_length or '-'),
                vrf_rt, vrf_name
            ))

        if len(res['result']) < limit:
            break

        offset += limit
python
{ "resource": "" }
q18525
list_vrf
train
def list_vrf(arg, opts, shell_opts):
    """ List VRFs matching a search criteria
    """

    search_string = ''
    if type(arg) == list or type(arg) == tuple:
        search_string = ' '.join(arg)

    offset = 0
    limit = 100
    while True:
        res = VRF.smart_search(search_string, { 'offset': offset, 'max_result': limit })

        if offset == 0:
            if shell_opts.show_interpretation:
                print("Query interpretation:")
                _parse_interp_vrf(res['interpretation'])

            if res['error']:
                print("Query failed: %s" % res['error_message'])
                return

            if len(res['result']) == 0:
                print("No VRFs matching '%s' found." % search_string)
                return

            print("%-16s %-22s %-2s %-40s" % ("VRF RT", "Name", "#", "Description"))
            print("--------------------------------------------------------------------------------")

        for v in res['result']:
            tags = '-'
            if len(v.tags) > 0:
                tags = '#%d' % len(v.tags)

            if len(str(v.description)) > 100:
                desc = v.description[0:97] + "..."
            else:
                desc = v.description

            print("%-16s %-22s %-2s %-40s" % (v.rt or '-', v.name, tags, desc))

        if len(res['result']) < limit:
            break

        offset += limit
python
{ "resource": "" }
q18526
_prefix_from_opts
train
def _prefix_from_opts(opts):
    """ Return a prefix based on options passed from command line

        Used by add_prefix() and add_prefix_from_pool() to avoid
        duplicate parsing
    """
    p = Prefix()
    p.prefix = opts.get('prefix')
    p.type = opts.get('type')
    p.description = opts.get('description')
    p.node = opts.get('node')
    p.country = opts.get('country')
    p.order_id = opts.get('order_id')
    p.customer_id = opts.get('customer_id')
    p.alarm_priority = opts.get('alarm_priority')
    p.comment = opts.get('comment')
    p.monitor = _str_to_bool(opts.get('monitor'))
    p.vlan = opts.get('vlan')
    p.status = opts.get('status') or 'assigned' # default to assigned
    p.tags = list(csv.reader([opts.get('tags', '')], escapechar='\\'))[0]
    p.expires = opts.get('expires')

    return p
python
{ "resource": "" }
q18527
add_vrf
train
def add_vrf(arg, opts, shell_opts):
    """ Add VRF to NIPAP
    """

    v = VRF()
    v.rt = opts.get('rt')
    v.name = opts.get('name')
    v.description = opts.get('description')
    v.tags = list(csv.reader([opts.get('tags', '')], escapechar='\\'))[0]

    for avp in opts.get('extra-attribute', []):
        try:
            key, value = avp.split('=', 1)
        except ValueError:
            print("ERROR: Incorrect extra-attribute: %s. Accepted form: 'key=value'\n" % avp, file=sys.stderr)
            return
        v.avps[key] = value

    try:
        v.save()
    except pynipap.NipapError as exc:
        print("Could not add VRF to NIPAP: %s" % str(exc), file=sys.stderr)
        sys.exit(1)

    print("Added %s" % (vrf_format(v)))
python
{ "resource": "" }
q18528
view_vrf
train
def view_vrf(arg, opts, shell_opts):
    """ View a single VRF
    """

    if arg is None:
        print("ERROR: Please specify the RT of the VRF to view.", file=sys.stderr)
        sys.exit(1)

    # interpret as default VRF (ie, RT = None)
    if arg.lower() in ('-', 'none'):
        arg = None

    try:
        v = VRF.search({
            'val1': 'rt',
            'operator': 'equals',
            'val2': arg
        })['result'][0]
    except (KeyError, IndexError):
        print("VRF with [RT: %s] not found." % str(arg), file=sys.stderr)
        sys.exit(1)

    print("-- VRF")
    print("  %-26s : %d" % ("ID", v.id))
    print("  %-26s : %s" % ("RT", v.rt))
    print("  %-26s : %s" % ("Name", v.name))
    print("  %-26s : %s" % ("Description", v.description))

    print("-- Extra Attributes")
    if v.avps is not None:
        for key in sorted(v.avps, key=lambda s: s.lower()):
            print("  %-26s : %s" % (key, v.avps[key]))

    print("-- Tags")
    for tag_name in sorted(v.tags, key=lambda s: s.lower()):
        print("  %s" % tag_name)

    # statistics
    if v.total_addresses_v4 == 0:
        used_percent_v4 = 0
    else:
        used_percent_v4 = (float(v.used_addresses_v4)/v.total_addresses_v4)*100
    if v.total_addresses_v6 == 0:
        used_percent_v6 = 0
    else:
        used_percent_v6 = (float(v.used_addresses_v6)/v.total_addresses_v6)*100

    print("-- Statistics")
    print("  %-26s : %s" % ("IPv4 prefixes", v.num_prefixes_v4))
    print("  %-26s : %.0f / %.0f (%.2f%% of %.0f)" % ("IPv4 addresses Used / Free",
            v.used_addresses_v4, v.free_addresses_v4, used_percent_v4,
            v.total_addresses_v4))
    print("  %-26s : %s" % ("IPv6 prefixes", v.num_prefixes_v6))
    print("  %-26s : %.4e / %.4e (%.2f%% of %.4e)" % ("IPv6 addresses Used / Free",
            v.used_addresses_v6, v.free_addresses_v6, used_percent_v6,
            v.total_addresses_v6))
python
{ "resource": "" }
q18529
modify_vrf
train
def modify_vrf(arg, opts, shell_opts):
    """ Modify a VRF with the options set in opts
    """

    res = VRF.list({ 'rt': arg })
    if len(res) < 1:
        print("VRF with [RT: %s] not found." % arg, file=sys.stderr)
        sys.exit(1)

    v = res[0]

    if 'rt' in opts:
        v.rt = opts['rt']
    if 'name' in opts:
        v.name = opts['name']
    if 'description' in opts:
        v.description = opts['description']
    if 'tags' in opts:
        tags = list(csv.reader([opts.get('tags', '')], escapechar='\\'))[0]
        v.tags = {}
        for tag_name in tags:
            tag = Tag()
            tag.name = tag_name
            v.tags[tag_name] = tag

    for avp in opts.get('extra-attribute', []):
        try:
            key, value = avp.split('=', 1)
        except ValueError:
            print("ERROR: Incorrect extra-attribute: %s. Accepted form: 'key=value'\n" % avp, file=sys.stderr)
            return
        v.avps[key] = value

    v.save()

    print("%s saved." % vrf_format(v))
python
{ "resource": "" }
q18530
modify_pool
train
def modify_pool(arg, opts, shell_opts):
    """ Modify a pool with the options set in opts
    """

    res = Pool.list({ 'name': arg })
    if len(res) < 1:
        print("No pool with name '%s' found." % arg, file=sys.stderr)
        sys.exit(1)

    p = res[0]

    if 'name' in opts:
        p.name = opts['name']
    if 'description' in opts:
        p.description = opts['description']
    if 'default-type' in opts:
        p.default_type = opts['default-type']
    if 'ipv4_default_prefix_length' in opts:
        p.ipv4_default_prefix_length = opts['ipv4_default_prefix_length']
    if 'ipv6_default_prefix_length' in opts:
        p.ipv6_default_prefix_length = opts['ipv6_default_prefix_length']
    if 'tags' in opts:
        tags = list(csv.reader([opts.get('tags', '')], escapechar='\\'))[0]
        p.tags = {}
        for tag_name in tags:
            tag = Tag()
            tag.name = tag_name
            p.tags[tag_name] = tag

    for avp in opts.get('extra-attribute', []):
        try:
            key, value = avp.split('=', 1)
        except ValueError:
            print("ERROR: Incorrect extra-attribute: %s. Accepted form: 'key=value'\n" % avp, file=sys.stderr)
            return
        p.avps[key] = value

    p.save()

    print("Pool '%s' saved." % p.name)
python
{ "resource": "" }
q18531
grow_pool
train
def grow_pool(arg, opts, shell_opts):
    """ Expand a pool with the ranges set in opts
    """

    if not pool:
        print("No pool with name '%s' found." % arg, file=sys.stderr)
        sys.exit(1)

    if 'add' not in opts:
        print("Please supply a prefix to add to pool '%s'" % pool.name, file=sys.stderr)
        sys.exit(1)

    # Figure out VRF.
    # If pool already has a member prefix, implied_vrf will be set. Look for new
    # prefix to add in the same vrf as implied_vrf.
    # If pool has no members, then use get_vrf() to get vrf to search in for
    # prefix to add.
    if pool.vrf is not None:
        v = pool.vrf
    else:
        v = get_vrf(opts.get('vrf_rt'), abort=True)

    q = { 'prefix': opts['add'] }
    if v.rt != 'all':
        q['vrf_rt'] = v.rt

    res = Prefix.list(q)
    if len(res) == 0:
        print("No prefix found matching %s in %s." % (opts['add'], vrf_format(v)), file=sys.stderr)
        sys.exit(1)
    elif res[0].pool:
        if res[0].pool == pool:
            print("Prefix %s in %s is already assigned to that pool." % (opts['add'], vrf_format(v)), file=sys.stderr)
        else:
            print("Prefix %s in %s is already assigned to a different pool ('%s')." % (opts['add'], vrf_format(v), res[0].pool.name), file=sys.stderr)
        sys.exit(1)

    res[0].pool = pool
    res[0].save()

    print("Prefix %s in %s added to pool '%s'." % (res[0].prefix, vrf_format(v), pool.name))
python
{ "resource": "" }
q18532
shrink_pool
train
def shrink_pool(arg, opts, shell_opts):
    """ Shrink a pool by removing the ranges in opts from it
    """

    if not pool:
        print("No pool with name '%s' found." % arg, file=sys.stderr)
        sys.exit(1)

    if 'remove' in opts:
        res = Prefix.list({'prefix': opts['remove'], 'pool_id': pool.id})

        if len(res) == 0:
            print("Pool '%s' does not contain %s." % (pool.name, opts['remove']), file=sys.stderr)
            sys.exit(1)

        res[0].pool = None
        res[0].save()
        print("Prefix %s removed from pool '%s'." % (res[0].prefix, pool.name))
    else:
        print("Please supply a prefix to add or remove to '%s':" % (pool.name), file=sys.stderr)
        for pref in Prefix.list({'pool_id': pool.id}):
            print("  %s" % pref.prefix)
python
{ "resource": "" }
q18533
prefix_attr_add
train
def prefix_attr_add(arg, opts, shell_opts):
    """ Add attributes to a prefix
    """

    spec = { 'prefix': arg }
    v = get_vrf(opts.get('vrf_rt'), abort=True)
    spec['vrf_rt'] = v.rt

    res = Prefix.list(spec)
    if len(res) == 0:
        print("Prefix %s not found in %s." % (arg, vrf_format(v)), file=sys.stderr)
        return

    p = res[0]

    for avp in opts.get('extra-attribute', []):
        try:
            key, value = avp.split('=', 1)
        except ValueError:
            print("ERROR: Incorrect extra-attribute: %s. Accepted form: 'key=value'\n" % avp, file=sys.stderr)
            sys.exit(1)

        if key in p.avps:
            print("Unable to add extra-attribute: '%s' already exists." % key, file=sys.stderr)
            sys.exit(1)
        p.avps[key] = value

    try:
        p.save()
    except NipapError as exc:
        print("Could not save prefix changes: %s" % str(exc), file=sys.stderr)
        sys.exit(1)

    print("Prefix %s in %s saved." % (p.display_prefix, vrf_format(p.vrf)))
python
{ "resource": "" }
q18534
vrf_attr_add
train
def vrf_attr_add(arg, opts, shell_opts):
    """ Add attributes to a VRF
    """

    if arg is None:
        print("ERROR: Please specify the RT of the VRF to edit.", file=sys.stderr)
        sys.exit(1)

    # interpret as default VRF (ie, RT = None)
    if arg.lower() in ('-', 'none'):
        arg = None

    try:
        v = VRF.search({
            'val1': 'rt',
            'operator': 'equals',
            'val2': arg
        })['result'][0]
    except (KeyError, IndexError):
        print("VRF with [RT: %s] not found." % str(arg), file=sys.stderr)
        sys.exit(1)

    for avp in opts.get('extra-attribute', []):
        try:
            key, value = avp.split('=', 1)
        except ValueError:
            print("ERROR: Incorrect extra-attribute: %s. Accepted form: 'key=value'\n" % avp, file=sys.stderr)
            sys.exit(1)

        if key in v.avps:
            print("Unable to add extra-attribute: '%s' already exists." % key, file=sys.stderr)
            sys.exit(1)
        v.avps[key] = value

    try:
        v.save()
    except NipapError as exc:
        print("Could not save VRF changes: %s" % str(exc), file=sys.stderr)
        sys.exit(1)

    print("%s saved." % vrf_format(v))
python
{ "resource": "" }
q18535
pool_attr_add
train
def pool_attr_add(arg, opts, shell_opts):
    """ Add attributes to a pool
    """

    res = Pool.list({ 'name': arg })
    if len(res) < 1:
        print("No pool with name '%s' found." % arg, file=sys.stderr)
        sys.exit(1)

    p = res[0]

    for avp in opts.get('extra-attribute', []):
        try:
            key, value = avp.split('=', 1)
        except ValueError:
            print("ERROR: Incorrect extra-attribute: %s. Accepted form: 'key=value'\n" % avp, file=sys.stderr)
            sys.exit(1)

        if key in p.avps:
            print("Unable to add extra-attribute: '%s' already exists." % key, file=sys.stderr)
            sys.exit(1)
        p.avps[key] = value

    try:
        p.save()
    except NipapError as exc:
        print("Could not save pool changes: %s" % str(exc), file=sys.stderr)
        sys.exit(1)

    print("Pool '%s' saved." % p.name)
python
{ "resource": "" }
q18536
_complete_string
train
def _complete_string(key, haystack):
    """ Returns valid string completions

        Takes the string 'key' and compares it to each of the strings in
        'haystack'. The ones which begin with 'key' are returned as the
        result.
    """
    if len(key) == 0:
        return haystack

    match = []
    for straw in haystack:
        if string.find(straw, key) == 0:
            match.append(straw)
    return match
python
{ "resource": "" }
q18537
complete_tags
train
def complete_tags(arg):
    """ Complete NIPAP tag names
    """
    search_string = '^'
    if arg is not None:
        search_string += arg

    res = Tag.search({
        'operator': 'regex_match',
        'val1': 'name',
        'val2': search_string
    })

    ret = []
    for t in res['result']:
        ret.append(t.name)

    return ret
python
{ "resource": "" }
q18538
complete_pool_members
train
def complete_pool_members(arg):
    """ Complete member prefixes of pool
    """
    # pool should already be globally set
    res = []
    for member in Prefix.list({ 'pool_id': pool.id }):
        res.append(member.prefix)

    return _complete_string(arg, res)
python
{ "resource": "" }
q18539
complete_node
train
def complete_node(arg):
    """ Complete node hostname

        This function is currently a bit special as it looks in the config
        file for a command to use to complete a node hostname from an
        external system.

        It is configured by setting the config attribute "complete_node_cmd"
        to a shell command. The string "%search_string%" in the command will
        be replaced by the current search string.
    """

    # get complete command from config
    try:
        cmd = cfg.get('global', 'complete_node_cmd')
    except configparser.NoOptionError:
        return [ '', ]

    cmd = re.sub('%search_string%', pipes.quote(arg), cmd)

    args = shlex.split(cmd)
    p = subprocess.Popen(args, stdout=subprocess.PIPE)
    res, err = p.communicate()

    nodes = res.split('\n')
    return nodes
python
{ "resource": "" }
q18540
complete_pool_name
train
def complete_pool_name(arg):
    """ Returns list of matching pool names
    """
    search_string = '^'
    if arg is not None:
        search_string += arg

    res = Pool.search({
        'operator': 'regex_match',
        'val1': 'name',
        'val2': search_string
    })

    ret = []
    for p in res['result']:
        ret.append(p.name)

    return ret
python
{ "resource": "" }
q18541
SmartParser._string_to_ast
train
def _string_to_ast(self, input_string):
    """ Parse a smart search string and return it in an AST like form
    """

    # simple words
    # we need to use a regex to match on words because the regular
    # Word(alphanums) will only match on American ASCII alphanums and since
    # we try to be Unicode / internationally friendly we need to match much
    # much more. Trying to expand a word class to catch it all seems futile
    # so we match on everything *except* a few things, like our operators
    comp_word = Regex("[^*\s=><~!]+")
    word = Regex("[^*\s=><~!]+").setResultsName('word')
    # numbers
    comp_number = Word(nums)
    number = Word(nums).setResultsName('number')

    # IPv4 address
    ipv4_oct = Regex("((2(5[0-5]|[0-4][0-9])|[01]?[0-9][0-9]?))")
    comp_ipv4_address = Combine(ipv4_oct + ('.' + ipv4_oct*3))
    ipv4_address = Combine(ipv4_oct + ('.' + ipv4_oct*3)).setResultsName('ipv4_address')

    # IPv6 address
    ipv6_address = Regex("((([0-9A-Fa-f]{1,4}:){7}([0-9A-Fa-f]{1,4}|:))|(([0-9A-Fa-f]{1,4}:){6}(:[0-9A-Fa-f]{1,4}|((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){5}(((:[0-9A-Fa-f]{1,4}){1,2})|:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){4}(((:[0-9A-Fa-f]{1,4}){1,3})|((:[0-9A-Fa-f]{1,4})?:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){3}(((:[0-9A-Fa-f]{1,4}){1,4})|((:[0-9A-Fa-f]{1,4}){0,2}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){2}(((:[0-9A-Fa-f]{1,4}){1,5})|((:[0-9A-Fa-f]{1,4}){0,3}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){1}(((:[0-9A-Fa-f]{1,4}){1,6})|((:[0-9A-Fa-f]{1,4}){0,4}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(:(((:[0-9A-Fa-f]{1,4}){1,7})|((:[0-9A-Fa-f]{1,4}){0,5}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:)))(%.+)?").setResultsName('ipv6_address')
    ipv6_prefix = Combine(ipv6_address + Regex("/(12[0-8]|1[01][0-9]|[0-9][0-9]?)")).setResultsName('ipv6_prefix')

    # VRF RTs of the form number:number
    vrf_rt = Combine((comp_ipv4_address | comp_number) + Literal(':') + comp_number).setResultsName('vrf_rt')

    # tags
    tags = Combine( Literal('#') + comp_word).setResultsName('tag')

    # operators for matching
    match_op = oneOf(' '.join(self.match_operators)).setResultsName('operator')
    boolean_op = oneOf(' '.join(self.boolean_operators)).setResultsName('boolean')

    # quoted string
    d_quoted_string = QuotedString('"', unquoteResults=True, escChar='\\')
    s_quoted_string = QuotedString('\'', unquoteResults=True, escChar='\\')
    quoted_string = (s_quoted_string | d_quoted_string).setResultsName('quoted_string')

    # expression to match a certain value for an attribute
    expression = Group(word + match_op + (quoted_string | vrf_rt | word | number)).setResultsName('expression')

    # we work on atoms, which are single quoted strings, match expressions,
    # tags, VRF RT or simple words.
    # NOTE: Place them in order of most exact match first!
    atom = Group(ipv6_prefix | ipv6_address | quoted_string | expression | tags | vrf_rt | boolean_op | word)

    enclosed = Forward()
    parens = nestedExpr('(', ')', content=enclosed)
    enclosed << ( parens | atom ).setResultsName('nested')

    content = Forward()
    content << ( ZeroOrMore(enclosed) )

    res = content.parseString(input_string)

    return res
python
{ "resource": "" }
q18542
Command._examine_key
train
def _examine_key(self, key_name, key_val, p, i, option_parsing):
    """ Examine the current matching key

        Extracts information, such as function to execute and command
        options, from the current key (passed to function as 'key_name'
        and 'key_val').
    """

    # if the element we reached has an executable registered, save it!
    if 'exec' in key_val:
        self.exe = key_val['exec']

    # simple bool options, save value
    if 'type' in key_val and key_val['type'] == 'bool':
        self.exe_options[key_name] = True

    # Elements which take arguments need special attention
    if 'argument' in key_val:

        # is there an argument (the next element)?
        if len(self.inp_cmd) > i+1:

            self.key = { 'argument': key_val['argument'] }

            # there is - save it
            if key_val['type'] == 'option':
                # if argument is of type multiple, store result in a list
                if 'multiple' in key_val and key_val['multiple'] == True:
                    if key_name not in self.exe_options:
                        self.exe_options[key_name] = []
                    self.exe_options[key_name].append(self.inp_cmd[i+1])
                else:
                    self.exe_options[key_name] = self.inp_cmd[i+1]
            else:
                self.arg = self.inp_cmd[i+1]

            # Validate the argument if possible
            if 'validator' in key_val['argument']:
                self.key_complete = key_val['argument']['validator'](self.inp_cmd[i+1])
            else:
                self.key_complete = True

            # if there are sub parameters, add them
            if 'children' in key_val:
                self.children = key_val['children']
            # If we reached a command without parameters (which
            # should be the end of the command), unset the children
            # dict.
            elif key_val['type'] == 'command':
                self.children = None

            # if the command is finished (there is an element after the argument) and
            # there is an exec_immediately-function, execute it now
            if 'exec_immediately' in key_val and len(self.inp_cmd) > i+2:
                key_val['exec_immediately'](self.inp_cmd[i+1], self.exe_options)
                # clear exe_options as these were options for exec_immediately
                self.exe_options = {}

            i += 1

        else:
            # if there is no next element, let key_complete be true
            # and set children to the option argument
            self.children = { 'argument': key_val['argument'] }

        # remove option from further tab completion as it has been filled in,
        # unless it has the 'multiple' key set, which means it can be filled
        # in multiple times and will return a list of all values
        if option_parsing and p == key_name and key_name in self.children:
            # if multiple, then pass
            if 'multiple' in self.children[key_name] and self.children[key_name]['multiple'] == True:
                pass
            else:
                del self.children[key_name]

    # otherwise we are handling a command without arguments
    else:
        # Rest arguments?
        if 'rest_argument' in key_val:
            self._scoop_rest_arguments = True
            self.arg = []

        self.children = key_val.get('children')

    if self.exe is not None:
        option_parsing = True

    return i, option_parsing
python
{ "resource": "" }
q18543
Command.parse_cmd
train
def parse_cmd(self, tree, inp_cmd = None):
    """ Extract command and options from string.

        The tree argument should contain a specifically formatted dict
        which describes the available commands, options, arguments and
        callbacks to methods for completion of arguments.
        TODO: document dict format

        The inp_cmd argument should contain a list of strings containing
        the complete command to parse, such as sys.argv (without the first
        element which specified the command itself).
    """

    # reset state from previous execution
    self.exe = None
    self.arg = None
    self.exe_options = {}
    self.children = tree['children']
    self.key = tree['children']

    option_parsing = False
    self._scoop_rest_arguments = False

    if inp_cmd is not None:
        self.inp_cmd = inp_cmd

    # iterate the list of inputted commands
    i = 0
    while i < len(self.inp_cmd):
        p = self.inp_cmd[i]
        self.key = {}

        # Find which of the valid commands matches the current element of inp_cmd
        if self.children is not None:
            self.key_complete = False
            match = False
            for param, content in self.children.items():

                # match string to command
                if param.find(p) == 0:
                    self.key[param] = content
                    match = True

                    # If we have an exact match, make sure that
                    # is the only element in self.key
                    if p == param and len(self.inp_cmd) > i+1:
                        self.key_complete = True
                        self.key = { param: content }
                        break

            # if we are in scoop-rest-mode, place elements not matching
            # anything in argument-array
            if not match:
                if self._scoop_rest_arguments:
                    self.arg.append(p)
                else:
                    raise InvalidCommand("Invalid argument: " + p)
        else:
            raise InvalidCommand('ran out of parameters; command too long')

        # Note that there are two reasons self.key can contain entries:
        # 1) The current string (p) contained something and matched a param
        # 2) The current string (p) is empty and matches all children
        # If p is empty we don't really have a match but still need to
        # have data in self.key to show all possible completions at this
        # level. Therefore, we skip the command matching stuff when
        # len(p) == 0
        if len(p) != 0 and len(self.key) == 1:
            key, val = list(self.key.items())[0]
            i, option_parsing = self._examine_key(key, val, p, i, option_parsing)

        i += 1
python
{ "resource": "" }
q18544
Command.complete
train
def complete(self):
    """ Return list of valid completions

        Returns a list of valid completions on the current level in the
        tree. If an element of type 'value' is found, its complete
        callback function is called (if set).
    """
    comp = []

    for k, v in self.key.items():
        # if we have reached a value, try to fetch valid completions
        if v['type'] == 'value':
            if 'complete' in v:
                comp += v['complete'](self.inp_cmd[-1])
        # otherwise, k is our valid completion
        else:
            comp.append(k)

    return comp
python
{ "resource": "" }
q18545
Command.next_values
train
def next_values(self):
    """ Return list of valid next values
    """
    nval = []

    for k, v in self.children.items():
        # if we have reached a value, try to fetch valid completions
        if v['type'] == 'value':
            if 'complete' in v:
                nval += v['complete']('')
        # otherwise, k is our valid completion
        else:
            nval.append(k)

    return nval
python
{ "resource": "" }
q18546
PoolController.remove
train
def remove(self, id):
    """ Remove pool.
    """
    p = Pool.get(int(id))
    p.remove()
    redirect(url(controller = 'pool', action = 'list'))
python
{ "resource": "" }
q18547
PoolController.remove_prefix
train
def remove_prefix(self, id):
    """ Remove a prefix from pool 'id'.
    """
    if 'prefix' not in request.params:
        abort(400, 'Missing prefix.')

    prefix = Prefix.get(int(request.params['prefix']))
    prefix.pool = None
    prefix.save()

    redirect(url(controller = 'pool', action = 'edit', id = id))
python
{ "resource": "" }
q18548
add_ip_to_net
train
def add_ip_to_net(networks, host):
    """ Add hosts from ipplan to networks object.
    """
    for network in networks:
        if host['ipaddr'] in network['network']:
            network['hosts'].append(host)
    return
python
{ "resource": "" }
q18549
get_networks
train
def get_networks(base_file, ipaddr_file):
    """ Gather network and host information from ipplan export files.
    """
    networks = []

    base = open(base_file, 'r')
    csv_reader = csv.reader(base, delimiter='\t')
    buffer = ""
    for row in csv_reader:
        # Fixes quotation bug in ipplan exporter for base.txt
        if len(networks) > 0 and len(buffer) > 0:
            networks[-1]['comment'] += " ".join(buffer)
            buffer = ""
        if len(row) < 3:
            buffer = row
        else:
            network = {
                'network': ipaddress.ip_network("{}/{}".format(row[0], row[2])),
                'description': row[1],
                'hosts': [],
                'comment': ""
            }
            if len(row) > 3:
                network['additional'] = " ".join(row[3:])
            networks.append(network)
    base.close()

    ipaddr = open(ipaddr_file, 'r')
    csv_reader = csv.reader(ipaddr, delimiter='\t')
    for row in csv_reader:
        host = {
            'ipaddr': ipaddress.ip_address(row[0]),
            'user': row[1],
            'location': row[2],
            'description': row[3],
            'fqdn': row[4],
            'phone': row[5],
            'mac': row[6]
        }
        if len(row) > 7:
            host['additional'] = " ".join(row[7:])
        add_ip_to_net(networks, host)
    ipaddr.close()

    return networks
python
{ "resource": "" }
q18550
add_prefix
train
def add_prefix(network):
    """ Populate a Prefix object with network data from ipplan. """

    p = new_prefix()
    p.prefix = str(network['network'])
    p.type = "assignment"
    p.description = network['description']
    p.tags = ['ipplan-import']
    p.comment = ""
    if 'additional' in network:
        p.comment += network['additional']
    if len(network['comment']) > 0:
        p.comment += network['comment']
    return p
python
{ "resource": "" }
q18551
add_host
train
def add_host(host):
    """ Populate a Prefix object with host data from ipplan. """

    p = new_prefix()
    p.prefix = str(host['ipaddr'])
    p.type = "host"
    p.description = host['description']
    p.node = host['fqdn']
    p.avps = {}

    # Use remaining data from ipplan to populate comment field.
    if 'additional' in host:
        p.comment = host['additional']

    # Use specific info to create extra attributes.
    if len(host['location']) > 0:
        p.avps['location'] = host['location']
    if len(host['mac']) > 0:
        p.avps['mac'] = host['mac']
    if len(host['phone']) > 0:
        p.avps['phone'] = host['phone']
    if len(host['user']) > 0:
        p.avps['user'] = host['user']
    return p
python
{ "resource": "" }
q18552
nipapd_version
train
def nipapd_version(): """ Get version of nipapd we're connected to. Maps to the function :py:func:`nipap.xmlrpc.NipapXMLRPC.version` in the XML-RPC API. Please see the documentation for the XML-RPC function for information regarding the return value. """ xmlrpc = XMLRPCConnection() try: return xmlrpc.connection.version( { 'auth': AuthOptions().options }) except xmlrpclib.Fault as xml_fault: raise _fault_to_exception(xml_fault)
python
{ "resource": "" }
q18553
nipap_db_version
train
def nipap_db_version(): """ Get schema version of database we're connected to. Maps to the function :py:func:`nipap.backend.Nipap._get_db_version` in the backend. Please see the documentation for the backend function for information regarding the return value. """ xmlrpc = XMLRPCConnection() try: return xmlrpc.connection.db_version( { 'auth': AuthOptions().options }) except xmlrpclib.Fault as xml_fault: raise _fault_to_exception(xml_fault)
python
{ "resource": "" }
q18554
_fault_to_exception
train
def _fault_to_exception(f):
    """ Converts XML-RPC Fault objects to Pynipap-exceptions.

        TODO: Is this one necessary? Could be done inline...
    """

    e = _fault_to_exception_map.get(f.faultCode)
    if e is None:
        e = NipapError
    return e(f.faultString)
python
{ "resource": "" }
q18555
Tag.from_dict
train
def from_dict(cls, tag=None): """ Create new Tag-object from dict. Suitable for creating objects from XML-RPC data. All available keys must exist. """ if tag is None: tag = {} l = Tag() l.name = tag['name'] return l
python
{ "resource": "" }
q18556
Tag.search
train
def search(cls, query, search_opts=None): """ Search tags. For more information, see the backend function :py:func:`nipap.backend.Nipap.search_tag`. """ if search_opts is None: search_opts = {} xmlrpc = XMLRPCConnection() try: search_result = xmlrpc.connection.search_tag( { 'query': query, 'search_options': search_opts, 'auth': AuthOptions().options }) except xmlrpclib.Fault as xml_fault: raise _fault_to_exception(xml_fault) result = dict() result['result'] = [] result['search_options'] = search_result['search_options'] for xml_tag in search_result['result']: result['result'].append(Tag.from_dict(xml_tag)) return result
python
{ "resource": "" }
q18557
VRF.list
train
def list(cls, vrf=None): """ List VRFs. Maps to the function :py:func:`nipap.backend.Nipap.list_vrf` in the backend. Please see the documentation for the backend function for information regarding input arguments and return values. """ if vrf is None: vrf = {} xmlrpc = XMLRPCConnection() try: vrf_list = xmlrpc.connection.list_vrf( { 'vrf': vrf, 'auth': AuthOptions().options }) except xmlrpclib.Fault as xml_fault: raise _fault_to_exception(xml_fault) res = list() for v in vrf_list: res.append(VRF.from_dict(v)) return res
python
{ "resource": "" }
q18558
VRF.from_dict
train
def from_dict(cls, parm, vrf = None): """ Create new VRF-object from dict. Suitable for creating objects from XML-RPC data. All available keys must exist. """ if vrf is None: vrf = VRF() vrf.id = parm['id'] vrf.rt = parm['rt'] vrf.name = parm['name'] vrf.description = parm['description'] vrf.tags = {} for tag_name in parm['tags']: tag = Tag.from_dict({'name': tag_name }) vrf.tags[tag_name] = tag vrf.avps = parm['avps'] vrf.num_prefixes_v4 = int(parm['num_prefixes_v4']) vrf.num_prefixes_v6 = int(parm['num_prefixes_v6']) vrf.total_addresses_v4 = int(parm['total_addresses_v4']) vrf.total_addresses_v6 = int(parm['total_addresses_v6']) vrf.used_addresses_v4 = int(parm['used_addresses_v4']) vrf.used_addresses_v6 = int(parm['used_addresses_v6']) vrf.free_addresses_v4 = int(parm['free_addresses_v4']) vrf.free_addresses_v6 = int(parm['free_addresses_v6']) return vrf
python
{ "resource": "" }
q18559
VRF.get
train
def get(cls, id): """ Get the VRF with id 'id'. """ # cached? if CACHE: if id in _cache['VRF']: log.debug('cache hit for VRF %d' % id) return _cache['VRF'][id] log.debug('cache miss for VRF %d' % id) try: vrf = VRF.list({ 'id': id })[0] except IndexError: raise NipapNonExistentError('no VRF with ID ' + str(id) + ' found') _cache['VRF'][id] = vrf return vrf
python
{ "resource": "" }
q18560
VRF.search
train
def search(cls, query, search_opts=None): """ Search VRFs. Maps to the function :py:func:`nipap.backend.Nipap.search_vrf` in the backend. Please see the documentation for the backend function for information regarding input arguments and return values. """ if search_opts is None: search_opts = {} xmlrpc = XMLRPCConnection() try: search_result = xmlrpc.connection.search_vrf( { 'query': query, 'search_options': search_opts, 'auth': AuthOptions().options }) except xmlrpclib.Fault as xml_fault: raise _fault_to_exception(xml_fault) result = dict() result['result'] = [] result['search_options'] = search_result['search_options'] for v in search_result['result']: result['result'].append(VRF.from_dict(v)) return result
python
{ "resource": "" }
q18561
VRF.save
train
def save(self): """ Save changes made to object to NIPAP. If the object represents a new VRF unknown to NIPAP (attribute `id` is `None`) this function maps to the function :py:func:`nipap.backend.Nipap.add_vrf` in the backend, used to create a new VRF. Otherwise it maps to the function :py:func:`nipap.backend.Nipap.edit_vrf` in the backend, used to modify the VRF. Please see the documentation for the backend functions for information regarding input arguments and return values. """ xmlrpc = XMLRPCConnection() data = { 'rt': self.rt, 'name': self.name, 'description': self.description, 'tags': [], 'avps': self.avps } for tag_name in self.tags: data['tags'].append(tag_name) if self.id is None: # New object, create try: vrf = xmlrpc.connection.add_vrf( { 'attr': data, 'auth': self._auth_opts.options }) except xmlrpclib.Fault as xml_fault: raise _fault_to_exception(xml_fault) else: # Old object, edit try: vrfs = xmlrpc.connection.edit_vrf( { 'vrf': { 'id': self.id }, 'attr': data, 'auth': self._auth_opts.options }) except xmlrpclib.Fault as xml_fault: raise _fault_to_exception(xml_fault) if len(vrfs) != 1: raise NipapError('VRF edit returned %d entries, should be 1.' % len(vrfs)) vrf = vrfs[0] # Refresh object data with attributes from add/edit operation VRF.from_dict(vrf, self) _cache['VRF'][self.id] = self
python
{ "resource": "" }
q18562
VRF.remove
train
def remove(self): """ Remove VRF. Maps to the function :py:func:`nipap.backend.Nipap.remove_vrf` in the backend. Please see the documentation for the backend function for information regarding input arguments and return values. """ xmlrpc = XMLRPCConnection() try: xmlrpc.connection.remove_vrf( { 'vrf': { 'id': self.id }, 'auth': self._auth_opts.options }) except xmlrpclib.Fault as xml_fault: raise _fault_to_exception(xml_fault) if self.id in _cache['VRF']: del(_cache['VRF'][self.id])
python
{ "resource": "" }
q18563
Pool.save
train
def save(self): """ Save changes made to pool to NIPAP. If the object represents a new pool unknown to NIPAP (attribute `id` is `None`) this function maps to the function :py:func:`nipap.backend.Nipap.add_pool` in the backend, used to create a new pool. Otherwise it maps to the function :py:func:`nipap.backend.Nipap.edit_pool` in the backend, used to modify the pool. Please see the documentation for the backend functions for information regarding input arguments and return values. """ xmlrpc = XMLRPCConnection() data = { 'name': self.name, 'description': self.description, 'default_type': self.default_type, 'ipv4_default_prefix_length': self.ipv4_default_prefix_length, 'ipv6_default_prefix_length': self.ipv6_default_prefix_length, 'tags': [], 'avps': self.avps } for tag_name in self.tags: data['tags'].append(tag_name) if self.id is None: # New object, create try: pool = xmlrpc.connection.add_pool( { 'attr': data, 'auth': self._auth_opts.options }) except xmlrpclib.Fault as xml_fault: raise _fault_to_exception(xml_fault) else: # Old object, edit try: pools = xmlrpc.connection.edit_pool( { 'pool': { 'id': self.id }, 'attr': data, 'auth': self._auth_opts.options }) except xmlrpclib.Fault as xml_fault: raise _fault_to_exception(xml_fault) if len(pools) != 1: raise NipapError('Pool edit returned %d entries, should be 1.' % len(pools)) pool = pools[0] # Refresh object data with attributes from add/edit operation Pool.from_dict(pool, self) _cache['Pool'][self.id] = self
python
{ "resource": "" }
q18564
Pool.get
train
def get(cls, id): """ Get the pool with id 'id'. """ # cached? if CACHE: if id in _cache['Pool']: log.debug('cache hit for pool %d' % id) return _cache['Pool'][id] log.debug('cache miss for pool %d' % id) try: pool = Pool.list({'id': id})[0] except (IndexError, KeyError): raise NipapNonExistentError('no pool with ID ' + str(id) + ' found') _cache['Pool'][id] = pool return pool
python
{ "resource": "" }
q18565
Pool.search
train
def search(cls, query, search_opts=None): """ Search pools. Maps to the function :py:func:`nipap.backend.Nipap.search_pool` in the backend. Please see the documentation for the backend function for information regarding input arguments and return values. """ if search_opts is None: search_opts = {} xmlrpc = XMLRPCConnection() try: search_result = xmlrpc.connection.search_pool( { 'query': query, 'search_options': search_opts, 'auth': AuthOptions().options }) except xmlrpclib.Fault as xml_fault: raise _fault_to_exception(xml_fault) result = dict() result['result'] = [] result['search_options'] = search_result['search_options'] for pool in search_result['result']: p = Pool.from_dict(pool) result['result'].append(p) return result
python
{ "resource": "" }
q18566
Pool.from_dict
train
def from_dict(cls, parm, pool = None): """ Create new Pool-object from dict. Suitable for creating objects from XML-RPC data. All available keys must exist. """ if pool is None: pool = Pool() pool.id = parm['id'] pool.name = parm['name'] pool.description = parm['description'] pool.default_type = parm['default_type'] pool.ipv4_default_prefix_length = parm['ipv4_default_prefix_length'] pool.ipv6_default_prefix_length = parm['ipv6_default_prefix_length'] for val in ('member_prefixes_v4', 'member_prefixes_v6', 'used_prefixes_v4', 'used_prefixes_v6', 'free_prefixes_v4', 'free_prefixes_v6', 'total_prefixes_v4', 'total_prefixes_v6', 'total_addresses_v4', 'total_addresses_v6', 'used_addresses_v4', 'used_addresses_v6', 'free_addresses_v4', 'free_addresses_v6'): if parm[val] is not None: setattr(pool, val, int(parm[val])) pool.tags = {} for tag_name in parm['tags']: tag = Tag.from_dict({'name': tag_name }) pool.tags[tag_name] = tag pool.avps = parm['avps'] # store VRF object in pool.vrf if parm['vrf_id'] is not None: pool.vrf = VRF.get(parm['vrf_id']) return pool
python
{ "resource": "" }
q18567
Pool.list
train
def list(cls, spec=None):
    """ List pools.

        Maps to the function :py:func:`nipap.backend.Nipap.list_pool` in
        the backend. Please see the documentation for the backend function
        for information regarding input arguments and return values.
    """
    if spec is None:
        spec = {}

    xmlrpc = XMLRPCConnection()
    try:
        pool_list = xmlrpc.connection.list_pool(
            {
                'pool': spec,
                'auth': AuthOptions().options
            })
    except xmlrpclib.Fault as xml_fault:
        raise _fault_to_exception(xml_fault)

    res = list()
    for pool in pool_list:
        p = Pool.from_dict(pool)
        res.append(p)

    return res
python
{ "resource": "" }
q18568
Prefix.get
train
def get(cls, id): """ Get the prefix with id 'id'. """ # cached? if CACHE: if id in _cache['Prefix']: log.debug('cache hit for prefix %d' % id) return _cache['Prefix'][id] log.debug('cache miss for prefix %d' % id) try: prefix = Prefix.list({'id': id})[0] except IndexError: raise NipapNonExistentError('no prefix with ID ' + str(id) + ' found') _cache['Prefix'][id] = prefix return prefix
python
{ "resource": "" }
q18569
Prefix.find_free
train
def find_free(cls, vrf, args):
    """ Finds a free prefix.

        Maps to the function
        :py:func:`nipap.backend.Nipap.find_free_prefix` in the backend.
        Please see the documentation for the backend function for
        information regarding input arguments and return values.
    """

    xmlrpc = XMLRPCConnection()
    q = {
        'args': args,
        'auth': AuthOptions().options
    }

    # sanity checks
    if isinstance(vrf, VRF):
        q['vrf'] = { 'id': vrf.id }
    elif vrf is None:
        q['vrf'] = None
    else:
        raise NipapValueError('vrf parameter must be instance of VRF class')

    # run XML-RPC query
    try:
        find_res = xmlrpc.connection.find_free_prefix(q)
    except xmlrpclib.Fault as xml_fault:
        raise _fault_to_exception(xml_fault)

    return find_res
python
{ "resource": "" }
q18570
Prefix.search
train
def search(cls, query, search_opts=None): """ Search for prefixes. Maps to the function :py:func:`nipap.backend.Nipap.search_prefix` in the backend. Please see the documentation for the backend function for information regarding input arguments and return values. """ if search_opts is None: search_opts = {} xmlrpc = XMLRPCConnection() try: search_result = xmlrpc.connection.search_prefix( { 'query': query, 'search_options': search_opts, 'auth': AuthOptions().options }) except xmlrpclib.Fault as xml_fault: raise _fault_to_exception(xml_fault) result = dict() result['result'] = [] result['search_options'] = search_result['search_options'] for prefix in search_result['result']: p = Prefix.from_dict(prefix) result['result'].append(p) return result
python
{ "resource": "" }
q18571
Prefix.smart_search
train
def smart_search(cls, query_string, search_options=None, extra_query = None): """ Perform a smart prefix search. Maps to the function :py:func:`nipap.backend.Nipap.smart_search_prefix` in the backend. Please see the documentation for the backend function for information regarding input arguments and return values. """ if search_options is None: search_options = {} xmlrpc = XMLRPCConnection() try: smart_result = xmlrpc.connection.smart_search_prefix( { 'query_string': query_string, 'search_options': search_options, 'auth': AuthOptions().options, 'extra_query': extra_query }) except xmlrpclib.Fault as xml_fault: raise _fault_to_exception(xml_fault) result = dict() result['interpretation'] = smart_result['interpretation'] result['search_options'] = smart_result['search_options'] result['error'] = smart_result['error'] if 'error_message' in smart_result: result['error_message'] = smart_result['error_message'] result['result'] = list() for prefix in smart_result['result']: p = Prefix.from_dict(prefix) result['result'].append(p) return result
python
{ "resource": "" }
q18572
Prefix.list
train
def list(cls, spec=None): """ List prefixes. Maps to the function :py:func:`nipap.backend.Nipap.list_prefix` in the backend. Please see the documentation for the backend function for information regarding input arguments and return values. """ if spec is None: spec = {} xmlrpc = XMLRPCConnection() try: pref_list = xmlrpc.connection.list_prefix( { 'prefix': spec, 'auth': AuthOptions().options }) except xmlrpclib.Fault as xml_fault: raise _fault_to_exception(xml_fault) res = list() for pref in pref_list: p = Prefix.from_dict(pref) res.append(p) return res
python
{ "resource": "" }
q18573
Prefix.save
train
def save(self, args=None):
    """ Save prefix to NIPAP.

        If the object represents a new prefix unknown to NIPAP (attribute
        `id` is `None`) this function maps to the function
        :py:func:`nipap.backend.Nipap.add_prefix` in the backend, used to
        create a new prefix. Otherwise it maps to the function
        :py:func:`nipap.backend.Nipap.edit_prefix` in the backend, used to
        modify the prefix. Please see the documentation for the backend
        functions for information regarding input arguments and return
        values.
    """

    if args is None:
        args = {}

    xmlrpc = XMLRPCConnection()
    data = {
        'description': self.description,
        'comment': self.comment,
        'tags': [],
        'node': self.node,
        'type': self.type,
        'country': self.country,
        'order_id': self.order_id,
        'customer_id': self.customer_id,
        'external_key': self.external_key,
        'alarm_priority': self.alarm_priority,
        'monitor': self.monitor,
        'vlan': self.vlan,
        'avps': self.avps,
        'expires': self.expires
    }

    if self.status is not None:
        data['status'] = self.status

    for tag_name in self.tags:
        data['tags'].append(tag_name)

    if self.vrf is not None:
        if not isinstance(self.vrf, VRF):
            raise NipapValueError("'vrf' attribute not instance of VRF class.")
        data['vrf_id'] = self.vrf.id

    # Prefix can be none if we are creating a new prefix
    # from a pool or other prefix!
    if self.prefix is not None:
        data['prefix'] = self.prefix

    if self.pool is None:
        data['pool_id'] = None
    else:
        if not isinstance(self.pool, Pool):
            raise NipapValueError("'pool' attribute not instance of Pool class.")
        data['pool_id'] = self.pool.id

    # New object, create from scratch
    if self.id is None:

        # format args
        x_args = {}
        if 'from-pool' in args:
            x_args['from-pool'] = { 'id': args['from-pool'].id }
        if 'family' in args:
            x_args['family'] = args['family']
        if 'from-prefix' in args:
            x_args['from-prefix'] = args['from-prefix']
        if 'prefix_length' in args:
            x_args['prefix_length'] = args['prefix_length']

        try:
            prefix = xmlrpc.connection.add_prefix(
                {
                    'attr': data,
                    'args': x_args,
                    'auth': self._auth_opts.options
                })
        except xmlrpclib.Fault as xml_fault:
            raise _fault_to_exception(xml_fault)

    # Old object, edit
    else:
        # Add authoritative source to data
        data['authoritative_source'] = self.authoritative_source

        try:
            # save
            prefixes = xmlrpc.connection.edit_prefix(
                {
                    'prefix': { 'id': self.id },
                    'attr': data,
                    'auth': self._auth_opts.options
                })
        except xmlrpclib.Fault as xml_fault:
            raise _fault_to_exception(xml_fault)

        if len(prefixes) != 1:
            raise NipapError('Prefix edit returned %d entries, should be 1.' % len(prefixes))

        prefix = prefixes[0]

    # Refresh object data with attributes from add/edit operation
    Prefix.from_dict(prefix, self)

    # update cache
    _cache['Prefix'][self.id] = self
    if self.pool is not None:
        if self.pool.id in _cache['Pool']:
            del _cache['Pool'][self.pool.id]
python
{ "resource": "" }
q18574
Prefix.remove
train
def remove(self, recursive = False): """ Remove the prefix. Maps to the function :py:func:`nipap.backend.Nipap.remove_prefix` in the backend. Please see the documentation for the backend function for information regarding input arguments and return values. """ xmlrpc = XMLRPCConnection() try: xmlrpc.connection.remove_prefix( { 'prefix': { 'id': self.id }, 'recursive': recursive, 'auth': self._auth_opts.options }) except xmlrpclib.Fault as xml_fault: raise _fault_to_exception(xml_fault) # update cache if self.id in _cache['Prefix']: del(_cache['Prefix'][self.id]) if self.pool is not None: if self.pool.id in _cache['Pool']: del _cache['Pool'][self.pool.id]
python
{ "resource": "" }
q18575
Prefix.from_dict
train
def from_dict(cls, pref, prefix = None): """ Create a Prefix object from a dict. Suitable for creating Prefix objects from XML-RPC input. """ if prefix is None: prefix = Prefix() prefix.id = pref['id'] if pref['vrf_id'] is not None: # VRF is not mandatory prefix.vrf = VRF.get(pref['vrf_id']) prefix.family = pref['family'] prefix.prefix = pref['prefix'] prefix.display_prefix = pref['display_prefix'] prefix.description = pref['description'] prefix.comment = pref['comment'] prefix.node = pref['node'] if pref['pool_id'] is not None: # Pool is not mandatory prefix.pool = Pool.get(pref['pool_id']) prefix.type = pref['type'] prefix.indent = pref['indent'] prefix.country = pref['country'] prefix.order_id = pref['order_id'] prefix.customer_id = pref['customer_id'] prefix.external_key = pref['external_key'] prefix.authoritative_source = pref['authoritative_source'] prefix.alarm_priority = pref['alarm_priority'] prefix.monitor = pref['monitor'] prefix.vlan = pref['vlan'] prefix.added = pref['added'] prefix.last_modified = pref['last_modified'] prefix.total_addresses = int(pref['total_addresses']) prefix.used_addresses = int(pref['used_addresses']) prefix.free_addresses = int(pref['free_addresses']) prefix.status = pref['status'] prefix.avps = pref['avps'] prefix.expires = pref['expires'] prefix.inherited_tags = {} for tag_name in pref['inherited_tags']: tag = Tag.from_dict({'name': tag_name }) prefix.inherited_tags[tag_name] = tag prefix.tags = {} for tag_name in pref['tags']: tag = Tag.from_dict({'name': tag_name }) prefix.tags[tag_name] = tag if 'match' in pref: prefix.match = pref['match'] if 'display' in pref: prefix.display = pref['display'] if 'children' in pref: prefix.children = pref['children'] return prefix
python
{ "resource": "" }
q18576
VrfController.edit
train
def edit(self, id): """ Edit a VRF """ c.action = 'edit' c.edit_vrf = VRF.get(int(id)) # Did we have any action passed to us? if 'action' in request.params: if request.params['action'] == 'edit': if request.params['rt'].strip() == '': c.edit_vrf.rt = None else: c.edit_vrf.rt = request.params['rt'].strip() if request.params['name'].strip() == '': c.edit_vrf.name = None else: c.edit_vrf.name = request.params['name'].strip() c.edit_vrf.description = request.params['description'] c.edit_vrf.save() return render('/vrf_edit.html')
python
{ "resource": "" }
q18577
VrfController.remove
train
def remove(self, id): """ Removes a VRF. """ v = VRF.get(int(id)) v.remove() redirect(url(controller='vrf', action='list'))
python
{ "resource": "" }
q18578
setup_app
train
def setup_app(command, conf, vars): """Place any commands to setup nipapwww here""" # Don't reload the app if it was loaded under the testing environment if not pylons.test.pylonsapp: load_environment(conf.global_conf, conf.local_conf)
python
{ "resource": "" }
q18579
PrefixController.edit
train
def edit(self, id): """ Edit a prefix. """ # find prefix c.prefix = Prefix.get(int(id)) # we got a HTTP POST - edit object if request.method == 'POST': c.prefix.prefix = request.params['prefix_prefix'] c.prefix.description = request.params['prefix_description'] if request.params['prefix_node'].strip() == '': c.prefix.node = None else: c.prefix.node = request.params['prefix_node'] if request.params['prefix_country'].strip() == '': c.prefix.country = None else: c.prefix.country = request.params['prefix_country'] if request.params['prefix_comment'].strip() == '': c.prefix.comment = None else: c.prefix.comment = request.params['prefix_comment'] if request.params['prefix_order_id'].strip() == '': c.prefix.order_id = None else: c.prefix.order_id = request.params['prefix_order_id'] if request.params['prefix_customer_id'].strip() == '': c.prefix.customer_id = None else: c.prefix.customer_id = request.params['prefix_customer_id'] if request.params['prefix_vrf'].strip() == '': c.prefix.vrf = None else: # TODO: handle non-existent VRF... c.prefix.vrf = VRF.list({ 'rt': request.params['prefix_vrf'] })[0] if request.params.get('prefix_monitor') is not None: c.prefix.monitor = True else: c.prefix.monitor = False c.prefix.alarm_priority = request.params['prefix_alarm_priority'] c.prefix.save() redirect(url(controller='prefix', action='list')) return render('/prefix_edit.html')
python
{ "resource": "" }
q18580
ConfigExport.get_prefixes
train
def get_prefixes(self, query):
    """ Get prefix data from NIPAP """
    try:
        res = Prefix.smart_search(query, {})
    except socket.error:
        print("Connection refused, please check hostname & port", file=sys.stderr)
        sys.exit(1)
    except xmlrpclib.ProtocolError:
        print("Authentication failed, please check your username / password", file=sys.stderr)
        sys.exit(1)
    for p in res['result']:
        p.prefix_ipy = IPy.IP(p.prefix)
        self.prefixes.append(p)
python
{ "resource": "" }
q18581
ConfigExport.write_conf
train
def write_conf(self):
    """ Write the config to file """
    with open(self.output_filename, 'w') as f:
        print(self.t.render(prefixes=self.prefixes), file=f)
python
{ "resource": "" }
q18582
_mangle_prefix
train
def _mangle_prefix(res): """ Mangle prefix result """ # fugly cast from large numbers to string to deal with XML-RPC res['total_addresses'] = unicode(res['total_addresses']) res['used_addresses'] = unicode(res['used_addresses']) res['free_addresses'] = unicode(res['free_addresses']) # postgres has notion of infinite while datetime hasn't, if expires # is equal to the max datetime we assume it is infinity and instead # represent that as None if res['expires'].tzinfo is None: res['expires'] = pytz.utc.localize(res['expires']) if res['expires'] == pytz.utc.localize(datetime.datetime.max): res['expires'] = None return res
python
{ "resource": "" }
q18583
requires_auth
train
def requires_auth(f):
    """ Decorator for XML-RPC methods that require authentication
    """
    @wraps(f)
    def decorated(self, *args, **kwargs):
        # Fetch auth options from args
        auth_options = {}
        nipap_args = {}

        # validate function arguments
        if len(args) == 1:
            nipap_args = args[0]
        else:
            self.logger.debug("Malformed request: got %d parameters" % len(args))
            raise Fault(1000, ("NIPAP API functions take exactly 1 argument (%d given)") % len(args))

        if type(nipap_args) != dict:
            self.logger.debug("Function argument is not struct")
            raise Fault(1000, ("Function argument must be XML-RPC struct/Python dict (Python %s given)." %
                type(nipap_args).__name__ ))

        # fetch auth options
        try:
            auth_options = nipap_args['auth']
            if type(auth_options) is not dict:
                raise ValueError()
        except (KeyError, ValueError):
            self.logger.debug("Missing/invalid authentication options in request.")
            raise Fault(1000, ("Missing/invalid authentication options in request."))

        # fetch authoritative source
        try:
            auth_source = auth_options['authoritative_source']
        except KeyError:
            self.logger.debug("Missing authoritative source in auth options.")
            raise Fault(1000, ("Missing authoritative source in auth options."))

        if not request.authorization:
            return authenticate()

        # init AuthFactory()
        af = AuthFactory()
        auth = af.get_auth(request.authorization.username,
                request.authorization.password, auth_source, auth_options or {})

        # authenticated?
        if not auth.authenticate():
            self.logger.debug("Incorrect username or password.")
            raise Fault(1510, ("Incorrect username or password."))

        # Replace auth options in API call arguments with auth object
        new_args = dict(args[0])
        new_args['auth'] = auth

        return f(self, *(new_args,), **kwargs)

    return decorated
python
{ "resource": "" }
q18584
NipapXMLRPC.echo
train
def echo(self, args):
    """ An echo function

        An API test function which simply echoes what is passed in the
        'message' element of the args-dict.

        Valid keys in the `args`-struct:

        * `auth` [struct]
            Authentication options passed to the :class:`AuthFactory`.
        * `message` [string]
            String to echo.
        * `sleep` [integer]
            Number of seconds to sleep before echoing.

        Returns a string.
    """
    if args.get('sleep'):
        time.sleep(args.get('sleep'))

    if args.get('message') is not None:
        return args.get('message')
python
{ "resource": "" }
q18585
NipapXMLRPC.find_free_prefix
train
def find_free_prefix(self, args): """ Find a free prefix. Valid keys in the `args`-struct: * `auth` [struct] Authentication options passed to the :class:`AuthFactory`. * `args` [struct] Arguments for the find_free_prefix-function such as what prefix or pool to allocate from. """ try: return self.nip.find_free_prefix(args.get('auth'), args.get('vrf'), args.get('args')) except NipapError as exc: self.logger.debug(unicode(exc)) raise Fault(exc.error_code, unicode(exc))
python
{ "resource": "" }
q18586
NewsFile._readfile
train
def _readfile(self, filename):
    """ Read content of specified NEWS file
    """
    with open(filename) as f:
        self.content = f.readlines()
python
{ "resource": "" }
q18587
DchFile._parse
train
def _parse(self):
    """ Parse content of DCH file
    """
    cur_ver = None
    cur_entry = None
    for line in self.content:
        m = re.match(r'[^ ]+ \(([0-9]+\.[0-9]+\.[0-9]+)-[0-9]+\) [^ ]+; urgency=[^ ]+', line)
        if m:
            cur_ver = m.group(1)
            self.versions.append(cur_ver)
            self.entries[cur_ver] = []
            cur_entry = self.entries[cur_ver]
            if self.latest_version is None or StrictVersion(cur_ver) > StrictVersion(self.latest_version):
                self.latest_version = m.group(1)
        elif cur_ver:
            m = re.match(r' \* (.*)', line)
            if m:
                cur_entry.append(m.group(1).strip())
            elif not re.match('$', line) and re.match(' *[^$]+', line):
                cur_entry[-1] += " " + line.strip()
python
{ "resource": "" }
q18588
JsonFormatter.format
train
def format(self, record: logging.LogRecord) -> str:
    """
    Formats a record and serializes it as a JSON str. If the record
    message isn't already a dict, initializes a new dict and uses
    `default_msg_fieldname` as the key and the record msg as the value.
    """
    msg: Union[str, dict] = record.msg
    if not isinstance(record.msg, dict):
        msg = {self.default_msg_fieldname: msg}

    if record.exc_info:  # type: ignore
        msg["exc_info"] = record.exc_info
    if record.exc_text:  # type: ignore
        msg["exc_text"] = record.exc_text  # type: ignore

    return self.serializer(msg, default=self._default_handler)
python
{ "resource": "" }
q18589
BaseAsyncRotatingFileHandler.rotation_filename
train
def rotation_filename(self, default_name: str) -> str: """ Modify the filename of a log file when rotating. This is provided so that a custom filename can be provided. :param default_name: The default name for the log file. """ if self.namer is None: return default_name return self.namer(default_name)
python
{ "resource": "" }
q18590
BaseAsyncRotatingFileHandler.rotate
train
async def rotate(self, source: str, dest: str): """ When rotating, rotate the current log. The default implementation calls the 'rotator' attribute of the handler, if it's callable, passing the source and dest arguments to it. If the attribute isn't callable (the default is None), the source is simply renamed to the destination. :param source: The source filename. This is normally the base filename, e.g. 'test.log' :param dest: The destination filename. This is normally what the source is rotated to, e.g. 'test.log.1'. """ if self.rotator is None: # logging issue 18940: A file may not have been created if delay is True. if await self.loop.run_in_executor( None, lambda: os.path.exists(source) ): await self.loop.run_in_executor( # type: ignore None, lambda: os.rename(source, dest) ) else: self.rotator(source, dest)
python
{ "resource": "" }
q18591
AsyncTimedRotatingFileHandler.compute_rollover
train
def compute_rollover(self, current_time: int) -> int:
    """
    Work out the rollover time based on the specified time.

    If we are rolling over at midnight or weekly, then the interval is
    already known. What we need to figure out is WHEN the next interval
    is. In other words, if you are rolling over at midnight, then your
    base interval is 1 day, but you want to start that one day clock at
    midnight, not now. So, we have to fudge the `rollover_at` value in
    order to trigger the first rollover at the right time. After that,
    the regular interval will take care of the rest. Note that this code
    doesn't care about leap seconds. :)
    """
    result = current_time + self.interval
    if (
        self.when == RolloverInterval.MIDNIGHT
        or self.when in RolloverInterval.WEEK_DAYS
    ):
        if self.utc:
            t = time.gmtime(current_time)
        else:
            t = time.localtime(current_time)
        current_hour = t[3]
        current_minute = t[4]
        current_second = t[5]
        current_day = t[6]
        # r is the number of seconds left between now and the next rotation
        if self.at_time is None:
            rotate_ts = ONE_DAY_IN_SECONDS
        else:
            rotate_ts = (
                self.at_time.hour * 60 + self.at_time.minute
            ) * 60 + self.at_time.second

        r = rotate_ts - (
            (current_hour * 60 + current_minute) * 60 + current_second
        )
        if r < 0:
            # Rotate time is before the current time (for example when
            # self.rotateAt is 13:45 and it is now 14:15), rotation is
            # tomorrow.
            r += ONE_DAY_IN_SECONDS
            current_day = (current_day + 1) % 7
        result = current_time + r

        # If we are rolling over on a certain day, add in the number of days until
        # the next rollover, but offset by 1 since we just calculated the time
        # until the next day starts. There are three cases:
        # Case 1) The day to rollover is today; in this case, do nothing
        # Case 2) The day to rollover is further in the interval (i.e., today is
        #         day 2 (Wednesday) and rollover is on day 6 (Sunday). Days to
        #         next rollover is simply 6 - 2 - 1, or 3.
        # Case 3) The day to rollover is behind us in the interval (i.e., today
        #         is day 5 (Saturday) and rollover is on day 3 (Thursday).
        #         Days to rollover is 6 - 5 + 3, or 4. In this case, it's the
        #         number of days left in the current week (1) plus the number
        #         of days in the next week until the rollover day (3).
        # The calculations described in 2) and 3) above need to have a day added.
        # This is because the above time calculation takes us to midnight on this
        # day, i.e. the start of the next day.
        if self.when in RolloverInterval.WEEK_DAYS:
            day = current_day  # 0 is Monday
            if day != self.day_of_week:
                if day < self.day_of_week:
                    days_to_wait = self.day_of_week - day
                else:
                    days_to_wait = 6 - day + self.day_of_week + 1
                new_rollover_at = result + (days_to_wait * (60 * 60 * 24))
                if not self.utc:
                    dst_now = t[-1]
                    dst_at_rollover = time.localtime(new_rollover_at)[-1]
                    if dst_now != dst_at_rollover:
                        if not dst_now:
                            # DST kicks in before next rollover, so we need to deduct an hour
                            addend = -ONE_HOUR_IN_SECONDS
                        else:
                            # DST bows out before next rollover, so we need to add an hour
                            addend = ONE_HOUR_IN_SECONDS
                        new_rollover_at += addend
                result = new_rollover_at
    return result
python
{ "resource": "" }
q18592
AsyncTimedRotatingFileHandler.get_files_to_delete
train
async def get_files_to_delete(self) -> List[str]: """ Determine the files to delete when rolling over. """ dir_name, base_name = os.path.split(self.absolute_file_path) file_names = await self.loop.run_in_executor( None, lambda: os.listdir(dir_name) ) result = [] prefix = base_name + "." plen = len(prefix) for file_name in file_names: if file_name[:plen] == prefix: suffix = file_name[plen:] if self.ext_match.match(suffix): result.append(os.path.join(dir_name, file_name)) if len(result) < self.backup_count: return [] else: return result[: len(result) - self.backup_count]
python
{ "resource": "" }
q18593
AsyncStreamHandler.handle
train
async def handle(self, record: LogRecord) -> bool: # type: ignore """ Conditionally emit the specified logging record. Emission depends on filters which may have been added to the handler. """ rv = self.filter(record) if rv: await self.emit(record) return rv
python
{ "resource": "" }
q18594
AsyncStreamHandler.emit
train
async def emit(self, record: LogRecord): # type: ignore """ Actually log the specified logging record to the stream. """ if self.writer is None: self.writer = await self._init_writer() try: msg = self.format(record) + self.terminator self.writer.write(msg.encode()) await self.writer.drain() except Exception: await self.handleError(record)
python
{ "resource": "" }
q18595
Logger.callHandlers
train
async def callHandlers(self, record): """ Pass a record to all relevant handlers. Loop through all handlers for this logger and its parents in the logger hierarchy. If no handler was found, raises an error. Stop searching up the hierarchy whenever a logger with the "propagate" attribute set to zero is found - that will be the last logger whose handlers are called. """ c = self found = 0 while c: for handler in c.handlers: found = found + 1 if record.levelno >= handler.level: await handler.handle(record) if not c.propagate: c = None # break out else: c = c.parent if found == 0: raise Exception("No handlers could be found for logger")
python
{ "resource": "" }
q18596
Logger.handle
train
async def handle(self, record): """ Call the handlers for the specified record. This method is used for unpickled records received from a socket, as well as those created locally. Logger-level filtering is applied. """ if (not self.disabled) and self.filter(record): await self.callHandlers(record)
python
{ "resource": "" }
q18597
Logger._make_log_task
train
def _make_log_task(self, level, msg, *args, **kwargs) -> Task: """ Creates an asyncio.Task for a msg if logging is enabled for level. Returns a dummy task otherwise. """ if not self.isEnabledFor(level): if self._dummy_task is None: self._dummy_task = self.__make_dummy_task() return self._dummy_task if kwargs.get("exc_info", False): if not isinstance(kwargs["exc_info"], BaseException): kwargs["exc_info"] = sys.exc_info() coro = self._log( # type: ignore level, msg, *args, caller=self.findCaller(False), **kwargs ) return self.loop.create_task(coro)
python
{ "resource": "" }
q18598
Logger.debug
train
def debug(self, msg, *args, **kwargs) -> Task: # type: ignore """ Log msg with severity 'DEBUG'. To pass exception information, use the keyword argument exc_info with a true value, e.g. await logger.debug("Houston, we have a %s", "thorny problem", exc_info=1) """ return self._make_log_task(logging.DEBUG, msg, args, **kwargs)
python
{ "resource": "" }
q18599
Logger.info
train
def info(self, msg, *args, **kwargs) -> Task: # type: ignore """ Log msg with severity 'INFO'. To pass exception information, use the keyword argument exc_info with a true value, e.g. await logger.info("Houston, we have an interesting problem", exc_info=1) """ return self._make_log_task(logging.INFO, msg, args, **kwargs)
python
{ "resource": "" }