| _id (string, lengths 2-7) | title (string, lengths 1-88) | partition (3 classes) | text (string, lengths 75-19.8k) | language (1 class) | meta_information (dict) |
|---|---|---|---|---|---|
q23700
|
gradient
|
train
|
def gradient(f, **kwargs):
"""Calculate the gradient of a grid of values.
Works for both regularly-spaced data, and grids with varying spacing.
Either `coordinates` or `deltas` must be specified, or `f` must be given as an
`xarray.DataArray` with attached coordinate and projection information. If `f` is an
`xarray.DataArray`, and `coordinates` or `deltas` are given, `f` will be converted to a
`pint.Quantity` and the gradient returned as a tuple of `pint.Quantity`, otherwise, if
neither `coordinates` nor `deltas` are given, the attached coordinate information belonging
to `axis` will be used and the gradient will be returned as a tuple of `xarray.DataArray`.
Parameters
----------
f : array-like
Array of values of which to calculate the derivative
coordinates : array-like, optional
Sequence of arrays containing the coordinate values corresponding to the
grid points in `f` in axis order.
deltas : array-like, optional
Sequence of arrays or scalars that specify the spacing between the grid points in `f`
in axis order. There should be one item less than the size of `f` along the applicable
axis.
axes : sequence, optional
Sequence of strings (if `f` is a `xarray.DataArray` and implicit conversion to
`pint.Quantity` is not used) or integers that specify the array axes along which to
take the derivatives. Defaults to all axes of `f`. If given, and used with
`coordinates` or `deltas`, its length must be less than or equal to that of the
`coordinates` or `deltas` given.
Returns
-------
tuple of array-like
The first derivative calculated along each specified axis of the original array
See Also
--------
laplacian, first_derivative
Notes
-----
`gradient` previously accepted `x` as a parameter for coordinate values. This has been
deprecated in 0.9 in favor of `coordinates`.
If this function is used without the `axes` parameter, the length of `coordinates` or
`deltas` (as applicable) should match the number of dimensions of `f`.
"""
pos_kwarg, positions, axes = _process_gradient_args(f, kwargs)
return tuple(first_derivative(f, axis=axis, **{pos_kwarg: positions[ind]})
for ind, axis in enumerate(axes))
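
# Hedged usage sketch (not part of the original source). It assumes this function is
# exposed as metpy.calc.gradient and that plain NumPy input with per-axis deltas is
# accepted; some MetPy versions may expect pint quantities instead.
import numpy as np
from metpy.calc import gradient

f = np.arange(12.0).reshape(3, 4)
d_axis0, d_axis1 = gradient(f, deltas=(100.0, 50.0))  # one spacing value per axis, in axis order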
|
python
|
{
"resource": ""
}
|
q23701
|
laplacian
|
train
|
def laplacian(f, **kwargs):
"""Calculate the laplacian of a grid of values.
Works for both regularly-spaced data, and grids with varying spacing.
Either `coordinates` or `deltas` must be specified, or `f` must be given as an
`xarray.DataArray` with attached coordinate and projection information. If `f` is an
`xarray.DataArray`, and `coordinates` or `deltas` are given, `f` will be converted to a
`pint.Quantity` and the laplacian returned as a `pint.Quantity`, otherwise, if
neither `coordinates` nor `deltas` are given, the attached coordinate information belonging
to `axis` will be used and the laplacian will be returned as an `xarray.DataArray`.
Parameters
----------
f : array-like
Array of values of which to calculate the derivative
coordinates : array-like, optional
The coordinate values corresponding to the grid points in `f`
deltas : array-like, optional
Spacing between the grid points in `f`. There should be one item less than the size
of `f` along the applicable axis.
axes : sequence, optional
Sequence of strings (if `f` is a `xarray.DataArray` and implicit conversion to
`pint.Quantity` is not used) or integers that specify the array axes along which to
take the derivatives. Defaults to all axes of `f`. If given, and used with
`coordinates` or `deltas`, its length must be less than or equal to that of the
`coordinates` or `deltas` given.
Returns
-------
array-like
The laplacian
See Also
--------
gradient, second_derivative
Notes
-----
`laplacian` previously accepted `x` as a parameter for coordinate values. This has been
deprecated in 0.9 in favor of `coordinates`.
If this function is used without the `axes` parameter, the length of `coordinates` or
`deltas` (as applicable) should match the number of dimensions of `f`.
"""
pos_kwarg, positions, axes = _process_gradient_args(f, kwargs)
derivs = [second_derivative(f, axis=axis, **{pos_kwarg: positions[ind]})
for ind, axis in enumerate(axes)]
laplac = sum(derivs)
if isinstance(derivs[0], xr.DataArray):
# Patch in the units that are dropped
laplac.attrs['units'] = derivs[0].attrs['units']
return laplac
|
python
|
{
"resource": ""
}
|
q23702
|
_broadcast_to_axis
|
train
|
def _broadcast_to_axis(arr, axis, ndim):
"""Handle reshaping coordinate array to have proper dimensionality.
This puts the values along the specified axis.
"""
if arr.ndim == 1 and arr.ndim < ndim:
new_shape = [1] * ndim
new_shape[axis] = arr.size
arr = arr.reshape(*new_shape)
return arr
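
# Hedged illustration (not from the original source): a 1-D coordinate array of length 5
# broadcast to axis=1 of a 3-D field is reshaped to (1, 5, 1), so NumPy broadcasting
# lines it up with that axis while leaving the other axes untouched.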
|
python
|
{
"resource": ""
}
|
q23703
|
_process_gradient_args
|
train
|
def _process_gradient_args(f, kwargs):
"""Handle common processing of arguments for gradient and gradient-like functions."""
axes = kwargs.get('axes', range(f.ndim))
def _check_length(positions):
if 'axes' in kwargs and len(positions) < len(axes):
raise ValueError('Length of "coordinates" or "deltas" cannot be less than that '
'of "axes".')
elif 'axes' not in kwargs and len(positions) != len(axes):
raise ValueError('Length of "coordinates" or "deltas" must match the number of '
'dimensions of "f" when "axes" is not given.')
if 'deltas' in kwargs:
if 'coordinates' in kwargs or 'x' in kwargs:
raise ValueError('Cannot specify both "coordinates" and "deltas".')
_check_length(kwargs['deltas'])
return 'delta', kwargs['deltas'], axes
elif 'coordinates' in kwargs:
_check_length(kwargs['coordinates'])
return 'x', kwargs['coordinates'], axes
elif 'x' in kwargs:
warnings.warn('The use of "x" as a parameter for coordinate values has been '
'deprecated. Use "coordinates" instead.', metpyDeprecation)
_check_length(kwargs['x'])
return 'x', kwargs['x'], axes
elif isinstance(f, xr.DataArray):
return 'pass', axes, axes # only the axis argument matters
else:
raise ValueError('Must specify either "coordinates" or "deltas" for value positions '
'when "f" is not a DataArray.')
|
python
|
{
"resource": ""
}
|
q23704
|
_process_deriv_args
|
train
|
def _process_deriv_args(f, kwargs):
"""Handle common processing of arguments for derivative functions."""
n = f.ndim
axis = normalize_axis_index(kwargs.get('axis', 0), n)
if f.shape[axis] < 3:
raise ValueError('f must have at least 3 points along the desired axis.')
if 'delta' in kwargs:
if 'x' in kwargs:
raise ValueError('Cannot specify both "x" and "delta".')
delta = atleast_1d(kwargs['delta'])
if delta.size == 1:
diff_size = list(f.shape)
diff_size[axis] -= 1
delta_units = getattr(delta, 'units', None)
delta = np.broadcast_to(delta, diff_size, subok=True)
if delta_units is not None:
delta = delta * delta_units
else:
delta = _broadcast_to_axis(delta, axis, n)
elif 'x' in kwargs:
x = _broadcast_to_axis(kwargs['x'], axis, n)
delta = diff(x, axis=axis)
else:
raise ValueError('Must specify either "x" or "delta" for value positions.')
return n, axis, delta
|
python
|
{
"resource": ""
}
|
q23705
|
parse_angle
|
train
|
def parse_angle(input_dir):
"""Calculate the meteorological angle from directional text.
Works for abbreviations or whole words (E -> 90 | South -> 180)
and is also able to parse 22.5 degree angles such as ESE/East South East
Parameters
----------
input_dir : string or array-like strings
Directional text such as west, [south-west, ne], etc
Returns
-------
angle
The angle in degrees
"""
if isinstance(input_dir, str):
# abb_dirs = abbreviated directions
abb_dirs = [_abbrieviate_direction(input_dir)]
elif isinstance(input_dir, list):
input_dir_str = ','.join(input_dir)
abb_dir_str = _abbrieviate_direction(input_dir_str)
abb_dirs = abb_dir_str.split(',')
return itemgetter(*abb_dirs)(DIR_DICT)
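
# Hedged illustration (not from the original source), assuming DIR_DICT maps abbreviated
# directions to meteorological degrees: parse_angle('E') -> 90.0, parse_angle('South') -> 180.0,
# and parse_angle(['south-west', 'NE']) -> (225.0, 45.0).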
|
python
|
{
"resource": ""
}
|
q23706
|
Text.to_dict
|
train
|
def to_dict(self):
"""
Returns the underlying data as a Python dict.
"""
return {
"state_size": self.state_size,
"chain": self.chain.to_json(),
"parsed_sentences": self.parsed_sentences if self.retain_original else None
}
|
python
|
{
"resource": ""
}
|
q23707
|
Text.generate_corpus
|
train
|
def generate_corpus(self, text):
"""
Given a text string, returns a list of lists; that is, a list of
"sentences," each of which is a list of words. Before splitting into
words, the sentences are filtered through `self.test_sentence_input`
"""
if isinstance(text, str):
sentences = self.sentence_split(text)
else:
sentences = []
for line in text:
sentences += self.sentence_split(line)
passing = filter(self.test_sentence_input, sentences)
runs = map(self.word_split, passing)
return runs
|
python
|
{
"resource": ""
}
|
q23708
|
Text.from_chain
|
train
|
def from_chain(cls, chain_json, corpus=None, parsed_sentences=None):
"""
Init a Text class based on an existing chain JSON string or object.
If corpus is None, overlap checking won't work.
"""
chain = Chain.from_json(chain_json)
return cls(corpus or None, parsed_sentences=parsed_sentences, state_size=chain.state_size, chain=chain)
|
python
|
{
"resource": ""
}
|
q23709
|
Chain.build
|
train
|
def build(self, corpus, state_size):
"""
Build a Python representation of the Markov model. Returns a dict
of dicts where the keys of the outer dict represent all possible states,
and point to the inner dicts. The inner dicts represent all possibilities
for the "next" item in the chain, along with the count of times it
appears.
"""
# Using a DefaultDict here would be a lot more convenient, however the memory
# usage is far higher.
model = {}
for run in corpus:
items = ([ BEGIN ] * state_size) + run + [ END ]
for i in range(len(run) + 1):
state = tuple(items[i:i+state_size])
follow = items[i+state_size]
if state not in model:
model[state] = {}
if follow not in model[state]:
model[state][follow] = 0
model[state][follow] += 1
return model
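
# Hedged illustration (not from the original source), with state_size=1 and the library's
# BEGIN/END sentinels: the corpus [["the", "cat"], ["the", "dog"]] builds roughly
# {(BEGIN,): {"the": 2}, ("the",): {"cat": 1, "dog": 1}, ("cat",): {END: 1}, ("dog",): {END: 1}},
# i.e. each state maps to the observed followers and their counts.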
|
python
|
{
"resource": ""
}
|
q23710
|
Chain.move
|
train
|
def move(self, state):
"""
Given a state, choose the next item at random.
"""
if state == tuple([ BEGIN ] * self.state_size):
choices = self.begin_choices
cumdist = self.begin_cumdist
else:
choices, weights = zip(*self.model[state].items())
cumdist = list(accumulate(weights))
r = random.random() * cumdist[-1]
selection = choices[bisect.bisect(cumdist, r)]
return selection
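
# Hedged illustration (not from the original source): for a state whose followers are
# {"cat": 3, "dog": 1}, cumdist becomes [3, 4]; r is drawn uniformly from [0, 4), and
# bisect picks "cat" for r < 3 (probability 3/4) and "dog" otherwise (probability 1/4).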
|
python
|
{
"resource": ""
}
|
q23711
|
Chain.from_json
|
train
|
def from_json(cls, json_thing):
"""
Given a JSON object or JSON string that was created by `self.to_json`,
return the corresponding markovify.Chain.
"""
if isinstance(json_thing, basestring):
obj = json.loads(json_thing)
else:
obj = json_thing
if isinstance(obj, list):
rehydrated = dict((tuple(item[0]), item[1]) for item in obj)
elif isinstance(obj, dict):
rehydrated = obj
else:
raise ValueError("Object should be dict or list")
state_size = len(list(rehydrated.keys())[0])
inst = cls(None, state_size, rehydrated)
return inst
|
python
|
{
"resource": ""
}
|
q23712
|
PyJWS.register_algorithm
|
train
|
def register_algorithm(self, alg_id, alg_obj):
"""
Registers a new Algorithm for use when creating and verifying tokens.
"""
if alg_id in self._algorithms:
raise ValueError('Algorithm already has a handler.')
if not isinstance(alg_obj, Algorithm):
raise TypeError('Object is not of type `Algorithm`')
self._algorithms[alg_id] = alg_obj
self._valid_algs.add(alg_id)
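
# Hedged usage sketch (not from the original source); the algorithm id below is an
# illustrative placeholder, not a real registered name:
# jws = PyJWS()
# jws.register_algorithm('HS256X', HMACAlgorithm(HMACAlgorithm.SHA256))  # hypothetical custom id
# Registering the same id twice would raise ValueError('Algorithm already has a handler.')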
|
python
|
{
"resource": ""
}
|
q23713
|
PyJWS.unregister_algorithm
|
train
|
def unregister_algorithm(self, alg_id):
"""
Unregisters an Algorithm for use when creating and verifying tokens.
Throws KeyError if algorithm is not registered.
"""
if alg_id not in self._algorithms:
raise KeyError('The specified algorithm could not be removed'
' because it is not registered.')
del self._algorithms[alg_id]
self._valid_algs.remove(alg_id)
|
python
|
{
"resource": ""
}
|
q23714
|
get_default_algorithms
|
train
|
def get_default_algorithms():
"""
Returns the algorithms that are implemented by the library.
"""
default_algorithms = {
'none': NoneAlgorithm(),
'HS256': HMACAlgorithm(HMACAlgorithm.SHA256),
'HS384': HMACAlgorithm(HMACAlgorithm.SHA384),
'HS512': HMACAlgorithm(HMACAlgorithm.SHA512)
}
if has_crypto:
default_algorithms.update({
'RS256': RSAAlgorithm(RSAAlgorithm.SHA256),
'RS384': RSAAlgorithm(RSAAlgorithm.SHA384),
'RS512': RSAAlgorithm(RSAAlgorithm.SHA512),
'ES256': ECAlgorithm(ECAlgorithm.SHA256),
'ES384': ECAlgorithm(ECAlgorithm.SHA384),
'ES521': ECAlgorithm(ECAlgorithm.SHA512),
'ES512': ECAlgorithm(ECAlgorithm.SHA512), # Backward compat for #219 fix
'PS256': RSAPSSAlgorithm(RSAPSSAlgorithm.SHA256),
'PS384': RSAPSSAlgorithm(RSAPSSAlgorithm.SHA384),
'PS512': RSAPSSAlgorithm(RSAPSSAlgorithm.SHA512)
})
return default_algorithms
|
python
|
{
"resource": ""
}
|
q23715
|
info
|
train
|
def info():
"""
Generate information for a bug report.
Based on the requests package help utility module.
"""
try:
platform_info = {"system": platform.system(), "release": platform.release()}
except IOError:
platform_info = {"system": "Unknown", "release": "Unknown"}
implementation = platform.python_implementation()
if implementation == "CPython":
implementation_version = platform.python_version()
elif implementation == "PyPy":
implementation_version = "%s.%s.%s" % (
sys.pypy_version_info.major,
sys.pypy_version_info.minor,
sys.pypy_version_info.micro,
)
if sys.pypy_version_info.releaselevel != "final":
implementation_version = "".join(
[implementation_version, sys.pypy_version_info.releaselevel]
)
else:
implementation_version = "Unknown"
return {
"platform": platform_info,
"implementation": {"name": implementation, "version": implementation_version},
"cryptography": {"version": getattr(cryptography, "__version__", "")},
"pyjwt": {"version": pyjwt_version},
}
|
python
|
{
"resource": ""
}
|
q23716
|
Provider._authenticate
|
train
|
def _authenticate(self):
"""Authenticate with netcup server. Must be called first."""
login_info = self._apicall('login')
self.api_session_id = login_info['apisessionid']
if not self.api_session_id:
raise Exception('Login failed')
# query ttl and verify access to self.domain:
zone_info = self._apicall('infoDnsZone', domainname=self.domain)
self.zone_ttl = zone_info['ttl']
|
python
|
{
"resource": ""
}
|
q23717
|
Provider._create_record
|
train
|
def _create_record(self, rtype, name, content):
"""Create record. If it already exists, do nothing."""
if not self._list_records(rtype, name, content):
self._update_records([{}], {
'type': rtype,
'hostname': self._relative_name(name),
'destination': content,
'priority': self._get_lexicon_option('priority'),
})
LOGGER.debug('create_record: %s', True)
return True
|
python
|
{
"resource": ""
}
|
q23718
|
Provider._list_records
|
train
|
def _list_records(self, rtype=None, name=None, content=None):
"""List all records. Return an empty list if no records found.
``rtype``, ``name`` and ``content`` are used to filter records."""
records = [
{
'id': record['id'],
'type': record['type'],
'name': self._full_name(record['hostname']),
'content': record['destination'],
'priority': record['priority'],
'ttl': self.zone_ttl,
}
for record in self._raw_records(None, rtype, name, content)
]
LOGGER.debug('list_records: %s', records)
return records
|
python
|
{
"resource": ""
}
|
q23719
|
Provider._delete_record
|
train
|
def _delete_record(self, identifier=None, rtype=None, name=None, content=None):
"""Delete an existing record. If record does not exist, do nothing."""
records = self._raw_records(identifier, rtype, name, content)
LOGGER.debug('delete_records: %s', [rec['id'] for rec in records])
self._update_records(records, {
'deleterecord': True,
'type': rtype,
'hostname': name,
'destination': content,
})
LOGGER.debug('delete_record: %s', True)
return True
|
python
|
{
"resource": ""
}
|
q23720
|
Provider._raw_records
|
train
|
def _raw_records(self, identifier=None, rtype=None, name=None, content=None):
"""Return list of record dicts in the netcup API convention."""
record_fields = {
'id': identifier,
'type': rtype,
'hostname': name and self._relative_name(name),
'destination': content,
}
# type/hostname/destination of the dnsrecord type are mandatory (even
# when deleting), and must be queried if not all were specified:
if all(record_fields.values()):
return [record_fields]
data = self._apicall('infoDnsRecords', domainname=self.domain)
records = data.get('dnsrecords', [])
return [
record for record in records
if all(record[k] == v for k, v in record_fields.items() if v)
]
|
python
|
{
"resource": ""
}
|
q23721
|
Provider._update_records
|
train
|
def _update_records(self, records, data):
"""Insert or update a list of DNS records, specified in the netcup API
convention.
The fields ``hostname``, ``type``, and ``destination`` are mandatory
and must be provided either in the record dict or through ``data``!
"""
data = {k: v for k, v in data.items() if v}
records = [dict(record, **data) for record in records]
return self._apicall(
'updateDnsRecords',
domainname=self.domain,
dnsrecordset={'dnsrecords': records},
).get('dnsrecords', [])
|
python
|
{
"resource": ""
}
|
q23722
|
provider_parser
|
train
|
def provider_parser(subparser):
"""Configure provider parser for Rackspace"""
subparser.add_argument(
"--auth-account", help="specify account number for authentication")
subparser.add_argument(
"--auth-username",
help="specify username for authentication. Only used if --auth-token is empty.")
subparser.add_argument(
"--auth-api-key",
help="specify api key for authentication. Only used if --auth-token is empty.")
subparser.add_argument(
"--auth-token",
help=("specify token for authentication. "
"If empty, the username and api key will be used to create a token."))
subparser.add_argument("--sleep-time", type=float, default=1,
help="number of seconds to wait between update requests.")
|
python
|
{
"resource": ""
}
|
q23723
|
provider_parser
|
train
|
def provider_parser(subparser):
"""Specify arguments for AWS Route 53 Lexicon Provider."""
subparser.add_argument("--auth-access-key",
help="specify ACCESS_KEY for authentication")
subparser.add_argument("--auth-access-secret",
help="specify ACCESS_SECRET for authentication")
subparser.add_argument(
"--private-zone",
help=("indicates what kind of hosted zone to use. If true, use "
"only private zones. If false, use only public zones"))
# TODO: these are only required for testing, we should figure out
# a way to remove them & update the integration tests
# to dynamically populate the auth credentials that are required.
subparser.add_argument(
"--auth-username", help="alternative way to specify the ACCESS_KEY for authentication")
subparser.add_argument(
"--auth-token", help="alternative way to specify the ACCESS_SECRET for authentication")
|
python
|
{
"resource": ""
}
|
q23724
|
RecordSetPaginator.get_base_kwargs
|
train
|
def get_base_kwargs(self):
"""Get base kwargs for API call."""
kwargs = {
'HostedZoneId': self.hosted_zone_id
}
if self.max_items is not None:
kwargs.update({
'MaxItems': str(self.max_items)
})
return kwargs
|
python
|
{
"resource": ""
}
|
q23725
|
RecordSetPaginator.all_record_sets
|
train
|
def all_record_sets(self):
"""Generator to loop through current record set.
Call next page if it exists.
"""
is_truncated = True
start_record_name = None
start_record_type = None
kwargs = self.get_base_kwargs()
while is_truncated:
if start_record_name is not None:
kwargs.update({
'StartRecordName': start_record_name,
'StartRecordType': start_record_type
})
result = self.get_record_sets(**kwargs)
for record_set in result.get('ResourceRecordSets', []):
yield record_set
is_truncated = result.get('IsTruncated', False)
start_record_name = result.get('NextRecordName', None)
start_record_type = result.get('NextRecordType', None)
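
# Hedged usage sketch (not from the original source); assumes a boto3 Route 53 client and
# an illustrative hosted zone id:
# paginator = RecordSetPaginator(r53_client, 'Z0000EXAMPLE', max_items=100)
# for record_set in paginator.all_record_sets():
#     print(record_set['Name'], record_set['Type'])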
|
python
|
{
"resource": ""
}
|
q23726
|
Provider.filter_zone
|
train
|
def filter_zone(self, data):
"""Check if a zone is private"""
if self.private_zone is not None:
if data['Config']['PrivateZone'] != self.str2bool(self.private_zone):
return False
if data['Name'] != '{0}.'.format(self.domain):
return False
return True
|
python
|
{
"resource": ""
}
|
q23727
|
Provider._authenticate
|
train
|
def _authenticate(self):
"""Determine the hosted zone id for the domain."""
try:
hosted_zones = self.r53_client.list_hosted_zones_by_name()[
'HostedZones'
]
hosted_zone = next(
hz for hz in hosted_zones
if self.filter_zone(hz)
)
self.domain_id = hosted_zone['Id']
except StopIteration:
raise Exception('No domain found')
|
python
|
{
"resource": ""
}
|
q23728
|
Provider._create_record
|
train
|
def _create_record(self, rtype, name, content):
"""Create a record in the hosted zone."""
return self._change_record_sets('CREATE', rtype, name, content)
|
python
|
{
"resource": ""
}
|
q23729
|
Provider._update_record
|
train
|
def _update_record(self, identifier=None, rtype=None, name=None, content=None):
"""Update a record from the hosted zone."""
return self._change_record_sets('UPSERT', rtype, name, content)
|
python
|
{
"resource": ""
}
|
q23730
|
Provider._delete_record
|
train
|
def _delete_record(self, identifier=None, rtype=None, name=None, content=None):
"""Delete a record from the hosted zone."""
return self._change_record_sets('DELETE', rtype, name, content)
|
python
|
{
"resource": ""
}
|
q23731
|
Provider._list_records
|
train
|
def _list_records(self, rtype=None, name=None, content=None):
"""List all records for the hosted zone."""
records = []
paginator = RecordSetPaginator(self.r53_client, self.domain_id)
for record in paginator.all_record_sets():
if rtype is not None and record['Type'] != rtype:
continue
if name is not None and record['Name'] != self._fqdn_name(name):
continue
if record.get('AliasTarget', None) is not None:
record_content = [record['AliasTarget'].get('DNSName', None)]
if record.get('ResourceRecords', None) is not None:
record_content = [self._format_content(record['Type'], value['Value']) for value
in record['ResourceRecords']]
if content is not None and content not in record_content:
continue
LOGGER.debug('record: %s', record)
records.append({
'type': record['Type'],
'name': self._full_name(record['Name']),
'ttl': record.get('TTL', None),
'content': record_content[0] if len(record_content) == 1 else record_content,
})
LOGGER.debug('list_records: %s', records)
return records
|
python
|
{
"resource": ""
}
|
q23732
|
Provider._authenticate
|
train
|
def _authenticate(self):
"""
Authenticates against the Easyname website and tries to find out the domain
id.
Easyname uses a CSRF token in its login form, so two requests are
necessary to actually log in.
Returns:
bool: True if domain id was found.
Raises:
AssertionError: When a request returns unexpected or unknown data.
ValueError: When login data is wrong or the domain does not exist.
"""
csrf_token = self._get_csrf_token()
self._login(csrf_token)
domain_text_element = self._get_domain_text_of_authoritative_zone()
self.domain_id = self._get_domain_id(domain_text_element)
LOGGER.debug('Easyname domain ID: %s', self.domain_id)
return True
|
python
|
{
"resource": ""
}
|
q23733
|
Provider._create_record_internal
|
train
|
def _create_record_internal(self, rtype, name, content, identifier=None):
"""
Create a new DNS entry in the domain zone if it does not already exist.
Args:
rtype (str): The DNS type (e.g. A, TXT, MX, etc) of the new entry.
name (str): The name of the new DNS entry, e.g. the domain for which an
MX entry shall be valid.
content (str): The content of the new DNS entry, e.g. the mail server
hostname for an MX entry.
[identifier] (str): The easyname id of a DNS entry. Use to overwrite an
existing entry.
Returns:
bool: True if the record was created successfully, False otherwise.
"""
name = self._relative_name(name) if name is not None else name
LOGGER.debug('Creating record with name %s', name)
if self._is_duplicate_record(rtype, name, content):
return True
data = self._get_post_data_to_create_dns_entry(rtype, name, content, identifier)
LOGGER.debug('Create DNS data: %s', data)
create_response = self.session.post(
self.URLS['dns_create_entry'].format(self.domain_id),
data=data
)
self._invalidate_records_cache()
self._log('Create DNS entry', create_response)
# Pull a list of records and check for ours
was_success = len(self._list_records(rtype, name, content)) > 0
if was_success:
msg = 'Successfully added record %s'
else:
msg = 'Failed to add record %s'
LOGGER.info(msg, name)
return was_success
|
python
|
{
"resource": ""
}
|
q23734
|
Provider._delete_record
|
train
|
def _delete_record(self, identifier=None, rtype=None, name=None, content=None):
"""
Delete one or more DNS entries in the domain zone that match the given
criteria.
Args:
[identifier] (str): An ID to match against DNS entry easyname IDs.
[rtype] (str): A DNS rtype (e.g. A, TXT, MX, etc) to match against DNS
entry types.
[name] (str): A name to match against DNS entry names.
[content] (str): A content to match against a DNS entry contents.
Returns:
bool: True if the record(s) were deleted successfully, False
otherwise.
"""
success_url = self.URLS['dns'].format(self.domain_id)
record_ids = self._get_matching_dns_entry_ids(identifier, rtype,
name, content)
LOGGER.debug('Record IDs to delete: %s', record_ids)
success = True
for rec_id in record_ids:
delete_response = self.session.get(
self.URLS['dns_delete_entry'].format(self.domain_id, rec_id))
self._invalidate_records_cache()
self._log('Delete DNS entry {}'.format(rec_id), delete_response)
success = success and delete_response.url == success_url
return success
|
python
|
{
"resource": ""
}
|
q23735
|
Provider._update_record
|
train
|
def _update_record(self, identifier, rtype=None, name=None, content=None):
"""
Update a DNS entry identified by identifier or name in the domain zone.
Any argument that is not given leaves the current value of the DNS entry unchanged.
Args:
identifier (str): The easyname id of the DNS entry to update.
[rtype] (str): The DNS rtype (e.g. A, TXT, MX, etc) of the new entry.
[name] (str): The name of the new DNS entry, e.g. the domain for which
an MX entry shall be valid.
[content] (str): The content of the new DNS entry, e.g. the mail
server hostname for an MX entry.
Returns:
bool: True if the record was updated successfully, False otherwise.
Raises:
AssertionError: When a request returns unexpected or unknown data.
"""
if identifier is not None:
identifier = int(identifier)
records = self._list_records_internal(identifier=identifier)
else:
records = self._list_records_internal(name=name, rtype=rtype)
LOGGER.debug('Records to update (%d): %s', len(records), records)
assert records, 'No record found to update'
success = True
for record in records:
name = name if name is not None else record['name']
rtype = rtype if rtype is not None else record['type']
content = content if content is not None \
else record['content']
success = success and self._create_record_internal(
rtype, name, content, record['id'])
return success
|
python
|
{
"resource": ""
}
|
q23736
|
Provider._list_records_internal
|
train
|
def _list_records_internal(self, rtype=None, name=None, content=None, identifier=None):
"""
Filter and list DNS entries of the domain zone on Easyname.
Easyname shows each entry in an HTML table row and each attribute in a
table column.
Args:
[rtype] (str): Filter by DNS rtype (e.g. A, TXT, MX, etc)
[name] (str): Filter by the name of the DNS entry, e.g. the domain for
which an MX entry shall be valid.
[content] (str): Filter by the content of the DNS entry, e.g. the
mail server hostname for an MX entry.
[identifier] (str): Filter by the easyname id of the DNS entry.
Returns:
list: A list of DNS entries. A DNS entry is an object with DNS
attribute names as keys (e.g. name, content, priority, etc)
and additionally an id.
Raises:
AssertionError: When a request returns unexpected or unknown data.
"""
name = self._full_name(name) if name is not None else name
if self._records is None:
records = []
rows = self._get_dns_entry_trs()
for index, row in enumerate(rows):
self._log('DNS list entry', row)
try:
rec = {}
if row.has_attr('ondblclick'):
rec['id'] = int(row['ondblclick'].split(
'id=')[1].split("'")[0])
else:
rec['id'] = -index
columns = row.find_all('td')
rec['name'] = (columns[0].string or '').strip()
rec['type'] = (columns[1].contents[1] or '').strip()
rec['content'] = (columns[2].string or '').strip()
rec['priority'] = (columns[3].string or '').strip()
rec['ttl'] = (columns[4].string or '').strip()
if rec['priority']:
rec['priority'] = int(rec['priority'])
if rec['ttl']:
rec['ttl'] = int(rec['ttl'])
except Exception as error:
errmsg = 'Cannot parse DNS entry ({}).'.format(error)
LOGGER.warning(errmsg)
raise AssertionError(errmsg)
records.append(rec)
self._records = records
records = self._filter_records(self._records, rtype, name, content, identifier)
LOGGER.debug('Final records (%d): %s', len(records), records)
return records
|
python
|
{
"resource": ""
}
|
q23737
|
Provider._get_post_data_to_create_dns_entry
|
train
|
def _get_post_data_to_create_dns_entry(self, rtype, name, content, identifier=None):
"""
Build and return the post data that is needed to create a DNS entry.
"""
is_update = identifier is not None
if is_update:
records = self._list_records_internal(identifier=identifier)
assert len(records) == 1, 'ID is not unique or does not exist'
record = records[0]
LOGGER.debug('Create post data to update record: %s', record)
data = {
'id': str(identifier) if is_update else '',
'action': 'save',
'name': name,
'type': rtype,
'content': content,
'prio': str(record['priority']) if is_update else '10',
'ttl': str(record['ttl']) if is_update else '360',
'commit': ''
}
ttl = self._get_lexicon_option('ttl')
if ttl and ttl > 360:
data['ttl'] = str(ttl)
prio = self._get_lexicon_option('priority')
if prio and prio > 0:
data['prio'] = str(prio)
return data
|
python
|
{
"resource": ""
}
|
q23738
|
Provider._is_duplicate_record
|
train
|
def _is_duplicate_record(self, rtype, name, content):
"""Check if DNS entry already exists."""
records = self._list_records(rtype, name, content)
is_duplicate = len(records) >= 1
if is_duplicate:
LOGGER.info('Duplicate record %s %s %s, NOOP', rtype, name, content)
return is_duplicate
|
python
|
{
"resource": ""
}
|
q23739
|
Provider._get_matching_dns_entry_ids
|
train
|
def _get_matching_dns_entry_ids(self, identifier=None, rtype=None,
name=None, content=None):
"""Return a list of DNS entries that match the given criteria."""
record_ids = []
if not identifier:
records = self._list_records(rtype, name, content)
record_ids = [record['id'] for record in records]
else:
record_ids.append(identifier)
return record_ids
|
python
|
{
"resource": ""
}
|
q23740
|
Provider._get_dns_entry_trs
|
train
|
def _get_dns_entry_trs(self):
"""
Return the TR elements holding the DNS entries.
"""
from bs4 import BeautifulSoup
dns_list_response = self.session.get(
self.URLS['dns'].format(self.domain_id))
self._log('DNS list', dns_list_response)
assert dns_list_response.status_code == 200, \
'Could not load DNS entries.'
html = BeautifulSoup(dns_list_response.content, 'html.parser')
self._log('DNS list', html)
dns_table = html.find('table', {'id': 'cp_domains_dnseintraege'})
assert dns_table is not None, 'Could not find DNS entry table'
def _is_zone_tr(elm):
has_ondblclick = elm.has_attr('ondblclick')
has_class = elm.has_attr('class')
return elm.name.lower() == 'tr' and (has_class or has_ondblclick)
rows = dns_table.findAll(_is_zone_tr)
assert rows is not None and rows, 'Could not find any DNS entries'
return rows
|
python
|
{
"resource": ""
}
|
q23741
|
Provider._filter_records
|
train
|
def _filter_records(self, records, rtype=None, name=None, content=None, identifier=None): # pylint: disable=too-many-arguments,no-self-use
"""
Filter dns entries based on type, name or content.
"""
if not records:
return []
if identifier is not None:
LOGGER.debug('Filtering %d records by id: %s', len(records), identifier)
records = [record for record in records if record['id'] == identifier]
if rtype is not None:
LOGGER.debug('Filtering %d records by type: %s', len(records), rtype)
records = [record for record in records if record['type'] == rtype]
if name is not None:
LOGGER.debug('Filtering %d records by name: %s', len(records), name)
if name.endswith('.'):
name = name[:-1]
records = [record for record in records if name == record['name']]
if content is not None:
LOGGER.debug('Filtering %d records by content: %s', len(records), content.lower())
records = [record for record in records if
record['content'].lower() == content.lower()]
return records
|
python
|
{
"resource": ""
}
|
q23742
|
Provider._get_csrf_token
|
train
|
def _get_csrf_token(self):
Return the CSRF token of the Easyname login form.
from bs4 import BeautifulSoup
home_response = self.session.get(self.URLS['login'])
self._log('Home', home_response)
assert home_response.status_code == 200, \
'Could not load Easyname login page.'
html = BeautifulSoup(home_response.content, 'html.parser')
self._log('Home', html)
csrf_token_field = html.find('input', {'id': 'loginxtoken'})
assert csrf_token_field is not None, 'Could not find login token.'
return csrf_token_field['value']
|
python
|
{
"resource": ""
}
|
q23743
|
Provider._login
|
train
|
def _login(self, csrf_token):
Attempt to log in the session on Easyname.
login_response = self.session.post(
self.URLS['login'],
data={
'username': self._get_provider_option('auth_username') or '',
'password': self._get_provider_option('auth_password') or '',
'submit': '',
'loginxtoken': csrf_token,
}
)
self._log('Login', login_response)
assert login_response.status_code == 200, \
'Could not login due to a network error.'
assert login_response.url == self.URLS['overview'], \
'Easyname login failed, bad EASYNAME_USER or EASYNAME_PASS.'
|
python
|
{
"resource": ""
}
|
q23744
|
Provider._get_domain_text_of_authoritative_zone
|
train
|
def _get_domain_text_of_authoritative_zone(self):
Get the domain text of the authoritative zone.
# We are logged in, so get the domain list
from bs4 import BeautifulSoup
zones_response = self.session.get(self.URLS['domain_list'])
self._log('Zone', zones_response)
assert zones_response.status_code == 200, \
'Could not retrieve domain list due to a network error.'
html = BeautifulSoup(zones_response.content, 'html.parser')
self._log('Zone', html)
domain_table = html.find('table', {'id': 'cp_domain_table'})
assert domain_table is not None, 'Could not find domain table'
# (Sub)domains can either be managed in their own zones or by the
# zones of their parent (sub)domains. Iterate over all subdomains
# (starting with the deepest one) and see if there is an own zone
# for it.
domain = self.domain or ''
domain_text = None
subdomains = domain.split('.')
while True:
domain = '.'.join(subdomains)
LOGGER.debug('Check if %s has own zone', domain)
domain_text = domain_table.find(string=domain)
if domain_text is not None or len(subdomains) < 3:
break
subdomains.pop(0)
# Update domain to equal the zone's domain. This is important if we are
# handling a subdomain that has no zone of itself. If we do not do
# this, self._relative_name will strip also a part of the subdomain
# away.
self.domain = domain
assert domain_text is not None, \
'The domain does not exist on Easyname.'
return domain_text
|
python
|
{
"resource": ""
}
|
q23745
|
Provider._get_domain_id
|
train
|
def _get_domain_id(self, domain_text_element): # pylint: disable=no-self-use
"""Return the easyname id of the domain."""
try:
# Hierarchy: TR > TD > SPAN > Domain Text
tr_anchor = domain_text_element.parent.parent.parent
td_anchor = tr_anchor.find('td', {'class': 'td_2'})
link = td_anchor.find('a')['href']
domain_id = link.rsplit('/', 1)[-1]
return domain_id
except Exception as error:
errmsg = ('Cannot get the domain id even though the domain seems '
'to exist ({}).'.format(error))
LOGGER.warning(errmsg)
raise AssertionError(errmsg)
|
python
|
{
"resource": ""
}
|
q23746
|
Provider._log
|
train
|
def _log(self, name, element): # pylint: disable=no-self-use
"""
Log Response and Tag elements. Do nothing if the element is neither of them.
"""
from bs4 import BeautifulSoup, Tag
if isinstance(element, Response):
LOGGER.debug('%s response: URL=%s Code=%s', name, element.url, element.status_code)
elif isinstance(element, (BeautifulSoup, Tag)):
LOGGER.debug('%s HTML:\n%s', name, element)
|
python
|
{
"resource": ""
}
|
q23747
|
find_providers
|
train
|
def find_providers():
"""Find all providers registered in Lexicon, and their availability"""
providers_list = sorted({modname for (_, modname, _)
in pkgutil.iter_modules(providers.__path__)
if modname != 'base'})
try:
distribution = pkg_resources.get_distribution('dns-lexicon')
except pkg_resources.DistributionNotFound:
return {provider: True for provider in providers_list}
else:
return {provider: _resolve_requirements(provider, distribution)
for provider in providers_list}
|
python
|
{
"resource": ""
}
|
q23748
|
provider_parser
|
train
|
def provider_parser(subparser):
"""Configure a provider parser for Hetzner"""
subparser.add_argument('--auth-account',
help='specify type of Hetzner account: by default Hetzner Robot '
'(robot) or Hetzner konsoleH (konsoleh)')
subparser.add_argument('--auth-username', help='specify username of Hetzner account')
subparser.add_argument('--auth-password', help='specify password of Hetzner account')
subparser.add_argument('--linked',
help='if exists, uses linked CNAME as A|AAAA|TXT record name for edit '
'actions: by default (yes); Further restriction: Only enabled if '
'record name or raw FQDN record identifier \'type/name/content\' is '
'specified, and additionally for update actions the record name '
'remains the same',
default=str('yes'),
choices=['yes', 'no'])
subparser.add_argument('--propagated',
help='waits until record is publicly propagated after succeeded '
'create|update actions: by default (yes)',
default=str('yes'),
choices=['yes', 'no'])
subparser.add_argument('--latency',
help='specify latency, used during checks for publicly propagation '
'and additionally for Hetzner Robot after record edits: by default '
'30s (30)',
default=int(30),
type=int)
|
python
|
{
"resource": ""
}
|
q23749
|
Provider._create_record
|
train
|
def _create_record(self, rtype, name, content):
"""
Connects to the Hetzner account, adds a new record to the zone and returns a
boolean indicating whether creation was successful. Requires record rtype, name and
content for the record to create.
"""
with self._session(self.domain, self.domain_id) as ddata:
# Validate method parameters
if not rtype or not name or not content:
LOGGER.warning('Hetzner => Record has no rtype|name|content specified')
return False
# Add record to zone
name = ddata['cname'] if ddata['cname'] else self._fqdn_name(name)
rrset = ddata['zone']['data'].get_rdataset(name, rdtype=rtype, create=True)
for rdata in rrset:
if self._convert_content(rtype, content) == rdata.to_text():
LOGGER.info('Hetzner => Record with content \'%s\' already exists',
content)
return True
ttl = (rrset.ttl if 0 < rrset.ttl < self._get_lexicon_option('ttl')
else self._get_lexicon_option('ttl'))
rdataset = dns.rdataset.from_text(rrset.rdclass, rrset.rdtype,
ttl, self._convert_content(rtype, content))
rrset.update(rdataset)
# Post zone to Hetzner
synced_change = self._post_zone(ddata['zone'])
if synced_change:
self._propagated_record(rtype, name, self._convert_content(rtype, content),
ddata['nameservers'])
return synced_change
|
python
|
{
"resource": ""
}
|
q23750
|
Provider._list_records
|
train
|
def _list_records(self, rtype=None, name=None, content=None):
"""
Connects to the Hetzner account and returns a list of records filtered by record
rtype, name and content. The list is empty if no records are found.
"""
with self._session(self.domain, self.domain_id) as ddata:
name = self._fqdn_name(name) if name else None
return self._list_records_in_zone(ddata['zone']['data'], rtype, name, content)
|
python
|
{
"resource": ""
}
|
q23751
|
Provider._delete_record
|
train
|
def _delete_record(self, identifier=None, rtype=None, name=None, content=None):
"""
Connects to the Hetzner account, removes an existing record from the zone and returns a
boolean indicating whether deletion was successful. Uses identifier or rtype, name & content to
look up one or more records to delete across all records of the zone.
"""
with self._session(self.domain, self.domain_id) as ddata:
# Validate method parameters
if identifier:
rtype, name, content = self._parse_identifier(identifier, ddata['zone']['data'])
if rtype is None or name is None or content is None:
LOGGER.info('Hetzner => Record with identifier \'%s\' does not exist',
identifier)
return True
name = ddata['cname'] if ddata['cname'] else (self._fqdn_name(name) if name else None)
records = self._list_records_in_zone(ddata['zone']['data'], rtype, name, content)
if records:
# Remove records from zone
for record in records:
rrset = ddata['zone']['data'].get_rdataset(record['name'] + '.',
rdtype=record['type'])
rdatas = []
for rdata in rrset:
if self._convert_content(record['type'],
record['content']) != rdata.to_text():
rdatas.append(rdata.to_text())
if rdatas:
rdataset = dns.rdataset.from_text_list(rrset.rdclass, rrset.rdtype,
record['ttl'], rdatas)
ddata['zone']['data'].replace_rdataset(record['name'] + '.', rdataset)
else:
ddata['zone']['data'].delete_rdataset(record['name'] + '.', record['type'])
# Post zone to Hetzner
synced_change = self._post_zone(ddata['zone'])
return synced_change
LOGGER.info('Hetzner => Record lookup has no matches')
return True
|
python
|
{
"resource": ""
}
|
q23752
|
Provider._create_identifier
|
train
|
def _create_identifier(rdtype, name, content):
"""
Creates a hashed identifier based on the fully qualified record type, name & content
and returns the hash.
"""
sha256 = hashlib.sha256()
sha256.update((rdtype + '/').encode('UTF-8'))
sha256.update((name + '/').encode('UTF-8'))
sha256.update(content.encode('UTF-8'))
return sha256.hexdigest()[0:7]
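
# Hedged illustration (not from the original source): the identifier is the first 7 hex
# characters of sha256('A/' + 'www.example.com./' + '1.2.3.4'), so the same
# (type, name, content) triple always yields the same short id across runs.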
|
python
|
{
"resource": ""
}
|
q23753
|
Provider._parse_identifier
|
train
|
def _parse_identifier(self, identifier, zone=None):
"""
Parses the record identifier and returns type, name & content of the associated record
as a tuple. The tuple elements are None if no associated record is found.
"""
rdtype, name, content = None, None, None
if len(identifier) > 7:
parts = identifier.split('/')
rdtype, name, content = parts[0], parts[1], '/'.join(parts[2:])
else:
records = self._list_records_in_zone(zone)
for record in records:
if record['id'] == identifier:
rdtype, name, content = record['type'], record['name'] + '.', record['content']
return rdtype, name, content
|
python
|
{
"resource": ""
}
|
q23754
|
Provider._convert_content
|
train
|
def _convert_content(self, rdtype, content):
"""
Converts type-dependent record content into well-formed and fully qualified
content for the domain zone and returns it.
"""
if rdtype == 'TXT':
if content[0] != '"':
content = '"' + content
if content[-1] != '"':
content += '"'
if rdtype in ('CNAME', 'MX', 'NS', 'SRV'):
if content[-1] != '.':
content = self._fqdn_name(content)
return content
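
# Hedged illustration (not from the original source): TXT content 'v=spf1 -all' becomes
# '"v=spf1 -all"', while a relative CNAME/MX/NS/SRV target such as 'mail' is turned into
# a fully qualified name via self._fqdn_name (e.g. 'mail.example.com.').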
|
python
|
{
"resource": ""
}
|
q23755
|
Provider._list_records_in_zone
|
train
|
def _list_records_in_zone(self, zone, rdtype=None, name=None, content=None):
"""
Iterates over all records of the zone and returns a list of records filtered
by record type, name and content. The list is empty if no records are found.
"""
records = []
rrsets = zone.iterate_rdatasets() if zone else []
for rname, rdataset in rrsets:
rtype = dns.rdatatype.to_text(rdataset.rdtype)
if ((not rdtype or rdtype == rtype)
and (not name or name == rname.to_text())):
for rdata in rdataset:
rdata = rdata.to_text()
if not content or self._convert_content(rtype, content) == rdata:
raw_rdata = self._clean_TXT_record({'type': rtype,
'content': rdata})['content']
data = {
'type': rtype,
'name': rname.to_text(True),
'ttl': int(rdataset.ttl),
'content': raw_rdata,
'id': Provider._create_identifier(rtype, rname.to_text(), raw_rdata)
}
records.append(data)
return records
|
python
|
{
"resource": ""
}
|
q23756
|
Provider._request
|
train
|
def _request(self, action='GET', url='/', data=None, query_params=None):
"""
Sends a request to Hetzner using the current session and returns the response.
"""
if data is None:
data = {}
if query_params is None:
query_params = {}
response = self.session.request(action, self.api[self.account]['endpoint'] + url,
params=query_params, data=data)
response.raise_for_status()
return response
|
python
|
{
"resource": ""
}
|
q23757
|
Provider._dns_lookup
|
train
|
def _dns_lookup(name, rdtype, nameservers=None):
"""
Queries the specified or the default system nameservers to resolve the record type
& name and returns the record set. The record set is empty if no propagated
record is found.
"""
rrset = dns.rrset.from_text(name, 0, 1, rdtype)
try:
resolver = dns.resolver.Resolver()
resolver.lifetime = 1
if nameservers:
resolver.nameservers = nameservers
rrset = resolver.query(name, rdtype)
for rdata in rrset:
LOGGER.debug('DNS Lookup => %s %s %s %s',
rrset.name.to_text(), dns.rdataclass.to_text(rrset.rdclass),
dns.rdatatype.to_text(rrset.rdtype), rdata.to_text())
except dns.exception.DNSException as error:
LOGGER.debug('DNS Lookup => %s', error)
return rrset
|
python
|
{
"resource": ""
}
|
q23758
|
Provider._get_nameservers
|
train
|
def _get_nameservers(domain):
"""
Looks up the domain nameservers and returns the IPs of the nameservers as a list.
The list is empty if no nameservers were found. Requires the associated domain zone
name for the lookup.
"""
nameservers = []
rdtypes_ns = ['SOA', 'NS']
rdtypes_ip = ['A', 'AAAA']
for rdtype_ns in rdtypes_ns:
for rdata_ns in Provider._dns_lookup(domain, rdtype_ns):
for rdtype_ip in rdtypes_ip:
for rdata_ip in Provider._dns_lookup(rdata_ns.to_text().split(' ')[0],
rdtype_ip):
if rdata_ip.to_text() not in nameservers:
nameservers.append(rdata_ip.to_text())
LOGGER.debug('DNS Lookup => %s IN NS %s', domain, ' '.join(nameservers))
return nameservers
|
python
|
{
"resource": ""
}
|
q23759
|
Provider._get_dns_cname
|
train
|
def _get_dns_cname(name, link=False):
"""
Looks up the associated domain zone, nameservers and linked record name for the
given fully qualified record name, following CNAME links until no further linked
record name is found or CNAME lookup is disabled, and then returns these parameters as a tuple.
"""
resolver = dns.resolver.Resolver()
resolver.lifetime = 1
domain = dns.resolver.zone_for_name(name, resolver=resolver).to_text(True)
nameservers = Provider._get_nameservers(domain)
cname = None
links, max_links = 0, 5
while link:
if links >= max_links:
LOGGER.error('Hetzner => Record %s has more than %d linked CNAME '
'records. Reduce the amount of CNAME links!',
name, max_links)
raise AssertionError
qname = cname if cname else name
rrset = Provider._dns_lookup(qname, 'CNAME', nameservers)
if rrset:
links += 1
cname = rrset[0].to_text()
qdomain = dns.resolver.zone_for_name(cname, resolver=resolver).to_text(True)
if domain != qdomain:
domain = qdomain
nameservers = Provider._get_nameservers(qdomain)
else:
link = False
if cname:
LOGGER.info('Hetzner => Record %s has CNAME %s', name, cname)
return domain, nameservers, cname
|
python
|
{
"resource": ""
}
|
q23760
|
Provider._link_record
|
train
|
def _link_record(self):
"""
Checks the restrictions on the use of CNAME lookup and returns a tuple of the
fully qualified record name to look up and a boolean indicating whether a CNAME lookup
should be done. The fully qualified record name is empty if no
record name is specified by this provider.
"""
action = self._get_lexicon_option('action')
identifier = self._get_lexicon_option('identifier')
rdtype = self._get_lexicon_option('type')
name = (self._fqdn_name(self._get_lexicon_option('name'))
if self._get_lexicon_option('name') else None)
link = self._get_provider_option('linked')
qname = name
if identifier:
rdtype, name, _ = self._parse_identifier(identifier)
if action != 'list' and rdtype in ('A', 'AAAA', 'TXT') and name and link == 'yes':
if action != 'update' or name == qname or not qname:
LOGGER.info('Hetzner => Enable CNAME lookup '
'(see --linked parameter)')
return name, True
LOGGER.info('Hetzner => Disable CNAME lookup '
'(see --linked parameter)')
return name, False
|
python
|
{
"resource": ""
}
|
q23761
|
Provider._propagated_record
|
train
|
def _propagated_record(self, rdtype, name, content, nameservers=None):
"""
If the public propagation check should be done, waits until the domain nameservers
respond with the propagated record type, name & content and returns a boolean
indicating whether the public propagation was successful.
"""
latency = self._get_provider_option('latency')
propagated = self._get_provider_option('propagated')
if propagated == 'yes':
retry, max_retry = 0, 20
while retry < max_retry:
for rdata in Provider._dns_lookup(name, rdtype, nameservers):
if content == rdata.to_text():
LOGGER.info('Hetzner => Record %s has %s %s', name, rdtype, content)
return True
retry += 1
retry_log = (', retry ({}/{}) in {}s...'.format((retry + 1), max_retry, latency)
if retry < max_retry else '')
LOGGER.info('Hetzner => Record is not propagated%s', retry_log)
time.sleep(latency)
return False
|
python
|
{
"resource": ""
}
|
q23762
|
Provider._filter_dom
|
train
|
def _filter_dom(dom, filters, last_find_all=False):
"""
If needed, creates a DOM from a given session response, then filters the DOM
via the given API filters and returns the filtered DOM. The DOM is empty if the filters
have no match.
"""
if isinstance(dom, string_types):
dom = BeautifulSoup(dom, 'html.parser')
for idx, find in enumerate(filters, start=1):
if not dom:
break
name, attrs = find.get('name'), find.get('attrs', {})
if len(filters) == idx and last_find_all:
dom = dom.find_all(name, attrs=attrs) if name else dom.find_all(attrs=attrs)
else:
dom = dom.find(name, attrs=attrs) if name else dom.find(attrs=attrs)
return dom
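
# Hedged usage sketch (not from the original source); the filter chain below is illustrative:
# filters = [{'name': 'table', 'attrs': {'id': 'zone_table'}}, {'name': 'tr'}]
# rows = Provider._filter_dom(response.text, filters, last_find_all=True)
# Each filter narrows the DOM with find(); only the last one uses find_all() when requested.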
|
python
|
{
"resource": ""
}
|
q23763
|
Provider._extract_hidden_data
|
train
|
def _extract_hidden_data(dom):
"""
Extracts hidden input data from the DOM and returns the data as a dictionary.
"""
input_tags = dom.find_all('input', attrs={'type': 'hidden'})
data = {}
for input_tag in input_tags:
data[input_tag['name']] = input_tag['value']
return data
|
python
|
{
"resource": ""
}
|
q23764
|
Provider._extract_domain_id
|
train
|
def _extract_domain_id(string, regex):
"""
Extracts the domain ID from the given string and returns it.
"""
regex = re.compile(regex)
match = regex.search(string)
if not match:
return False
return str(match.group(1))
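
# Hedged illustration (not from the original source), with a hypothetical URL and pattern:
# Provider._extract_domain_id('/dns/index/id/12345', r'id/(\d+)') returns '12345',
# and False is returned when the pattern does not match.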
|
python
|
{
"resource": ""
}
|
q23765
|
Provider._auth_session
|
train
|
def _auth_session(self, username, password):
"""
Creates a session to the Hetzner account, authenticates with the given credentials and
returns the session if authentication was successful. Otherwise raises an error.
"""
api = self.api[self.account]['auth']
endpoint = api.get('endpoint', self.api[self.account]['endpoint'])
session = requests.Session()
session_retries = Retry(total=10, backoff_factor=0.5)
session_adapter = requests.adapters.HTTPAdapter(max_retries=session_retries)
session.mount('https://', session_adapter)
response = session.request('GET', endpoint + api['GET'].get('url', '/'))
dom = Provider._filter_dom(response.text, api['filter'])
data = Provider._extract_hidden_data(dom)
data[api['user']], data[api['pass']] = username, password
response = session.request('POST', endpoint + api['POST']['url'], data=data)
if Provider._filter_dom(response.text, api['filter']):
LOGGER.error('Hetzner => Unable to authenticate session with %s account \'%s\': '
'Invalid credentials',
self.account, username)
raise AssertionError
LOGGER.info('Hetzner => Authenticate session with %s account \'%s\'',
self.account, username)
return session
|
python
|
{
"resource": ""
}
|
q23766
|
Provider._exit_session
|
train
|
def _exit_session(self):
"""
Exits the session to the Hetzner account and returns.
"""
api = self.api[self.account]
response = self._get(api['exit']['GET']['url'])
if not Provider._filter_dom(response.text, api['filter']):
LOGGER.info('Hetzner => Exit session')
else:
LOGGER.warning('Hetzner => Unable to exit session')
self.session = None
return True
|
python
|
{
"resource": ""
}
|
q23767
|
Provider._get_domain_id
|
train
|
def _get_domain_id(self, domain):
"""
Pulls all domains managed by the authenticated Hetzner account, extracts their IDs
and returns the ID for the current domain, if it exists. Otherwise raises an error.
"""
api = self.api[self.account]['domain_id']
qdomain = dns.name.from_text(domain).to_unicode(True)
domains, last_count, page = {}, -1, 0
while last_count != len(domains):
last_count = len(domains)
page += 1
url = (api['GET'].copy()).get('url', '/').replace('<index>', str(page))
params = api['GET'].get('params', {}).copy()
for param in params:
params[param] = params[param].replace('<index>', str(page))
response = self._get(url, query_params=params)
domain_tags = Provider._filter_dom(response.text, api['filter'], True)
for domain_tag in domain_tags:
domain_id = Provider._extract_domain_id(dict(domain_tag.attrs)[api['id']['attr']],
api['id']['regex'])
domain = (Provider._filter_dom(domain_tag, api['domain'])
.renderContents().decode('UTF-8'))
domains[domain] = domain_id
if domain == qdomain:
LOGGER.info('Hetzner => Get ID %s for domain %s', domain_id, qdomain)
return domain_id
LOGGER.error('Hetzner => ID for domain %s does not exist', qdomain)
raise AssertionError
|
python
|
{
"resource": ""
}
|
q23768
|
Provider._get_zone
|
train
|
def _get_zone(self, domain, domain_id):
"""
Pulls the zone for the current domain from the authenticated Hetzner account and
returns it as a zone object.
"""
api = self.api[self.account]
for request in api['zone']['GET']:
url = (request.copy()).get('url', '/').replace('<id>', domain_id)
params = request.get('params', {}).copy()
for param in params:
params[param] = params[param].replace('<id>', domain_id)
response = self._get(url, query_params=params)
dom = Provider._filter_dom(response.text, api['filter'])
zone_file_filter = [{'name': 'textarea', 'attrs': {'name': api['zone']['file']}}]
zone_file = Provider._filter_dom(dom, zone_file_filter).renderContents().decode('UTF-8')
hidden = Provider._extract_hidden_data(dom)
zone = {'data': dns.zone.from_text(zone_file, origin=domain, relativize=False),
'hidden': hidden}
LOGGER.info('Hetzner => Get zone for domain %s', domain)
return zone
|
python
|
{
"resource": ""
}
|
q23769
|
Provider._post_zone
|
train
|
def _post_zone(self, zone):
"""
Pushes the updated zone for the current domain to the authenticated Hetzner account and
returns a boolean indicating whether the update was successful. Furthermore, waits until
the zone has been taken over if it is a Hetzner Robot account.
"""
api = self.api[self.account]['zone']
data = zone['hidden']
data[api['file']] = zone['data'].to_text(relativize=True)
response = self._post(api['POST']['url'], data=data)
if Provider._filter_dom(response.text, api['filter']):
LOGGER.error('Hetzner => Unable to update zone for domain %s: Syntax error\n\n%s',
zone['data'].origin.to_unicode(True),
zone['data'].to_text(relativize=True).decode('UTF-8'))
return False
LOGGER.info('Hetzner => Update zone for domain %s',
zone['data'].origin.to_unicode(True))
if self.account == 'robot':
latency = self._get_provider_option('latency')
LOGGER.info('Hetzner => Wait %ds until Hetzner Robot has taken over zone...',
latency)
time.sleep(latency)
return True
|
python
|
{
"resource": ""
}
|
q23770
|
Provider._validate_response
|
train
|
def _validate_response(self, response, message, exclude_code=None): # pylint: disable=no-self-use
"""
Validate an API server response.
:param dict response: server response to check
:param str message: error message to raise
:param int exclude_code: error code to exclude from error handling
:return:
:raises Exception: on error
"""
if 'code' in response and response['code'] >= 2000:
if exclude_code is not None and response['code'] == exclude_code:
return
raise Exception("{0}: {1} ({2})".format(
message, response['msg'], response['code']))
|
python
|
{
"resource": ""
}
|
q23771
|
Provider._authenticate
|
train
|
def _authenticate(self):
"""
run any request against the API just to make sure the credentials
are valid
:return bool: success status
:raises Exception: on error
"""
opts = {'domain': self._domain}
opts.update(self._auth)
response = self._api.domain.info(opts)
self._validate_response(
response=response, message='Failed to authenticate')
# set to fake id to pass tests, inwx doesn't work on domain id but
# uses domain names for identification
self.domain_id = 1
return True
|
python
|
{
"resource": ""
}
|
q23772
|
Provider._create_record
|
train
|
def _create_record(self, rtype, name, content):
"""
create a record
does nothing if the record already exists
:param str rtype: type of record
:param str name: name of record
:param mixed content: value of record
:return bool: success status
:raises Exception: on error
"""
opts = {'domain': self._domain, 'type': rtype.upper(),
'name': self._full_name(name), 'content': content}
if self._get_lexicon_option('ttl'):
opts['ttl'] = self._get_lexicon_option('ttl')
opts.update(self._auth)
response = self._api.nameserver.createRecord(opts)
self._validate_response(
response=response, message='Failed to create record',
exclude_code=2302)
return True
|
python
|
{
"resource": ""
}
|
q23773
|
Provider._list_records
|
train
|
def _list_records(self, rtype=None, name=None, content=None):
"""
list all records
:param str rtype: type of record
:param str name: name of record
:param mixed content: value of record
:return list: list of found records
:raises Exception: on error
"""
opts = {'domain': self._domain}
if rtype is not None:
opts['type'] = rtype.upper()
if name is not None:
opts['name'] = self._full_name(name)
if content is not None:
opts['content'] = content
opts.update(self._auth)
response = self._api.nameserver.info(opts)
self._validate_response(
response=response, message='Failed to get records')
records = []
if 'record' in response['resData']:
for record in response['resData']['record']:
processed_record = {
'type': record['type'],
'name': record['name'],
'ttl': record['ttl'],
'content': record['content'],
'id': record['id']
}
records.append(processed_record)
return records
|
python
|
{
"resource": ""
}
|
q23774
|
Provider._update_record
|
train
|
def _update_record(self, identifier, rtype=None, name=None, content=None):
"""
update a record
:param int identifier: identifier of record to update
:param str rtype: type of record
:param str name: name of record
:param mixed content: value of record
:return bool: success status
:raises Exception: on error
"""
record_ids = []
if not identifier:
records = self._list_records(rtype, name)
record_ids = [record['id'] for record in records]
else:
record_ids.append(identifier)
for an_identifier in record_ids:
opts = {'id': an_identifier}
if rtype is not None:
opts['type'] = rtype.upper()
if name is not None:
opts['name'] = self._full_name(name)
if content is not None:
opts['content'] = content
opts.update(self._auth)
response = self._api.nameserver.updateRecord(opts)
self._validate_response(
response=response, message='Failed to update record',
exclude_code=2302)
return True
|
python
|
{
"resource": ""
}
|
q23775
|
Provider.create_record
|
train
|
def create_record(self, rtype=None, name=None, content=None, **kwargs):
"""
Create record. If record already exists with the same content, do nothing.
"""
if not rtype and kwargs.get('type'):
warnings.warn('Parameter "type" is deprecated, use "rtype" instead.',
DeprecationWarning)
rtype = kwargs.get('type')
return self._create_record(rtype, name, content)
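# Hedged usage sketch: the legacy keyword still works but emits a DeprecationWarning
# (the provider instance and values are illustrative):
#
#   provider.create_record(type='TXT', name='_acme-challenge', content='token')
# behaves like
#   provider.create_record('TXT', '_acme-challenge', 'token')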
|
python
|
{
"resource": ""
}
|
q23776
|
Provider.list_records
|
train
|
def list_records(self, rtype=None, name=None, content=None, **kwargs):
"""
List all records. Return an empty list if no records found
type, name and content are used to filter records.
If possible filter during the query, otherwise filter after response is received.
"""
if not rtype and kwargs.get('type'):
warnings.warn('Parameter "type" is deprecated, use "rtype" instead.',
DeprecationWarning)
rtype = kwargs.get('type')
return self._list_records(rtype=rtype, name=name, content=content)
|
python
|
{
"resource": ""
}
|
q23777
|
Provider.update_record
|
train
|
def update_record(self, identifier, rtype=None, name=None, content=None, **kwargs):
"""
Update a record. Identifier must be specified.
"""
if not rtype and kwargs.get('type'):
warnings.warn('Parameter "type" is deprecated, use "rtype" instead.',
DeprecationWarning)
rtype = kwargs.get('type')
return self._update_record(identifier, rtype=rtype, name=name, content=content)
|
python
|
{
"resource": ""
}
|
q23778
|
Provider.delete_record
|
train
|
def delete_record(self, identifier=None, rtype=None, name=None, content=None, **kwargs):
"""
Delete an existing record.
If record does not exist, do nothing.
If an identifier is specified, use it, otherwise do a lookup using type, name and content.
"""
if not rtype and kwargs.get('type'):
warnings.warn('Parameter "type" is deprecated, use "rtype" instead.',
DeprecationWarning)
rtype = kwargs.get('type')
return self._delete_record(identifier=identifier, rtype=rtype, name=name, content=content)
|
python
|
{
"resource": ""
}
|
q23779
|
Provider.notify_slaves
|
train
|
def notify_slaves(self):
"""Checks to see if slaves should be notified, and notifies them if needed"""
if self.disable_slave_notify is not None:
LOGGER.debug('Slave notifications disabled')
return False
if self.zone_data()['kind'] == 'Master':
response_code = self._put('/zones/' + self.domain + '/notify').status_code
if response_code == 200:
LOGGER.debug('Slave(s) notified')
return True
LOGGER.debug('Slave notification failed with code %i', response_code)
else:
LOGGER.debug('Zone type should be \'Master\' for slave notifications')
return False
|
python
|
{
"resource": ""
}
|
q23780
|
Provider.zone_data
|
train
|
def zone_data(self):
"""Get zone data"""
if self._zone_data is None:
self._zone_data = self._get('/zones/' + self.domain).json()
return self._zone_data
|
python
|
{
"resource": ""
}
|
q23781
|
Provider._update_record
|
train
|
def _update_record(self, identifier, rtype=None, name=None, content=None):
"""Updates the specified record in a new Gandi zone
'content' should be a string or a list of strings
"""
if self.protocol == 'rpc':
return self.rpc_helper.update_record(identifier, rtype, name, content)
data = {}
if rtype:
data['rrset_type'] = rtype
if name:
data['rrset_name'] = self._relative_name(name)
if content:
if isinstance(content, (list, tuple, set)):
data['rrset_values'] = list(content)
else:
data['rrset_values'] = [content]
if rtype is not None:
# replace the records of a specific rtype
url = '/domains/{0}/records/{1}/{2}'.format(self.domain_id,
identifier or self._relative_name(
name),
rtype)
self._put(url, data)
else:
# replace all records with a matching name
url = '/domains/{0}/records/{1}'.format(self.domain_id,
identifier or self._relative_name(name))
self._put(url, {'items': [data]})
LOGGER.debug('update_record: %s', True)
return True
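# Hedged sketch of the resulting call for a single-value update when no identifier
# is given (domain, name and content are illustrative):
#
#   provider._update_record(None, rtype='TXT', name='_acme-challenge', content='token')
# issues roughly:
#   PUT /domains/<domain_id>/records/_acme-challenge/TXT
#   {'rrset_type': 'TXT', 'rrset_name': '_acme-challenge', 'rrset_values': ['token']}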
|
python
|
{
"resource": ""
}
|
q23782
|
GandiRPCSubProvider.authenticate
|
train
|
def authenticate(self):
"""Determine the current domain and zone IDs for the domain."""
try:
payload = self._api.domain.info(self._api_key, self._domain)
self._zone_id = payload['zone_id']
return payload['id']
except xmlrpclib.Fault as err:
raise Exception("Failed to authenticate: '{0}'".format(err))
|
python
|
{
"resource": ""
}
|
q23783
|
GandiRPCSubProvider.create_record
|
train
|
def create_record(self, rtype, name, content, ttl):
"""Creates a record for the domain in a new Gandi zone."""
version = None
ret = False
# This isn't quite "do nothing" if the record already exists.
# In this case, no new record will be created, but a new zone version
# will be created and set.
try:
version = self._api.domain.zone.version.new(
self._api_key, self._zone_id)
self._api.domain.zone.record.add(self._api_key, self._zone_id, version,
{'type': rtype.upper(),
'name': name,
'value': content,
'ttl': ttl
})
self._api.domain.zone.version.set(
self._api_key, self._zone_id, version)
ret = True
finally:
if not ret and version is not None:
self._api.domain.zone.version.delete(
self._api_key, self._zone_id, version)
LOGGER.debug("create_record: %s", ret)
return ret
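# Hedged usage sketch (values are illustrative):
#
#   sub_provider.create_record('TXT', '_acme-challenge.example.com.', 'token', 3600)
#
# The flow is: zone.version.new -> zone.record.add -> zone.version.set; if anything
# fails before "set", the freshly created zone version is deleted again.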
|
python
|
{
"resource": ""
}
|
q23784
|
GandiRPCSubProvider.update_record
|
train
|
def update_record(self, identifier, rtype=None, name=None, content=None): # pylint: disable=too-many-branches
"""Updates the specified record in a new Gandi zone."""
if not identifier:
records = self.list_records(rtype, name)
if len(records) == 1:
identifier = records[0]['id']
elif len(records) > 1:
raise Exception('Several record identifiers match the request')
else:
raise Exception('Record identifier could not be found')
identifier = str(identifier)
    version = None
    ret = False  # default to failure so the finally block and the return value are well-defined
# Gandi doesn't allow you to edit records on the active zone file.
# Gandi also doesn't persist zone record identifiers when creating
# a new zone file. To update by identifier, we lookup the record
# by identifier, then use the record fields to find the record in
# the newly created zone.
records = self._api.domain.zone.record.list(
self._api_key, self._zone_id, 0, {'id': identifier})
if len(records) == 1:
rec = records[0]
del rec['id']
try:
version = self._api.domain.zone.version.new(
self._api_key, self._zone_id)
records = self._api.domain.zone.record.list(
self._api_key, self._zone_id, version, rec)
if len(records) != 1:
raise self.GandiInternalError("expected one record")
if rtype is not None:
rec['type'] = rtype.upper()
if name is not None:
rec['name'] = self._relative_name(name)
if content is not None:
rec['value'] = self._txt_encode(
content) if rec['type'] == 'TXT' else content
records = self._api.domain.zone.record.update(
self._api_key, self._zone_id, version, {'id': records[0]['id']}, rec)
if len(records) != 1:
raise self.GandiInternalError(
"Expected one updated record")
self._api.domain.zone.version.set(
self._api_key, self._zone_id, version)
ret = True
except self.GandiInternalError:
pass
finally:
if not ret and version is not None:
self._api.domain.zone.version.delete(
self._api_key, self._zone_id, version)
LOGGER.debug("update_record: %s", ret)
return ret
|
python
|
{
"resource": ""
}
|
q23785
|
GandiRPCSubProvider.delete_record
|
train
|
def delete_record(self, identifier=None, rtype=None, name=None, content=None):
"""Removes the specified records in a new Gandi zone."""
version = None
ret = False
opts = {}
if identifier is not None:
opts['id'] = identifier
else:
if not rtype and not name and not content:
raise ValueError(
'Error, at least one parameter from type, name or content must be set')
if rtype:
opts['type'] = rtype.upper()
if name:
opts['name'] = self._relative_name(name)
if content:
opts['value'] = self._txt_encode(
                content) if opts.get('type') == 'TXT' else content
records = self._api.domain.zone.record.list(
self._api_key, self._zone_id, 0, opts)
if records:
try:
version = self._api.domain.zone.version.new(
self._api_key, self._zone_id)
for record in records:
del record['id']
self._api.domain.zone.record.delete(
self._api_key, self._zone_id, version, record)
self._api.domain.zone.version.set(
self._api_key, self._zone_id, version)
ret = True
finally:
if not ret and version is not None:
self._api.domain.zone.version.delete(
self._api_key, self._zone_id, version)
LOGGER.debug("delete_record: %s", ret)
return ret
|
python
|
{
"resource": ""
}
|
q23786
|
ConfigResolver.add_config_source
|
train
|
def add_config_source(self, config_source, position=None):
"""
Add a config source to the current ConfigResolver instance.
If position is not set, this source will be inserted with the lowest priority.
"""
rank = position if position is not None else len(self._config_sources)
self._config_sources.insert(rank, config_source)
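# Hedged usage sketch: position is an insertion index, so lower values mean
# higher priority (the source objects named here are illustrative):
#
#   resolver = ConfigResolver()
#   resolver.add_config_source(source_a)      # appended: lowest priority so far
#   resolver.add_config_source(source_b, 0)   # inserted first: now highest priority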
|
python
|
{
"resource": ""
}
|
q23787
|
ConfigResolver.with_legacy_dict
|
train
|
def with_legacy_dict(self, legacy_dict_object):
"""Configure a source that consumes the dict that where used on Lexicon 2.x"""
warnings.warn(DeprecationWarning('Legacy configuration object has been used '
'to load the ConfigResolver.'))
return self.with_config_source(LegacyDictConfigSource(legacy_dict_object))
|
python
|
{
"resource": ""
}
|
q23788
|
provider_parser
|
train
|
def provider_parser(subparser):
"""Configure provider parser for CloudNS"""
identity_group = subparser.add_mutually_exclusive_group()
identity_group.add_argument(
"--auth-id", help="specify user id for authentication")
identity_group.add_argument(
"--auth-subid", help="specify subuser id for authentication")
identity_group.add_argument(
"--auth-subuser", help="specify subuser name for authentication")
subparser.add_argument(
"--auth-password", help="specify password for authentication")
subparser.add_argument("--weight", help="specify the SRV record weight")
subparser.add_argument("--port", help="specify the SRV record port")
|
python
|
{
"resource": ""
}
|
q23789
|
Provider._find_record
|
train
|
def _find_record(self, domain, _type=None):
"""search for a record on NS1 across zones. returns None if not found."""
def _is_matching(record):
"""filter function for records"""
if domain and record.get('domain', None) != domain:
return False
if _type and record.get('type', None) != _type:
return False
return True
payload = self._get('/search?q={0}&type=record'.format(domain))
for record in payload:
if _is_matching(record):
match = record
break
else:
# no such domain on ns1
return None
record = self._get(
'/zones/{0}/{1}/{2}'.format(match['zone'], match['domain'], match['type']))
if record.get('message', None):
return None # {"message":"record not found"}
short_answers = [x['answer'][0] for x in record['answers']]
# ensure a compatibility level with self._list_records
record['short_answers'] = short_answers
return record
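# Hedged usage sketch (domain and type are illustrative):
#
#   record = provider._find_record('www.example.com', _type='A')
#
# Returns None when the search yields no match or NS1 answers
# {"message": "record not found"}; otherwise the returned record carries a
# 'short_answers' list to stay compatible with self._list_records.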
|
python
|
{
"resource": ""
}
|
q23790
|
generate_list_table_result
|
train
|
def generate_list_table_result(lexicon_logger, output=None, without_header=None):
"""Convert returned data from list actions into a nice table for command line usage"""
if not isinstance(output, list):
        lexicon_logger.debug('Command output is not a list, so it cannot be '
                             'rendered as a table.')
return None
array = [[
row.get('id', ''),
row.get('type', ''),
row.get('name', ''),
row.get('content', ''),
row.get('ttl', '')] for row in output]
# Insert header (insert before calculating the max width of each column
# to take headers size into account)
if not without_header:
headers = ['ID', 'TYPE', 'NAME', 'CONTENT', 'TTL']
array.insert(0, headers)
column_widths = [0, 0, 0, 0, 0]
# Find max width for each column
for row in array:
for idx, col in enumerate(row):
width = len(str(col))
if width > column_widths[idx]:
column_widths[idx] = width
# Add a 'nice' separator
if not without_header:
array.insert(1, ['-' * column_widths[idx]
for idx in range(len(column_widths))])
# Construct table to be printed
table = []
for row in array:
row_list = []
for idx, col in enumerate(row):
row_list.append(str(col).ljust(column_widths[idx]))
table.append(' '.join(row_list))
# Return table
return os.linesep.join(table)
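# Hedged usage sketch with a made-up record list (not taken from this document):
import logging

_demo_records = [
    {'id': '1001', 'type': 'TXT', 'name': '_acme-challenge.example.com',
     'content': 'validation-token', 'ttl': 300},
]
print(generate_list_table_result(logging.getLogger(__name__), _demo_records))
# Prints an aligned table with the header ID / TYPE / NAME / CONTENT / TTL,
# a '----' separator row, and one row per record.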
|
python
|
{
"resource": ""
}
|
q23791
|
generate_table_results
|
train
|
def generate_table_results(output=None, without_header=None):
"""Convert returned data from non-list actions into a nice table for command line usage"""
array = []
str_output = str(output)
if not without_header:
array.append('RESULT')
array.append('-' * max(6, len(str_output)))
array.append(str_output)
return os.linesep.join(array)
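# Hedged usage sketch: non-list results are rendered as a one-column table.
print(generate_table_results(True))
# RESULT
# ------
# True
print(generate_table_results('created', without_header=True))
# created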
|
python
|
{
"resource": ""
}
|
q23792
|
handle_output
|
train
|
def handle_output(results, output_type, action):
"""Print the relevant output for given output_type"""
if output_type == 'QUIET':
return
if not output_type == 'JSON':
if action == 'list':
table = generate_list_table_result(
logger, results, output_type == 'TABLE-NO-HEADER')
else:
table = generate_table_results(results, output_type == 'TABLE-NO-HEADER')
if table:
print(table)
else:
try:
json_str = json.dumps(results)
if json_str:
print(json_str)
except TypeError:
            logger.debug('Output is not JSON serializable, so it cannot be '
                         'printed with the --output=JSON parameter.')
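# Hedged usage sketch (results and actions are illustrative):
#
#   handle_output(True, 'TABLE', 'create')   # prints RESULT / ------ / True
#   handle_output([{'id': '1', 'type': 'A', 'name': 'www.example.com',
#                   'content': '127.0.0.1', 'ttl': 3600}], 'TABLE', 'list')
#   handle_output(True, 'JSON', 'create')    # prints: true
#   handle_output(True, 'QUIET', 'create')   # prints nothing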
|
python
|
{
"resource": ""
}
|
q23793
|
main
|
train
|
def main():
"""Main function of Lexicon."""
# Dynamically determine all the providers available and gather command line arguments.
parsed_args = generate_cli_main_parser().parse_args()
log_level = logging.getLevelName(parsed_args.log_level)
logging.basicConfig(stream=sys.stdout, level=log_level,
format='%(message)s')
logger.debug('Arguments: %s', parsed_args)
# In the CLI context, will get configuration interactively:
# * from the command line
# * from the environment variables
# * from lexicon configuration files found in given --config-dir (default is current dir)
config = ConfigResolver()
config.with_args(parsed_args).with_env().with_config_dir(parsed_args.config_dir)
client = Client(config)
results = client.execute()
handle_output(results, parsed_args.output, config.resolve('lexicon:action'))
|
python
|
{
"resource": ""
}
|
q23794
|
Client.execute
|
train
|
def execute(self):
"""Execute provided configuration in class constructor to the DNS records"""
self.provider.authenticate()
identifier = self.config.resolve('lexicon:identifier')
record_type = self.config.resolve('lexicon:type')
name = self.config.resolve('lexicon:name')
content = self.config.resolve('lexicon:content')
if self.action == 'create':
return self.provider.create_record(record_type, name, content)
if self.action == 'list':
return self.provider.list_records(record_type, name, content)
if self.action == 'update':
return self.provider.update_record(identifier, record_type, name, content)
if self.action == 'delete':
return self.provider.delete_record(identifier, record_type, name, content)
raise ValueError('Invalid action statement: {0}'.format(self.action))
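# Hedged usage sketch: the action and record parameters come from the resolved
# configuration, set up as in main() (sources and values are illustrative):
#
#   config = ConfigResolver().with_env()   # e.g. LEXICON_* environment variables
#   client = Client(config)
#   client.execute()   # dispatches to create/list/update/delete_record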
|
python
|
{
"resource": ""
}
|
q23795
|
Provider._authenticate
|
train
|
def _authenticate(self):
"""Logs-in the user and checks the domain name"""
if not self._get_provider_option(
'auth_username') or not self._get_provider_option('auth_password'):
raise Exception(
'No valid authentication data passed, expected: auth-username and auth-password')
response = self._request_login(self._get_provider_option('auth_username'),
self._get_provider_option('auth_password'))
if 'ssid' in response:
self.ssid = response['ssid']
domains = self.domains_list()
if any((domain['name'] == self.domain for domain in domains)):
self.domain_id = self.domain
else:
raise Exception("Unknown domain {}".format(self.domain))
else:
raise Exception("No SSID provided by server")
|
python
|
{
"resource": ""
}
|
q23796
|
Provider._create_record
|
train
|
def _create_record(self, rtype, name, content):
"""Creates a new unique record"""
found = self._list_records(rtype=rtype, name=name, content=content)
if found:
return True
record = self._create_request_record(None, rtype, name, content,
self._get_lexicon_option('ttl'),
self._get_lexicon_option('priority'))
self._request_add_dns_record(record)
return True
|
python
|
{
"resource": ""
}
|
q23797
|
Provider._update_record
|
train
|
def _update_record(self, identifier, rtype=None, name=None, content=None):
"""Updates a record. Name changes are allowed, but the record identifier will change"""
if identifier is not None:
if name is not None:
records = self._list_records_internal(identifier=identifier)
if len(records) == 1 and records[0]['name'] != self._full_name(name):
# API does not allow us to update name directly
self._update_record_with_name(
records[0], rtype, name, content)
else:
self._update_record_with_id(identifier, rtype, content)
else:
self._update_record_with_id(identifier, rtype, content)
else:
guessed_record = self._guess_record(rtype, name)
self._update_record_with_id(guessed_record['id'], rtype, content)
return True
|
python
|
{
"resource": ""
}
|
q23798
|
Provider._update_record_with_id
|
train
|
def _update_record_with_id(self, identifier, rtype, content):
"""Updates existing record with no sub-domain name changes"""
record = self._create_request_record(identifier, rtype, None, content,
self._get_lexicon_option('ttl'),
self._get_lexicon_option('priority'))
self._request_modify_dns_record(record)
|
python
|
{
"resource": ""
}
|
q23799
|
Provider._update_record_with_name
|
train
|
def _update_record_with_name(self, old_record, rtype, new_name, content):
"""Updates existing record and changes it's sub-domain name"""
new_type = rtype if rtype else old_record['type']
new_ttl = self._get_lexicon_option('ttl')
if new_ttl is None and 'ttl' in old_record:
new_ttl = old_record['ttl']
new_priority = self._get_lexicon_option('priority')
if new_priority is None and 'priority' in old_record:
new_priority = old_record['priority']
new_content = content
if new_content is None and 'content' in old_record:
new_content = old_record['content']
record = self._create_request_record(None,
new_type,
new_name,
new_content,
new_ttl,
new_priority)
# This will be a different domain name, so no name collision should
# happen. First create a new entry and when it succeeds, delete the old
# one.
self._request_add_dns_record(record)
self._request_delete_dns_record_by_id(old_record['id'])
|
python
|
{
"resource": ""
}
|