| sentence1<br>stringlengths 52–3.87M | sentence2<br>stringlengths 1–47.2k | label<br>stringclasses 1 value |
|---|---|---|
def refresh(self):
    """Obtain a new access token using the stored refresh_token.

    :raises InvalidInvocation: if no refresh token is set on this instance.
    """
    token = self.refresh_token
    if token is None:
        raise InvalidInvocation("refresh token not provided")
    self._request_token(grant_type="refresh_token", refresh_token=token)
|
Obtain a new access token from the refresh_token.
|
entailment
|
def revoke(self, only_access=False):
    """Revoke the current Authorization.

    :param only_access: (Optional) When explicitly set to True, do not
        evict the refresh token if one is set.

    Revoking a refresh token will in-turn revoke all access tokens
    associated with that authorization.
    """
    # Full revocation only when we hold a refresh token and the caller did
    # not restrict us to the access token.
    if self.refresh_token is not None and not only_access:
        self._authenticator.revoke_token(
            self.refresh_token, "refresh_token"
        )
        self._clear_access_token()
        self.refresh_token = None
    else:
        super(Authorizer, self).revoke()
|
Revoke the current Authorization.
:param only_access: (Optional) When explicitly set to True, do not
evict the refresh token if one is set.
Revoking a refresh token will in-turn revoke all access tokens
associated with that authorization.
|
entailment
|
def refresh(self):
    """Obtain a new access token via the installed-client grant."""
    self._request_token(
        grant_type="https://oauth.reddit.com/grants/installed_client",
        device_id=self._device_id,
    )
|
Obtain a new access token.
|
entailment
|
def refresh(self):
    """Obtain a new personal-use script type access token."""
    credentials = {"username": self._username, "password": self._password}
    self._request_token(grant_type="password", **credentials)
|
Obtain a new personal-use script type access token.
|
entailment
|
def request(self, *args, **kwargs):
    """Issue the HTTP request capturing any errors that may occur.

    Any exception raised by the underlying HTTP client is wrapped in a
    RequestException carrying the original args and kwargs.
    """
    try:
        response = self._http.request(*args, timeout=TIMEOUT, **kwargs)
    except Exception as exc:
        raise RequestException(exc, args, kwargs)
    return response
|
Issue the HTTP request capturing any errors that may occur.
|
entailment
|
def _hangul_char_to_jamo(syllable):
    """Decompose a Hangul syllable into its constituent jamo characters.

    Returns a (lead, vowel) or (lead, vowel, tail) tuple.
    Note: Non-Hangul characters are echoed back unchanged.
    """
    if not is_hangul_char(syllable):
        return syllable
    rem = ord(syllable) - _JAMO_OFFSET
    # Syllable layout: 588 codepoints per lead, 28 per vowel, 1 per tail.
    tail = rem % 28
    vowel = 1 + ((rem - tail) % 588) // 28
    lead = 1 + rem // 588
    jamo = [chr(lead + _JAMO_LEAD_OFFSET), chr(vowel + _JAMO_VOWEL_OFFSET)]
    if tail:
        jamo.append(chr(tail + _JAMO_TAIL_OFFSET))
    return tuple(jamo)
|
Return a 3-tuple of lead, vowel, and tail jamo characters.
Note: Non-Hangul characters are echoed back.
|
entailment
|
def _jamo_to_hangul_char(lead, vowel, tail=0):
    """Compose jamo characters into a single Hangul syllable character.

    ``tail`` may be falsy (no final consonant) or a jamo tail character.
    """
    lead_index = ord(lead) - _JAMO_LEAD_OFFSET - 1
    vowel_index = ord(vowel) - _JAMO_VOWEL_OFFSET - 1
    tail_index = (ord(tail) - _JAMO_TAIL_OFFSET) if tail else 0
    # Inverse of the decomposition arithmetic: 588 per lead, 28 per vowel.
    return chr(_JAMO_OFFSET + 588 * lead_index + 28 * vowel_index + tail_index)
|
Return the Hangul character for the given jamo characters.
|
entailment
|
def _get_unicode_name(char):
    """Fetch the Unicode name for a jamo (or HCJ) character.

    :raises InvalidJamoError: if ``char`` has no known jamo name.
    """
    # Membership is tested on the dicts directly (no ``.keys()``), and each
    # dict is consulted at most once. HCJ names take precedence, matching
    # the original is_hcj() dispatch, but a character present only in
    # _JAMO_TO_NAME no longer risks a stray KeyError.
    if char in _HCJ_TO_NAME:
        return _HCJ_TO_NAME[char]
    if char in _JAMO_TO_NAME:
        return _JAMO_TO_NAME[char]
    raise InvalidJamoError("Not jamo or nameless jamo character", char)
|
Fetch the unicode name for jamo characters.
|
entailment
|
def is_jamo(character):
    """Test if a single character is a jamo character.

    Valid jamo includes all modern and archaic jamo, as well as all HCJ.
    Non-assigned code points are invalid.
    """
    code = ord(character)
    # Jamo blocks: main, Extended-A, and the two Extended-B sub-ranges.
    jamo_ranges = ((0x1100, 0x11FF), (0xA960, 0xA97C),
                   (0xD7B0, 0xD7C6), (0xD7CB, 0xD7FB))
    return any(lo <= code <= hi for lo, hi in jamo_ranges) or \
        is_hcj(character)
|
Test if a single character is a jamo character.
Valid jamo includes all modern and archaic jamo, as well as all HCJ.
Non-assigned code points are invalid.
|
entailment
|
def is_jamo_modern(character):
    """Test if a single character is a modern jamo character.

    Modern jamo includes all U+11xx jamo in addition to HCJ in modern usage,
    as defined in Unicode 7.0.

    WARNING: U+1160 is NOT considered a modern jamo character, but it is
    listed under 'Medial Vowels' in the Unicode 7.0 spec.
    """
    code = ord(character)
    # Modern leads, vowels, and tails respectively.
    modern_ranges = ((0x1100, 0x1112), (0x1161, 0x1175), (0x11A8, 0x11C2))
    if any(lo <= code <= hi for lo, hi in modern_ranges):
        return True
    return is_hcj_modern(character)
|
Test if a single character is a modern jamo character.
Modern jamo includes all U+11xx jamo in addition to HCJ in modern usage,
as defined in Unicode 7.0.
WARNING: U+1160 is NOT considered a modern jamo character, but it is listed
under 'Medial Vowels' in the Unicode 7.0 spec.
|
entailment
|
def is_jamo_compound(character):
    """Test if a single character is a compound jamo, i.e., a consonant
    cluster, double consonant, or dipthong.
    """
    # Consider instead:
    # raise TypeError('is_jamo_compound() expected a single character')
    if len(character) == 1 and is_jamo(character):
        return character in JAMO_COMPOUNDS
    return False
|
Test if a single character is a compound, i.e., a consonant
cluster, double consonant, or dipthong.
|
entailment
|
def get_jamo_class(jamo):
    """Determine if a jamo character is a lead, vowel, or tail.

    Integers and U+11xx characters are valid arguments. HCJ consonants are
    not valid here.

    Returns one of "lead" | "vowel" | "tail" for a given character.

    Note: jamo class directly corresponds to the Unicode 7.0 specification,
    thus includes filler characters (U+115F, U+1160) as having a class.

    :raises InvalidJamoError: for classless or invalid input.
    """
    # TODO: Perhaps raise a separate error for U+3xxx jamo.
    if jamo in JAMO_LEADS or jamo == chr(0x115F):
        return "lead"
    if (jamo in JAMO_VOWELS or jamo == chr(0x1160)
            or 0x314F <= ord(jamo) <= 0x3163):
        return "vowel"
    if jamo in JAMO_TAILS:
        return "tail"
    raise InvalidJamoError("Invalid or classless jamo argument.", jamo)
|
Determine if a jamo character is a lead, vowel, or tail.
Integers and U+11xx characters are valid arguments. HCJ consonants are not
valid here.
get_jamo_class should return the class ["lead" | "vowel" | "tail"] of a
given character or integer.
Note: jamo class directly corresponds to the Unicode 7.0 specification,
thus includes filler characters as having a class.
|
entailment
|
def hcj_to_jamo(hcj_char, position="vowel"):
    """Convert a HCJ character to a jamo character.

    :param hcj_char: the HCJ character to convert.
    :param position: the desired jamo class: "lead", "vowel", or "tail".
    :raises InvalidJamoError: if ``position`` is not a valid jamo class.

    Non-mappable input is echoed back unchanged.
    """
    positions = {"lead": "CHOSEONG",
                 "vowel": "JUNGSEONG",
                 "tail": "JONGSEONG"}
    if position not in positions:
        raise InvalidJamoError("No mapping from input to jamo.", hcj_char)
    jamo_class = positions[position]
    # Raw string for the pattern: "\w" in a non-raw literal is an invalid
    # escape sequence (DeprecationWarning today, SyntaxError in future
    # CPython versions); the pattern value itself is unchanged.
    jamo_name = re.sub(r"(?<=HANGUL )(\w+)",
                       jamo_class,
                       _get_unicode_name(hcj_char))
    # TODO: add tests that test non entries.
    if jamo_name in _JAMO_REVERSE_LOOKUP:
        return _JAMO_REVERSE_LOOKUP[jamo_name]
    return hcj_char
|
Convert a HCJ character to a jamo character.
Arguments may be single characters along with the desired jamo class
(lead, vowel, tail). Non-mappable input will raise an InvalidJamoError.
|
entailment
|
def hangul_to_jamo(hangul_string):
    """Convert a string of Hangul to jamo.

    Arguments may be iterables of characters.

    hangul_to_jamo should split every Hangul character into U+11xx jamo
    characters for any given string. Non-hangul characters are not changed.

    hangul_to_jamo is the lazy (iterator) version of h2j, the string version.
    """
    # chain.from_iterable already yields lazily; the original additionally
    # wrapped it in a redundant ``(_ for _ in ...)`` generator expression,
    # which added a level of indirection without changing iteration behavior.
    return chain.from_iterable(
        _hangul_char_to_jamo(char) for char in hangul_string)
|
Convert a string of Hangul to jamo.
Arguments may be iterables of characters.
hangul_to_jamo should split every Hangul character into U+11xx jamo
characters for any given string. Non-hangul characters are not changed.
hangul_to_jamo is the generator version of h2j, the string version.
|
entailment
|
def jamo_to_hangul(lead, vowel, tail=''):
    """Return the Hangul character for the given jamo input.

    Integers corresponding to U+11xx jamo codepoints, U+11xx jamo characters,
    or HCJ are valid inputs.

    Outputs a one-character Hangul string.

    This function is identical to j2h.

    :raises InvalidJamoError: if the parts cannot be composed into Hangul.
    """
    # Internally, we convert everything to a jamo char,
    # then pass it to _jamo_to_hangul_char
    # hcj_to_jamo echoes back input it cannot map, so jamo input passes
    # through these two calls unchanged.
    lead = hcj_to_jamo(lead, "lead")
    vowel = hcj_to_jamo(vowel, "vowel")
    # '' (the default) and NUL both mean "no tail consonant".
    if not tail or ord(tail) == 0:
        tail = None
    elif is_hcj(tail):
        tail = hcj_to_jamo(tail, "tail")
    # Validate each component's jamo class before composing; any mismatch
    # falls through to the InvalidJamoError below.
    if (is_jamo(lead) and get_jamo_class(lead) == "lead") and\
            (is_jamo(vowel) and get_jamo_class(vowel) == "vowel") and\
            ((not tail) or (is_jamo(tail) and get_jamo_class(tail) == "tail")):
        result = _jamo_to_hangul_char(lead, vowel, tail)
        # Sanity-check that the arithmetic produced a valid syllable.
        if is_hangul_char(result):
            return result
    raise InvalidJamoError("Could not synthesize characters to Hangul.",
                           '\x00')
|
Return the Hangul character for the given jamo input.
Integers corresponding to U+11xx jamo codepoints, U+11xx jamo characters,
or HCJ are valid inputs.
Outputs a one-character Hangul string.
This function is identical to j2h.
|
entailment
|
def decompose_jamo(compound):
    """Return a tuple of jamo character constituents of a compound.

    Note: Non-compound characters are echoed back.

    WARNING: Archaic jamo compounds will raise NotImplementedError.

    :raises TypeError: if ``compound`` is not a single character.
    """
    if len(compound) != 1:
        # One formatted message instead of the original comma-separated
        # fragments, which built a TypeError whose payload was a tuple and
        # rendered unreadably.
        raise TypeError(
            "decompose_jamo() expects a single character, but received "
            "{} of length {}".format(type(compound), len(compound)))
    if compound not in JAMO_COMPOUNDS:
        # Strict version:
        # raise TypeError("decompose_jamo() expects a compound jamo,",
        #                 "but received", compound)
        return compound
    return _JAMO_TO_COMPONENTS.get(compound, compound)
|
Return a tuple of jamo character constituents of a compound.
Note: Non-compound characters are echoed back.
WARNING: Archaic jamo compounds will raise NotImplementedError.
|
entailment
|
def compose_jamo(*parts):
    """Return the compound jamo for the given jamo input.

    Integers corresponding to U+11xx jamo codepoints, U+11xx jamo
    characters, or HCJ are valid inputs.

    Outputs a one-character jamo string.

    :raises TypeError: if not given 2-3 single-character strings.
    :raises InvalidJamoError: if the parts form no known compound.
    """
    # NOTE: Relies on hcj_to_jamo not strictly requiring "position" arg.
    # Validate the arity once, up front. The original performed the arity
    # check inside the per-part loop, so calling with zero parts skipped
    # validation entirely and fell through to the lookup below.
    if not 2 <= len(parts) <= 3 or \
            any(not (type(p) == str and len(p) == 1) for p in parts):
        raise TypeError("compose_jamo() expected 2-3 single characters " +
                        "but received " + str(parts),
                        '\x00')
    hcparts = tuple(j2hcj(p) for p in parts)
    if hcparts in _COMPONENTS_REVERSE_LOOKUP:
        return _COMPONENTS_REVERSE_LOOKUP[hcparts]
    raise InvalidJamoError(
        "Could not synthesize characters to compound: " + ", ".join(
            str(_) + "(U+" + str(hex(ord(_)))[2:] +
            ")" for _ in parts), '\x00')
|
Return the compound jamo for the given jamo input.
Integers corresponding to U+11xx jamo codepoints, U+11xx jamo
characters, or HCJ are valid inputs.
Outputs a one-character jamo string.
|
entailment
|
def synth_hangul(string):
    """Convert jamo characters in a string into hcj as much as possible.

    WARNING: Not implemented yet; always raises NotImplementedError.
    """
    # The original kept an unreachable join() expression after the raise;
    # that dead code has been removed.
    raise NotImplementedError
|
Convert jamo characters in a string into hcj as much as possible.
|
entailment
|
def authorization_error_class(response):
    """Return an exception instance that maps to the OAuth Error.

    :param response: The HTTP response containing a www-authenticate error.
    """
    www_authenticate = response.headers.get("www-authenticate")
    # With a www-authenticate header, the error name follows the last "=";
    # otherwise fall back to mapping by HTTP status code.
    error = (www_authenticate.replace('"', "").rsplit("=", 1)[1]
             if www_authenticate else response.status_code)
    return _auth_error_mapping[error](response)
|
Return an exception instance that maps to the OAuth Error.
:param response: The HTTP response containing a www-authenticate error.
|
entailment
|
def _last_bookmark(b0, b1):
""" Return the latest of two bookmarks by looking for the maximum
integer value following the last colon in the bookmark string.
"""
n = [None, None]
_, _, n[0] = b0.rpartition(":")
_, _, n[1] = b1.rpartition(":")
for i in range(2):
try:
n[i] = int(n[i])
except ValueError:
raise ValueError("Invalid bookmark: {}".format(b0))
return b0 if n[0] > n[1] else b1
|
Return the latest of two bookmarks by looking for the maximum
integer value following the last colon in the bookmark string.
|
entailment
|
def last_bookmark(bookmarks):
    """ The bookmark returned by the last :class:`.Transaction`.

    Returns None for an empty iterable.
    """
    latest = None
    for bookmark in bookmarks:
        # Fold pairwise, keeping whichever bookmark is later.
        latest = bookmark if latest is None \
            else _last_bookmark(latest, bookmark)
    return latest
|
The bookmark returned by the last :class:`.Transaction`.
|
entailment
|
def connect(address, **config):
    """ Connect and perform a handshake and return a valid Connection object, assuming
    a protocol version can be agreed.

    :param address: (host, port) tuple to connect to.
    :param config: keyword settings; consumed here: "resolver" (optional
        custom resolver); the rest is passed to the SSL/connect/handshake
        helpers.
    :return: an open Connection for the first resolved address that succeeds.
    :raise ServiceUnavailable: if no addresses could be resolved at all.
    """
    ssl_context = make_ssl_context(**config)
    last_error = None
    # Establish a connection to the host and port specified
    # Catches refused connections see:
    # https://docs.python.org/2/library/errno.html
    log_debug("[#0000] C: <RESOLVE> %s", address)
    resolver = Resolver(custom_resolver=config.get("resolver"))
    resolver.addresses.append(address)
    resolver.custom_resolve()
    resolver.dns_resolve()
    # Try each resolved address in turn; the first successful handshake wins.
    for resolved_address in resolver.addresses:
        try:
            s = _connect(resolved_address, **config)
            # NOTE: TLS is established against the *original* host name
            # (address[0]), not the resolved IP, so certificate
            # verification matches what the caller asked for.
            s, der_encoded_server_certificate = _secure(s, address[0], ssl_context)
            connection = _handshake(s, resolved_address, der_encoded_server_certificate, **config)
        except Exception as error:
            # Remember the most recent failure; re-raised if every
            # address fails.
            last_error = error
        else:
            return connection
    if last_error is None:
        # The resolver produced no addresses at all.
        raise ServiceUnavailable("Failed to resolve addresses for %s" % address)
    else:
        raise last_error
|
Connect and perform a handshake and return a valid Connection object, assuming
a protocol version can be agreed.
|
entailment
|
def _append(self, signature, fields=(), response=None):
    """ Add a message to the outgoing queue.

    :arg signature: the signature of the message
    :arg fields: the fields of the message as a tuple
    :arg response: a response object to handle callbacks
    """
    self.packer.pack_struct(signature, fields)
    # Two chunk() calls: presumably the first terminates the message data
    # chunk and the second emits the end-of-message marker.
    # NOTE(review): inferred from Bolt-style chunked framing — confirm
    # against the output_buffer implementation.
    self.output_buffer.chunk()
    self.output_buffer.chunk()
    # Queue the response handler in send order so replies can be
    # correlated FIFO in _fetch().
    self.responses.append(response)
|
Add a message to the outgoing queue.
:arg signature: the signature of the message
:arg fields: the fields of the message as a tuple
:arg response: a response object to handle callbacks
|
entailment
|
def reset(self):
    """ Add a RESET message to the outgoing queue, send
    it and consume all remaining messages.

    :raise ProtocolError: if the server reports the RESET failed.
    """
    def on_failure(metadata):
        # A failed RESET leaves the connection in an unknown state.
        raise ProtocolError("RESET failed %r" % metadata)

    log_debug("[#%04X] C: RESET", self.local_port)
    response = Response(self, on_failure=on_failure)
    self._append(b"\x0F", response=response)
    self.sync()
|
Add a RESET message to the outgoing queue, send
it and consume all remaining messages.
|
entailment
|
def _send(self):
""" Send all queued messages to the server.
"""
data = self.output_buffer.view()
if not data:
return
if self.closed():
raise self.Error("Failed to write to closed connection {!r}".format(self.server.address))
if self.defunct():
raise self.Error("Failed to write to defunct connection {!r}".format(self.server.address))
self.socket.sendall(data)
self.output_buffer.clear()
|
Send all queued messages to the server.
|
entailment
|
def _fetch(self):
    """ Receive at least one message from the server, if available.

    :return: 2-tuple of number of detail messages and number of summary messages fetched
    :raise Error: if the connection is closed or defunct.
    :raise ProtocolError: on an unrecognised summary signature.
    """
    if self.closed():
        raise self.Error("Failed to read from closed connection {!r}".format(self.server.address))
    if self.defunct():
        raise self.Error("Failed to read from defunct connection {!r}".format(self.server.address))
    if not self.responses:
        # Nothing outstanding to correlate a server reply with.
        return 0, 0
    self._receive()
    details, summary_signature, summary_metadata = self._unpack()
    if details:
        log_debug("[#%04X] S: RECORD * %d", self.local_port, len(details)) # TODO
        # Detail (RECORD) messages always belong to the oldest
        # outstanding response.
        self.responses[0].on_records(details)
    if summary_signature is None:
        # Only records arrived; the request is still in flight.
        return len(details), 0
    # A summary message completes the oldest outstanding response.
    response = self.responses.popleft()
    response.complete = True
    if summary_signature == b"\x70":
        # SUCCESS
        log_debug("[#%04X] S: SUCCESS %r", self.local_port, summary_metadata)
        response.on_success(summary_metadata or {})
    elif summary_signature == b"\x7E":
        # IGNORED: server skipped the request (e.g. after a failure).
        self._last_run_statement = None
        log_debug("[#%04X] S: IGNORED", self.local_port)
        response.on_ignored(summary_metadata or {})
    elif summary_signature == b"\x7F":
        # FAILURE
        self._last_run_statement = None
        log_debug("[#%04X] S: FAILURE %r", self.local_port, summary_metadata)
        response.on_failure(summary_metadata or {})
    else:
        self._last_run_statement = None
        raise ProtocolError("Unexpected response message with signature %02X" % summary_signature)
    return len(details), 1
|
Receive at least one message from the server, if available.
:return: 2-tuple of number of detail messages and number of summary messages fetched
|
entailment
|
def sync(self):
    """ Send and fetch all outstanding messages.

    :return: 2-tuple of number of detail messages and number of summary messages fetched
    """
    self.send()
    detail_count = 0
    summary_count = 0
    # Drain until every queued response has received its summary.
    while self.responses:
        head = self.responses[0]
        while not head.complete:
            details, summaries = self.fetch()
            detail_count += details
            summary_count += summaries
    return detail_count, summary_count
|
Send and fetch all outstanding messages.
:return: 2-tuple of number of detail messages and number of summary messages fetched
|
entailment
|
def close(self):
    """ Close the connection.

    Idempotent: subsequent calls are no-ops once ``_closed`` is set.
    """
    if not self._closed:
        if self.protocol_version >= 3:
            # Bolt v3+ expects an explicit GOODBYE (0x02) before closing.
            log_debug("[#%04X] C: GOODBYE", self.local_port)
            self._append(b"\x02", ())
            try:
                self.send()
            except ServiceUnavailable:
                # Best effort: the server may already be gone.
                pass
        log_debug("[#%04X] C: <CLOSE>", self.local_port)
        try:
            self.socket.close()
        except IOError:
            pass
        finally:
            # Mark closed even if the socket refused to close cleanly.
            self._closed = True
|
Close the connection.
|
entailment
|
def acquire_direct(self, address):
    """ Acquire a connection to a given address from the pool.
    The address supplied should always be an IP address, not
    a host name.

    This method is thread safe.

    :raise ServiceUnavailable: if the pool is closed or connecting fails.
    :raise ClientError: if no connection becomes free within the
        configured acquisition timeout.
    """
    if self.closed():
        raise ServiceUnavailable("Connection pool closed")
    with self.lock:
        try:
            connections = self.connections[address]
        except KeyError:
            connections = self.connections[address] = deque()
        connection_acquisition_start_timestamp = perf_counter()
        while True:
            # try to find a free connection in pool
            for connection in list(connections):
                # Evict connections that are no longer usable while
                # scanning for a free one.
                if connection.closed() or connection.defunct() or connection.timedout():
                    connections.remove(connection)
                    continue
                if not connection.in_use:
                    connection.in_use = True
                    return connection
            # all connections in pool are in-use
            infinite_connection_pool = (self._max_connection_pool_size < 0 or
                                        self._max_connection_pool_size == float("inf"))
            can_create_new_connection = infinite_connection_pool or len(connections) < self._max_connection_pool_size
            if can_create_new_connection:
                try:
                    connection = self.connector(address)
                except ServiceUnavailable:
                    # Connecting failed: drop every pooled connection for
                    # this address before propagating.
                    self.remove(address)
                    raise
                else:
                    connection.pool = self
                    connection.in_use = True
                    connections.append(connection)
                    return connection
            # failed to obtain a connection from pool because the pool is full and no free connection in the pool
            span_timeout = self._connection_acquisition_timeout - (perf_counter() - connection_acquisition_start_timestamp)
            if span_timeout > 0:
                # Wait for release() to notify, then loop and rescan.
                self.cond.wait(span_timeout)
                # if timed out, then we throw error. This time computation is needed, as with python 2.7, we cannot
                # tell if the condition is notified or timed out when we come to this line
                if self._connection_acquisition_timeout <= (perf_counter() - connection_acquisition_start_timestamp):
                    raise ClientError("Failed to obtain a connection from pool within {!r}s".format(
                        self._connection_acquisition_timeout))
            else:
                raise ClientError("Failed to obtain a connection from pool within {!r}s".format(self._connection_acquisition_timeout))
|
Acquire a connection to a given address from the pool.
The address supplied should always be an IP address, not
a host name.
This method is thread safe.
|
entailment
|
def release(self, connection):
    """ Return a connection to the pool, marking it free.

    This method is thread safe.
    """
    with self.lock:
        connection.in_use = False
        # Wake every thread blocked in acquire_direct() waiting for a
        # free connection.
        self.cond.notify_all()
|
Release a connection back into the pool.
This method is thread safe.
|
entailment
|
def in_use_connection_count(self, address):
    """ Count the number of connections currently in use to a given
    address. Unknown addresses count as zero.
    """
    pooled = self.connections.get(address, ())
    return sum(1 for connection in pooled if connection.in_use)
|
Count the number of connections currently in use to a given
address.
|
entailment
|
def deactivate(self, address):
    """ Deactivate an address from the connection pool, if present, closing
    all idle connection to that address
    """
    with self.lock:
        if address not in self.connections:
            # already removed from the connection pool
            return
        connections = self.connections[address]
        # Iterate a snapshot so removals do not disturb the walk.
        for conn in list(connections):
            if conn.in_use:
                continue
            connections.remove(conn)
            try:
                conn.close()
            except IOError:
                # Best-effort close; a broken socket is already gone.
                pass
        if not connections:
            self.remove(address)
|
Deactivate an address from the connection pool, if present, closing
all idle connection to that address
|
entailment
|
def remove(self, address):
    """ Drop an address from the connection pool, closing every
    connection held for it. Unknown addresses are ignored.

    This method is thread safe.
    """
    with self.lock:
        stale = self.connections.pop(address, ())
        for conn in stale:
            try:
                conn.close()
            except IOError:
                # Best-effort close; a broken socket is already gone.
                pass
|
Remove an address from the connection pool, if present, closing
all connections to that address.
|
entailment
|
def close(self):
    """ Close all connections and empty the pool.

    This method is thread safe and idempotent.
    """
    if self._closed:
        return
    try:
        with self.lock:
            # Double-checked under the lock so exactly one thread runs
            # the teardown.
            if not self._closed:
                self._closed = True
                # Iterate over a snapshot: remove() mutates self.connections.
                for address in list(self.connections):
                    self.remove(address)
    except TypeError as e:
        # NOTE(review): silently swallows TypeError during teardown —
        # presumably a guard against interpreter-shutdown conditions where
        # globals are already gone; the bound exception is unused. Confirm
        # whether this guard is still required.
        pass
|
Close all connections and empty the pool.
This method is thread safe.
|
entailment
|
def on_records(self, records):
    """ Called when one or more RECORD messages have been received.

    Dispatches to the registered "on_records" handler, if any.
    """
    callback = self.handlers.get("on_records")
    if callable(callback):
        callback(records)
|
Called when one or more RECORD messages have been received.
|
entailment
|
def on_success(self, metadata):
    """ Called when a SUCCESS message has been received.

    Fires "on_success" with the metadata, then "on_summary" with no
    arguments, whichever of the two are registered.
    """
    for key, args in (("on_success", (metadata,)), ("on_summary", ())):
        callback = self.handlers.get(key)
        if callable(callback):
            callback(*args)
|
Called when a SUCCESS message has been received.
|
entailment
|
def on_failure(self, metadata):
    """ Called when a FAILURE message has been received.

    Resets the connection, fires "on_failure" then "on_summary"
    handlers, and finally raises the hydrated CypherError.
    """
    # The connection must be reset before anything else so it is usable
    # again after the failure.
    self.connection.reset()
    for key, args in (("on_failure", (metadata,)), ("on_summary", ())):
        callback = self.handlers.get(key)
        if callable(callback):
            callback(*args)
    raise CypherError.hydrate(**metadata)
|
Called when a FAILURE message has been received.
|
entailment
|
def on_ignored(self, metadata=None):
    """ Called when an IGNORED message has been received.

    Fires "on_ignored" with the metadata, then "on_summary" with no
    arguments, whichever of the two are registered.
    """
    for key, args in (("on_ignored", (metadata,)), ("on_summary", ())):
        callback = self.handlers.get(key)
        if callable(callback):
            callback(*args)
|
Called when an IGNORED message has been received.
|
entailment
|
def cached_property(prop):
    """
    A replacement for the property decorator that will only compute the
    attribute's value on the first call and serve a cached copy from
    then on.
    """
    name = prop.__name__

    def cache_wrapper(self):
        if not hasattr(self, "_cache"):
            self._cache = {}
        try:
            return self._cache[name]
        except KeyError:
            value = prop(self)
            # Generators can only be consumed once; materialize them so
            # the cached value can be served repeatedly.
            if isgenerator(value):
                value = tuple(value)
            self._cache[name] = value
            return value

    return property(cache_wrapper)
|
A replacement for the property decorator that will only compute the
attribute's value on the first call and serve a cached copy from
then on.
|
entailment
|
def _convert_value_to_native(value):
    """
    Converts pysnmp objects into native Python objects.

    Unrecognised types are returned unchanged.
    """
    # Check order matters: some pysnmp types subclass others, so the
    # sequence mirrors the original branch order exactly.
    for integer_type in (Counter32, Counter64, Gauge32, Integer,
                         Integer32, Unsigned32):
        if isinstance(value, integer_type):
            return int(value.prettyPrint())
    if isinstance(value, IpAddress):
        return str(value.prettyPrint())
    if isinstance(value, OctetString):
        try:
            return value.asOctets().decode(value.encoding)
        except UnicodeDecodeError:
            # Not valid text in the declared encoding; hand back raw bytes.
            return value.asOctets()
    if isinstance(value, TimeTicks):
        # TimeTicks are hundredths of a second.
        return timedelta(seconds=int(value.prettyPrint()) / 100.0)
    return value
|
Converts pysnmp objects into native Python objects.
|
entailment
|
def get(self, oid):
    """
    Get a single OID value.

    :param oid: the OID to query.
    :return: the value converted to a native Python type.
    :raises SNMPError: on transport, engine or PDU errors.
    """
    snmpsecurity = self._get_snmp_security()
    try:
        engine_error, pdu_error, pdu_error_index, objects = self._cmdgen.getCmd(
            snmpsecurity,
            cmdgen.UdpTransportTarget((self.host, self.port), timeout=self.timeout,
                                      retries=self.retries),
            oid,
        )
    except Exception as e:
        # Wrap transport-level failures in the library's error type.
        raise SNMPError(e)
    if engine_error:
        raise SNMPError(engine_error)
    if pdu_error:
        raise SNMPError(pdu_error.prettyPrint())
    # A GET returns exactly one varbind; discard its OID, keep the value.
    _, value = objects[0]
    value = _convert_value_to_native(value)
    return value
|
Get a single OID value.
|
entailment
|
def set(self, oid, value, value_type=None):
    """
    Sets a single OID value. If you do not pass value_type hnmp will
    try to guess the correct type. Autodetection is supported for:

    * int and float (as Integer, fractional part will be discarded)
    * IPv4 address (as IpAddress)
    * str (as OctetString)

    Unfortunately, pysnmp does not support the SNMP FLOAT type so
    please use Integer instead.

    :param oid: the OID to set.
    :param value: the value to write.
    :param value_type: optional key into TYPES, overriding autodetection.
    :raises TypeError: if the type cannot be autodetected.
    :raises ValueError: if value_type names an unsupported type.
    :raises SNMPError: on transport, engine or PDU errors.
    """
    snmpsecurity = self._get_snmp_security()

    if value_type is None:
        if isinstance(value, int):
            data = Integer(value)
        elif isinstance(value, float):
            # pysnmp has no FLOAT type; the fractional part is discarded.
            data = Integer(value)
        elif isinstance(value, str):
            if is_ipv4_address(value):
                data = IpAddress(value)
            else:
                data = OctetString(value)
        else:
            # BUG FIX: the original was missing the "+" before the join
            # call, so ", ".join(...) was invoked on the whole
            # concatenated message string, producing garbage.
            raise TypeError(
                "Unable to autodetect type. Please pass one of "
                "these strings as the value_type keyword arg: "
                + ", ".join(TYPES.keys())
            )
    else:
        if value_type not in TYPES:
            raise ValueError("'{}' is not one of the supported types: {}".format(
                value_type,
                ", ".join(TYPES.keys())
            ))
        data = TYPES[value_type](value)

    try:
        engine_error, pdu_error, pdu_error_index, objects = self._cmdgen.setCmd(
            snmpsecurity,
            cmdgen.UdpTransportTarget((self.host, self.port), timeout=self.timeout,
                                      retries=self.retries),
            (oid, data),
        )
    except Exception as e:
        raise SNMPError(e)
    # Error checks live OUTSIDE the try block (consistent with get()).
    # The original checked them inside, so the SNMPError raised for an
    # engine/PDU error was immediately caught and wrapped in a second
    # SNMPError, obscuring the real message.
    if engine_error:
        raise SNMPError(engine_error)
    if pdu_error:
        raise SNMPError(pdu_error.prettyPrint())

    _, value = objects[0]
    value = _convert_value_to_native(value)
    return value
|
Sets a single OID value. If you do not pass value_type hnmp will
try to guess the correct type. Autodetection is supported for:
* int and float (as Integer, fractional part will be discarded)
* IPv4 address (as IpAddress)
* str (as OctetString)
Unfortunately, pysnmp does not support the SNMP FLOAT type so
please use Integer instead.
|
entailment
|
def table(self, oid, columns=None, column_value_mapping=None, non_repeaters=0,
          max_repetitions=20, fetch_all_columns=True):
    """
    Get a table of values with the given OID prefix.

    :param oid: OID prefix of the table.
    :param columns: optional mapping of column ids to names, passed to
        Table and used to select columns when fetch_all_columns is False.
    :param column_value_mapping: optional value translation mapping,
        passed through to Table.
    :param non_repeaters: SNMP GETBULK non-repeaters parameter.
    :param max_repetitions: SNMP GETBULK max-repetitions parameter.
    :param fetch_all_columns: when False, only the OIDs of ``columns``
        are walked (``columns`` is then required).
    :raises SNMPError: on transport, engine or PDU errors.
    :raises ValueError: if fetch_all_columns is False without columns.
    """
    snmpsecurity = self._get_snmp_security()
    base_oid = oid.strip(".")

    if not fetch_all_columns and not columns:
        raise ValueError("please use the columns argument to "
                         "indicate which columns to fetch")

    if fetch_all_columns:
        # A single bulk walk over the whole subtree.
        columns_to_fetch = [""]
    else:
        # One bulk walk per requested column id.
        columns_to_fetch = ["." + str(col_id) for col_id in columns.keys()]

    full_obj_table = []

    for col in columns_to_fetch:
        try:
            engine_error, pdu_error, pdu_error_index, obj_table = self._cmdgen.bulkCmd(
                snmpsecurity,
                cmdgen.UdpTransportTarget((self.host, self.port), timeout=self.timeout,
                                          retries=self.retries),
                non_repeaters,
                max_repetitions,
                oid + col,
            )
        except Exception as e:
            raise SNMPError(e)
        if engine_error:
            raise SNMPError(engine_error)
        if pdu_error:
            raise SNMPError(pdu_error.prettyPrint())

        # remove any trailing rows from the next subtree
        # (bulkCmd may overshoot past the end of the requested prefix)
        try:
            while not str(obj_table[-1][0][0].getOid()).lstrip(".").startswith(
                base_oid + col + "."
            ):
                obj_table.pop()
        except IndexError:
            pass

        # append this column to full result
        full_obj_table += obj_table

    t = Table(columns=columns, column_value_mapping=column_value_mapping)

    for row in full_obj_table:
        for name, value in row:
            # Row position is encoded in the OID suffix:
            # <base_oid>.<column>.<row_id>
            oid = str(name.getOid()).strip(".")
            value = _convert_value_to_native(value)
            column, row_id = oid[len(base_oid) + 1:].split(".", 1)
            t._add_value(int(column), row_id, value)

    return t
|
Get a table of values with the given OID prefix.
|
entailment
|
def get_parser():
    """Load parser for command line arguments.

    It parses argv/input into args variable.

    :return: an argparse.ArgumentParser expecting the positional
        arguments (mode, torr_page, str_search).
    """
    # Banner/description shown by --help, colorized via Colors constants.
    desc = Colors.LIGHTBLUE + textwrap.dedent(
        '''\
        Welcome to
                    _         _
          __ _ _   _| |_ ___   _ __  _   _  | |_ ___  _ __ _ __ ___ _ __ | |_
         / _` | | | | __/ _ \ | '_ \| | | | | __/ _ \| '__| '__/ _ \ '_ \| __|
        | (_| | |_| | || (_) || |_) | |_| | | || (_) | |  | | |  __/ | | | |_
         \__,_|\__,_|\__\___/____| .__/ \__, |___\__\___/|_|  |_| \___|_| |_|\__|
                            |_____|_|   |___/_____|

        ------------------------------------
        auto_py_torrent is an automated tool for download files by obtaining
        torrents or magnets that are in different provided pages that the
        user can choose.

        Its goal is to make it easier for users to find the files they want
        and download them instantly.

        An auto_py_torrent command is provided in which the user can
        currently choose between two modes, best_rated and list mode, then it
        selects one of the torrent tracking pages for multimedia content and
        finally enter the text of what you want to download.
        ------------------------------------
        ''') + Colors.ENDC

    # Usage examples and the mode / page option catalogue.
    usage_info = Colors.LGREEN + textwrap.dedent(
        '''\
        Use "%(prog)s --help" for more information.
        Examples:
            use "%(prog)s MODE SELECTED_PAGE STRING_TO_SEARCH  # generic.
            use "%(prog)s 0 0 "The simpsons"  # best rated.
            use "%(prog)s 1 0 "The simpsons"  # list rated.
        Mode options:
            0: best_rated.  # Download the most rated file.
            1: list.  # Get a list, and select one of them.
        Page list options:
            0: torrent project.
            1: the pirate bay.
            2: 1337x.
            3: eztv.
            4: limetorrents.
            5: isohunt.
        ''') + Colors.ENDC

    epi = Colors.LIGHTPURPLE + textwrap.dedent(
        '''\
        -> Thanks for using auto_py_torrent!
        ''') + Colors.ENDC

    # Parent and only parser.
    parser = argparse.ArgumentParser(
        add_help=True,
        formatter_class=argparse.RawTextHelpFormatter,
        usage=usage_info,
        description=desc,
        epilog=epi)

    # Three required positionals; choices are derived from the MODES and
    # TORRENTS tables so the CLI stays in sync with supported options.
    parser.add_argument('mode', action='store',
                        choices=range(len(MODES)),
                        type=int,
                        help='Select mode of file download.\n'
                             ' e.g: 0(rated) or 1(list).')

    parser.add_argument('torr_page', action='store',
                        choices=range(len(TORRENTS)),
                        type=int,
                        help='Select tracking page to download from.\n'
                             ' e.g: 0 to .. ' + str(len(TORRENTS)-1) + '.')

    parser.add_argument('str_search', action='store',
                        type=str,
                        help='Input torrent string to search.\n'
                             ' e.g: "String search"')

    return(parser)
|
Load parser for command line arguments.
It parses argv/input into args variable.
|
entailment
|
def insert(args):
    """Insert args values into instance variables.

    Returns [args, string_search, mode_search, page, key_search,
    torrent_page, domain], the positional constructor arguments of AutoPy.
    """
    page = list(TORRENTS[args.torr_page].keys())[0]
    page_info = TORRENTS[args.torr_page][page]
    return([args,
            args.str_search,
            MODES[args.mode],
            page,
            page_info['key_search'],
            page_info['page'],
            page_info['domain']])
|
Insert args values into instance variables.
|
entailment
|
def run_it():
    """Search and download torrents until the user says it so.

    The first iteration parses sys.argv; subsequent iterations prompt the
    user interactively and re-parse the typed line. Typing Q/q exits.
    """
    initialize()
    parser = get_parser()
    args = None
    first_parse = True
    while True:  # idiom: "while True", not "while(True)"
        if first_parse:  # idiom: no "is True" comparison on a bool
            first_parse = False
            args = parser.parse_args()
        else:
            print(textwrap.dedent(
                '''\
                Search again like in the beginning.
                -- You can either choose best rated or list mode.
                -- This time, you can insert the search string without double quotes.
                Remember the list mode options!
                0: torrent project.
                1: the pirate bay.
                2: 1337x.
                3: eztv.
                4: limetorrents.
                5: isohunt.
                '''))
            print('Or.. if you want to exit just write "' +
                  Colors.LRED + 'Q' + Colors.ENDC + '" or "' +
                  Colors.LRED + 'q' + Colors.ENDC + '".')
            input_parse = input('>> ').replace("'", "").replace('"', '')
            if input_parse in ['Q', 'q']:
                sys.exit(1)
            # Split into at most 3 fields so the search string may contain
            # spaces.
            args = parser.parse_args(input_parse.split(' ', 2))
        if args.str_search.strip() == "":
            # Typo fixed in the user-facing message:
            # "appropiate" -> "appropriate".
            print('Please insert an appropriate non-empty string.')
        else:
            auto = AutoPy(*insert(args))
            auto.get_content()
            auto.select_torrent()
            auto.download_torrent()
|
Search and download torrents until the user says it so.
|
entailment
|
def open_magnet(self):
    """Open magnet according to os."""
    platform = sys.platform
    if platform.startswith(('win32', 'cygwin')):
        # Windows and Cygwin both hand the link to the default handler.
        os.startfile(self.magnet)
    elif platform.startswith('darwin'):
        subprocess.Popen(['open', self.magnet],
                         stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    else:
        # Linux and any other platform fall back to xdg-open.
        subprocess.Popen(['xdg-open', self.magnet],
                         stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
Open magnet according to os.
|
entailment
|
def get_magnet(self, url):
    """Get magnet from torrent page. Url already got domain.

    Fetches the torrent detail page and extracts the magnet link into
    ``self.magnet`` using a per-tracker selector. Exits the program if
    ``self.page`` is not one of the trackers that embed magnets on the
    detail page.
    """
    content_most_rated = requests.get(url)
    rated_soup = BeautifulSoup(content_most_rated.content, 'lxml')
    if self.page == 'torrent_project':
        # Magnet is the href of the "Download" anchor.
        self.magnet = rated_soup.find(
            'a', href=True, text=re.compile('Download'))['href']
    elif self.page == 'the_pirate_bay':
        # Magnet is the href of the "Get this torrent" anchor.
        self.magnet = rated_soup.find(
            'a', href=True, text=re.compile('Get this torrent'))['href']
    elif self.page == '1337x':
        # Magnet lives inside the torrent-category-detail container.
        div1337 = rated_soup.find(
            'div', {'class': 'torrent-category-detail'})
        self.magnet = div1337.find('a', href=re.compile('magnet'))['href']
    elif self.page == 'isohunt':
        # First anchor whose href contains "magnet".
        self.magnet = rated_soup.find(
            'a', href=re.compile('magnet'))['href']
    else:
        print('Wrong page to get magnet!')
        sys.exit(1)
|
Get magnet from torrent page. Url already got domain.
|
entailment
|
def download_torrent(self):
    """Download torrent.
    Rated implies download the unique best rated torrent found.
    Otherwise: get the magnet and download it.

    In list mode, eztv/limetorrents store magnets directly in
    ``self.hrefs``; the other trackers store detail-page URLs that
    must be fetched first via ``get_magnet``.
    """
    try:
        if self.back_to_menu:
            # User asked to return to the menu; nothing to download.
            return
        if not self.found_torrents:
            print('Nothing found.')
            return
        if self.mode_search == 'best_rated':
            # Magnet was already resolved during soupify().
            print('Downloading..')
            self.open_magnet()
        elif self.mode_search == 'list':
            if self.selected is not None:
                # t_p, pirate and 1337x got magnet inside, else direct.
                if self.page in ['eztv', 'limetorrents']:
                    self.magnet = self.hrefs[int(self.selected)]
                    print('Downloading..')
                    self.open_magnet()
                elif self.page in ['the_pirate_bay',
                                   'torrent_project',
                                   '1337x',
                                   'isohunt']:
                    url = self.hrefs[int(self.selected)]
                    self.get_magnet(url)
                    print('Downloading..')
                    self.open_magnet()
                else:
                    print('Bad selected page.')
            else:
                print('Nothing selected.')
                sys.exit(1)
    except Exception:
        print(traceback.format_exc())
        # Bug fix: exit with a nonzero status on failure (was exit(0),
        # which reported success to the shell after an error).
        sys.exit(1)
|
Download torrent.
Rated implies download the unique best rated torrent found.
Otherwise: get the magnet and download it.
|
entailment
|
def build_table(self):
    """Build table.

    Extracts title/seeders/leechers/age/size columns from the
    previously scraped ``self.elements`` (set by ``soupify``) for the
    current tracker, fills ``self.hrefs`` with the matching torrent
    detail URLs or magnet links, and prints the result as an indexed
    table so the user can pick a row.
    """
    headers = ['Title', 'Seeders', 'Leechers', 'Age', 'Size']
    titles = []
    seeders = []
    leechers = []
    ages = []
    sizes = []
    # Each branch maps the tracker-specific column layout of
    # self.elements onto the five common columns above.
    if self.page == 'torrent_project':
        titles = [list(span.find('a').stripped_strings)[0]
                  for span in self.elements[0]]
        seeders = [span.get_text() for span in self.elements[1]]
        leechers = [span.get_text() for span in self.elements[2]]
        ages = [span.get_text() for span in self.elements[3]]
        sizes = [span.get_text() for span in self.elements[4]]
        # Torrents
        self.hrefs = [self.domain +
                      span.find('a')['href']
                      for span in self.elements[0]]
    elif self.page == 'the_pirate_bay':
        for elem in self.elements[0]:
            title = elem.find('a', {'class': 'detLink'}).get_text()
            titles.append(title)
            # detDesc holds "Uploaded <age>, Size <size>, ULed by ..".
            font_text = elem.find(
                'font', {'class': 'detDesc'}).get_text()
            dammit = UnicodeDammit(font_text)
            age, size = dammit.unicode_markup.split(',')[:-1]
            ages.append(age)
            sizes.append(size)
            # Torrent
            href = self.domain + \
                elem.find('a', title=re.compile('magnet'))['href']
            self.hrefs.append(str(href))
        seeders = [elem.get_text() for elem in self.elements[1]]
        leechers = [elem.get_text() for elem in self.elements[2]]
    elif self.page == '1337x':
        titles = [elem.get_text() for elem in self.elements[0]]
        seeders = [elem.get_text() for elem in self.elements[1]]
        leechers = [elem.get_text() for elem in self.elements[2]]
        ages = [elem.get_text() for elem in self.elements[3]]
        sizes = [elem.get_text('|').split('|')[0]
                 for elem in self.elements[4]]
        # Torrent
        self.hrefs = [self.domain +
                      elem.find(href=re.compile('torrent'))['href']
                      for elem in self.elements[0]]
    elif self.page == 'eztv':
        titles = [elem.get_text() for elem in self.elements[0]]
        seeders = [elem.get_text() for elem in self.elements[4]]
        # eztv exposes no leecher count; show a placeholder.
        leechers = ['-' for elem in self.elements[4]]
        ages = [elem.get_text() for elem in self.elements[3]]
        sizes = [elem.get_text() for elem in self.elements[2]]
        # Magnets
        self.hrefs = [elem.find(href=re.compile('magnet'))['href']
                      for elem in self.elements[1]]
    elif self.page == 'limetorrents':
        titles = [elem.get_text() for elem in self.elements[0]]
        seeders = [elem.get_text() for elem in self.elements[3]]
        leechers = [elem.get_text() for elem in self.elements[4]]
        ages = [elem.get_text() for elem in self.elements[1]]
        sizes = [elem.get_text() for elem in self.elements[2]]
        # Magnets
        self.hrefs = [elem.find('a', href=re.compile('torrent'))['href']
                      for elem in self.elements[0]]
    elif self.page == 'isohunt':
        titles = [elem.get_text() for elem in self.elements[0]]
        seeders = [elem.get_text() for elem in self.elements[5]]
        # isohunt exposes no leecher count; show a placeholder.
        leechers = ['-' for elem in self.elements[5]]
        ages = [elem.get_text() for elem in self.elements[3]]
        sizes = [elem.get_text() for elem in self.elements[4]]
        # Torrents
        self.hrefs = [self.domain +
                      elem.find(href=re.compile('torrent_details'))['href']
                      for elem in self.elements[0]]
    else:
        print('Error page')
    # Alternate row colors: even display rows (1-based) use the bolder
    # palette, odd rows the lighter one. Titles are clipped to 75 chars.
    self.table = [[Colors.BOLD +
                   UnicodeDammit(titles[i][:75].strip(), ["utf-8"]).unicode_markup +
                   Colors.ENDC
                   if (i + 1) % 2 == 0
                   else UnicodeDammit(
                       titles[i][:75].strip()).unicode_markup,
                   Colors.SEEDER + seeders[i].strip() + Colors.ENDC
                   if (i + 1) % 2 == 0
                   else Colors.LGREEN + seeders[i].strip() + Colors.ENDC,
                   Colors.LEECHER + leechers[i].strip() + Colors.ENDC
                   if (i + 1) % 2 == 0
                   else Colors.LRED + leechers[i].strip() + Colors.ENDC,
                   Colors.LIGHTBLUE + ages[i].strip() + Colors.ENDC
                   if (i + 1) % 2 == 0
                   else Colors.BLUE + ages[i].strip() + Colors.ENDC,
                   Colors.PINK + sizes[i].strip() + Colors.ENDC
                   if (i + 1) % 2 == 0
                   else Colors.PURPLE + sizes[i].strip() + Colors.ENDC]
                  for i in range(len(self.hrefs))]
    print(tabulate(self.table,
                   headers=headers,
                   tablefmt='psql',
                   numalign='right',
                   stralign='left',
                   showindex=True))
|
Build table.
|
entailment
|
def soupify(self):
    """Get proper torrent/magnet information.
    If search_mode is rated then get torrent/magnet.
    If not, get all the elements to build the table.
    There are different ways for each page.

    In rated mode the first result's magnet is resolved immediately
    (into ``self.magnet``). In list mode the per-row cells are stored
    column-wise in ``self.elements`` for ``build_table``.
    """
    soup = BeautifulSoup(self.content_page.content, 'lxml')
    if self.page == 'torrent_project':
        main = soup.find('div', {'id': 'similarfiles'})
        if self.mode_search == 'best_rated':
            # First detail-page link; magnet extracted from it.
            rated_url = self.domain + \
                main.find(href=re.compile('torrent.html'))['href']
            self.get_magnet(rated_url)
        else:
            # Skip the two header divs, keep up to 30 result rows.
            divs = main.find_all('div', limit=30)[2:]
            self.elements = list(
                zip(*[d.find_all('span', recursive=False)
                      for d in divs]))  # Torrents
    elif self.page == 'the_pirate_bay':
        main = soup.find('table', {'id': 'searchResult'})
        if self.mode_search == 'best_rated':
            rated_url = self.domain + \
                main.find('a', href=re.compile('torrent'))['href']
            self.get_magnet(rated_url)
        else:
            # Drop the header row; first td per row is a category cell.
            trs = main.find_all('tr', limit=30)[1:]
            self.elements = list(
                zip(*[tr.find_all('td', recursive=False)[1:]
                      for tr in trs]))  # Magnets
    elif self.page == '1337x':
        main = soup.find('table', {'class': 'table'})
        if self.mode_search == 'best_rated':
            rated_url = self.domain + \
                main.find('a', href=re.compile('torrent'))['href']
            self.get_magnet(rated_url)
        else:
            trs = main.find_all('tr', limit=30)[1:]
            self.elements = list(
                zip(*([tr.find_all('td', recursive=False)[:-1]
                       for tr in trs])))  # Torrents
    elif self.page == 'eztv':
        # The results live in the third forum_header_border table.
        main = soup.find_all('table', {'class': 'forum_header_border'})[2]
        if self.mode_search == 'best_rated':
            # eztv embeds magnets directly in the listing.
            self.magnet = main.find('a', href=re.compile('magnet'))['href']
        else:
            trs = main.find_all('tr', limit=30)[2:]
            self.elements = list(
                zip(*([tr.find_all('td', recursive=False)[1:-1]
                       for tr in trs])))  # Magnets
    elif self.page == 'limetorrents':
        main = soup.find('table', {'class': 'table2'})
        if self.mode_search == 'best_rated':
            self.magnet = main.find(
                'a', href=re.compile('torrent'))['href']
        else:
            trs = main.find_all('tr', limit=30)[1:]
            self.elements = list(
                zip(*([tr.find_all('td', recursive=False)[:-1]
                       for tr in trs])))  # Magnets
    elif self.page == 'isohunt':
        main = soup.find('table', {'class': 'table'})
        if self.mode_search == 'best_rated':
            rated_url = self.domain + \
                main.find('a', href=re.compile(
                    'torrent_details'))['href']
            self.get_magnet(rated_url)
        else:
            trs = main.find_all('tr', limit=30)[1:-1]
            self.elements = list(
                zip(*([tr.find_all('td', recursive=False)[1:-1]
                       for tr in trs])))  # Torrent
    else:
        print('Cannot soupify current page. Try again.')
|
Get proper torrent/magnet information.
If search_mode is rated then get torrent/magnet.
If not, get all the elements to build the table.
There are different ways for each page.
|
entailment
|
def handle_select(self):
    """Handle user's input in list mode.

    Returns True when a final choice was made (quit is handled via
    sys.exit, back-to-menu and a valid index both return True) and
    False when the prompt must be shown again.
    """
    self.selected = input('>> ')
    choice = self.selected
    if choice in ['Q', 'q']:
        sys.exit(1)
    if choice in ['B', 'b']:
        self.back_to_menu = True
        return True
    if is_num(choice):
        if 0 <= int(choice) <= len(self.hrefs) - 1:
            self.back_to_menu = False
            return True
        print(Colors.FAIL +
              'Wrong index. ' +
              'Please select an appropiate one or other option.' +
              Colors.ENDC)
        return False
    print(Colors.FAIL +
          'Invalid input. ' +
          'Please select an appropiate one or other option.' +
          Colors.ENDC)
    return False
|
Handle user's input in list mode.
|
entailment
|
def select_torrent(self):
    """Select torrent.
    First check if specific element/info is obtained in content_page.
    Specify to user if it wants best rated torrent or select one from list.
    If the user wants best rated: Directly obtain magnet/torrent.
    Else: build table with all data and enable the user select the torrent.
    """
    try:
        # ``key_search`` appears to be the tracker's "no results"
        # marker text, so results exist only when it is absent.
        # (Simplified from ``not bool(key in text)``.)
        self.found_torrents = self.key_search not in self.content_page.text
        if not self.found_torrents:
            print('No torrents found.')
            sys.exit(1)
        self.soupify()
        if self.mode_search == 'list':
            self.build_table()
            if len(self.hrefs) == 1:
                print('Press "0" to download it.')
            elif len(self.hrefs) >= 2:
                print('\nSelect one of the following torrents. ' +
                      'Enter a number between: 0 and ' +
                      str(len(self.hrefs) - 1))
            print('If you want to exit write "' +
                  Colors.LRED + 'Q' + Colors.ENDC + '" or "' +
                  Colors.LRED + 'q' + Colors.ENDC + '".')
            print('If you want to go back to menu and search again write "' +
                  Colors.LGREEN + 'B' + Colors.ENDC + '" or "' +
                  Colors.LGREEN + 'b' + Colors.ENDC + '".')
            # Re-prompt until the user quits, goes back, or picks a row.
            while not self.picked_choice:
                self.picked_choice = self.handle_select()
    except Exception:
        print('ERROR select_torrent: ')
        logging.error(traceback.format_exc())
        # Bug fix: exit nonzero on failure (was exit(0), which
        # reported success to the shell after an error).
        sys.exit(1)
|
Select torrent.
First check if specific element/info is obtained in content_page.
Specify to user if it wants best rated torrent or select one from list.
If the user wants best rated: Directly obtain magnet/torrent.
Else: build table with all data and enable the user select the torrent.
|
entailment
|
def build_url(self):
    """Build appropriate encoded URL.
    This implies the same way of searching a torrent as in the page itself.
    """
    encoded = requests.utils.requote_uri(
        self.torrent_page + self.string_search)
    # Some trackers require a trailing path segment after the query.
    suffix = {'1337x': '/1/', 'limetorrents': '/'}.get(self.page, '')
    return(encoded + suffix)
|
Build appropiate encoded URL.
This implies the same way of searching a torrent as in the page itself.
|
entailment
|
def get_content(self):
    """Get content of the page through url.

    Stores the HTTP response in ``self.content_page``; on any requests
    failure the error is logged and the program exits with status 1.
    """
    url = self.build_url()
    try:
        self.content_page = requests.get(url)
        if self.content_page.status_code != requests.codes.ok:
            self.content_page.raise_for_status()
    except requests.exceptions.RequestException as ex:
        # Lazy %-style args avoid building the message unless emitted.
        logging.info('A requests exception has ocurred: %s', ex)
        logging.error(traceback.format_exc())
        # Bug fix: exit nonzero on failure (was exit(0), which
        # reported success to the shell after an error).
        sys.exit(1)
|
Get content of the page through url.
|
entailment
|
def _recycle(self):
""" Reclaim buffer space before the origin.
Note: modifies buffer size
"""
origin = self._origin
if origin == 0:
return False
available = self._extent - origin
self._data[:available] = self._data[origin:self._extent]
self._extent = available
self._origin = 0
#log_debug("Recycled %d bytes" % origin)
return True
|
Reclaim buffer space before the origin.
Note: modifies buffer size
|
entailment
|
def frame_message(self):
    """ Construct a frame around the first complete message in the buffer.

    Messages are chunked: each chunk is prefixed by a big-endian
    unsigned 16-bit size, and a zero-size chunk terminates the
    message. On success, stores a MessageFrame over the framed bytes
    in ``self._frame`` (with per-chunk (start, end) offsets relative
    to the frame origin) and returns True; returns False when the
    buffer does not yet hold a complete message.
    """
    if self._frame is not None:
        # Only one frame at a time; drop the previous message first.
        self.discard_message()
    panes = []
    p = origin = self._origin
    extent = self._extent
    while p < extent:
        available = extent - p
        if available < 2:
            # Not even a full 2-byte chunk header available yet.
            break
        chunk_size, = struct_unpack(">H", self._view[p:(p + 2)])
        p += 2
        if chunk_size == 0:
            # Zero-size chunk marks the end of the message.
            self._limit = p
            self._frame = MessageFrame(memoryview(self._view[origin:self._limit]), panes)
            return True
        q = p + chunk_size
        # Record chunk boundaries relative to the frame origin.
        panes.append((p - origin, q - origin))
        p = q
    return False
|
Construct a frame around the first complete message in the buffer.
|
entailment
|
def call(self, request_function, set_header_callback, *args, **kwargs):
    """Rate limit the call to request_function.
    :param request_function: A function call that returns an HTTP response
        object.
    :param set_header_callback: A callback function used to set the request
        headers. This callback is called after any necessary sleep time
        occurs.
    :param *args: The positional arguments to ``request_function``.
    :param **kwargs: The keyword arguments to ``request_function``.
    """
    # Sleep first, then build headers, so tokens are as fresh as possible.
    self.delay()
    headers = set_header_callback()
    kwargs["headers"] = headers
    result = request_function(*args, **kwargs)
    # Record the server's rate-limit state for the next call.
    self.update(result.headers)
    return result
|
Rate limit the call to request_function.
:param request_function: A function call that returns an HTTP response
object.
:param set_header_callback: A callback function used to set the request
headers. This callback is called after any necessary sleep time
occurs.
:param *args: The positional arguments to ``request_function``.
:param **kwargs: The keyword arguments to ``request_function``.
|
entailment
|
def delay(self):
    """Sleep for an amount of time to remain under the rate limit."""
    if self.next_request_timestamp is None:
        # No rate-limit information recorded yet; proceed immediately.
        return
    wait = self.next_request_timestamp - time.time()
    if wait <= 0:
        return
    log.debug("Sleeping: {:0.2f} seconds prior to call".format(wait))
    time.sleep(wait)
|
Sleep for an amount of time to remain under the rate limit.
|
entailment
|
def update(self, response_headers):
    """Update the state of the rate limiter based on the response headers.
    This method should only be called following a HTTP request to reddit.
    Response headers that do not contain x-ratelimit fields will be treated
    as a single request. This behavior is to error on the safe-side as such
    responses should trigger exceptions that indicate invalid behavior.
    """
    if "x-ratelimit-remaining" not in response_headers:
        # No rate-limit info: conservatively count this as one request.
        if self.remaining is not None:
            self.remaining -= 1
            self.used += 1
        return
    now = time.time()
    prev_remaining = self.remaining
    seconds_to_reset = int(response_headers["x-ratelimit-reset"])
    self.remaining = float(response_headers["x-ratelimit-remaining"])
    self.used = int(response_headers["x-ratelimit-used"])
    self.reset_timestamp = now + seconds_to_reset
    if self.remaining <= 0:
        # Quota exhausted: next request must wait for the reset.
        self.next_request_timestamp = self.reset_timestamp
        return
    # Estimate how many clients share this quota from the drop in the
    # remaining count between consecutive responses.
    if prev_remaining is not None and prev_remaining > self.remaining:
        estimated_clients = prev_remaining - self.remaining
    else:
        estimated_clients = 1.0
    # Spread the remaining quota evenly over the time left until reset.
    self.next_request_timestamp = min(
        self.reset_timestamp,
        now + (estimated_clients * seconds_to_reset / self.remaining),
    )
|
Update the state of the rate limiter based on the response headers.
This method should only be called following a HTTP request to reddit.
Response headers that do not contain x-ratelimit fields will be treated
as a single request. This behavior is to error on the safe-side as such
responses should trigger exceptions that indicate invalid behavior.
|
entailment
|
def custom_resolve(self):
    """ If a custom resolver is defined, perform custom resolution on
    the contained addresses.
    :return:
    """
    resolver = self.custom_resolver
    if not callable(resolver):
        # Nothing to do without a usable resolver.
        return
    # Each address may expand to several resolved addresses.
    self.addresses = [resolved
                      for address in self.addresses
                      for resolved in resolver(address)]
|
If a custom resolver is defined, perform custom resolution on
the contained addresses.
:return:
|
entailment
|
def dns_resolve(self):
    """ Perform DNS resolution on the contained addresses.
    :return:
    """
    resolved = []
    for host_port in self.addresses:
        try:
            records = getaddrinfo(host_port[0], host_port[1],
                                  0, SOCK_STREAM, IPPROTO_TCP)
        except gaierror:
            raise AddressError("Cannot resolve address {!r}".format(host_port))
        for record in records:
            candidate = record[4]
            if len(candidate) == 4 and candidate[3] != 0:
                # skip any IPv6 addresses with a non-zero scope id
                # as these appear to cause problems on some platforms
                continue
            resolved.append(candidate)
    self.addresses = resolved
|
Perform DNS resolution on the contained addresses.
:return:
|
entailment
|
def get_quality(cell):
    """ Gets the quality of a network / cell.
    @param string cell
        A network / cell from iwlist scan.
    @return string
        The quality of the network as a percentage, or "" when no
        Quality line is present.
    """
    quality = matching_line(cell, "Quality=")
    if quality is None:
        return ""
    # Parse the "NN/MM" ratio once. The original code redundantly
    # re-scanned the cell with a second matching_line() call to
    # recompute the exact same value.
    quality = quality.split()[0].split("/")
    return str(int(round(float(quality[0]) / float(quality[1]) * 100)))
|
Gets the quality of a network / cell.
@param string cell
A network / cell from iwlist scan.
@return string
The quality of the network.
|
entailment
|
def get_signal_level(cell):
    """ Gets the signal level of a network / cell.
    @param string cell
        A network / cell from iwlist scan.
    @return string
        The signal level of the network.
    """
    raw = matching_line(cell, "Signal level=")
    if raw is None:
        return ""
    parts = raw.split("=")[1].split("/")
    if len(parts) == 2:
        # Ratio form "NN/MM": convert to a percentage.
        return str(int(round(float(parts[0]) / float(parts[1]) * 100)))
    if len(parts) == 1:
        # Absolute form (e.g. "-52 dBm"): keep just the number.
        return parts[0].split(' ')[0]
    return ""
|
Gets the signal level of a network / cell.
@param string cell
A network / cell from iwlist scan.
@return string
The signal level of the network.
|
entailment
|
def get_noise_level(cell):
    """ Gets the noise level of a network / cell.
    @param string cell
        A network / cell from iwlist scan.
    @return string
        The noise level of the network.
    """
    line = matching_line(cell, "Noise level=")
    if line is None:
        return ""
    # Keep the numeric part before the unit (e.g. "-256 dBm" -> "-256").
    value = line.split("=")[1]
    return value.split(' ')[0]
|
Gets the noise level of a network / cell.
@param string cell
A network / cell from iwlist scan.
@return string
The noise level of the network.
|
entailment
|
def get_channel(cell):
    """ Gets the channel of a network / cell.
    @param string cell
        A network / cell from iwlist scan.
    @return string
        The channel of the network, or "" when neither a Channel nor
        a Frequency line is present.
    """
    channel = matching_line(cell, "Channel:")
    if channel:
        return channel
    frequency = matching_line(cell, "Frequency:")
    if frequency is None:
        # Bug fix: without this guard, re.sub(None) raised TypeError.
        # Behave like the other getters and report an empty value.
        return ""
    # Derive the channel from the "(Channel N)" suffix of the frequency.
    return re.sub(r".*\(Channel\s(\d{1,3})\).*", r"\1", frequency)
|
Gets the channel of a network / cell.
@param string cell
A network / cell from iwlist scan.
@return string
The channel of the network.
|
entailment
|
def get_encryption(cell, emit_version=False):
    """ Gets the encryption type of a network / cell.
    @param string cell
        A network / cell from iwlist scan.
    @param bool emit_version
        When True, append the detected WPA version to the result.
    @return string
        The encryption type of the network.
    """
    enc = ""
    # "Encryption key:off" means the network is open/unprotected.
    if matching_line(cell, "Encryption key:") == "off":
        enc = "Open"
    else:
        # Scan the information elements (IE:) for WPA markers; a
        # protected network with no WPA IE is reported as WEP.
        for line in cell:
            matching = match(line,"IE:")
            if matching == None:
                continue
            wpa = match(matching,"WPA")
            if wpa == None:
                continue
            # NOTE(review): VERSION_RGX.search can return None when the
            # IE carries no version text, making .regs raise — assumes
            # iwlist always prints a version here; confirm.
            version_matches = VERSION_RGX.search(wpa)
            if len(version_matches.regs) == 1:
                version = version_matches \
                    .group(0) \
                    .lower() \
                    .replace("version", "") \
                    .strip()
                wpa = wpa.replace(version_matches.group(0), "").strip()
                if wpa == "":
                    wpa = "WPA"
                if emit_version:
                    enc = "{0} v.{1}".format(wpa, version)
                else:
                    enc = wpa
                # WPA2 is the strongest we report; stop scanning.
                if wpa == "WPA2":
                    return enc
            else:
                enc = wpa
        if enc == "":
            enc = "WEP"
    return enc
|
Gets the encryption type of a network / cell.
@param string cell
A network / cell from iwlist scan.
@return string
The encryption type of the network.
|
entailment
|
def matching_line(lines, keyword):
    """ Returns the first matching line in a list of lines.
    @see match()
    """
    for candidate in lines:
        found = match(candidate, keyword)
        if found is not None:
            return found
    return None
|
Returns the first matching line in a list of lines.
@see match()
|
entailment
|
def match(line, keyword):
    """ If the first part of line (modulo blanks) matches keyword,
    returns the end of that line. Otherwise checks if keyword is
    anywhere in the line and returns that section, else returns None"""
    stripped = line.lstrip()
    if stripped.startswith(keyword):
        # Leading match: return everything after the keyword.
        return stripped[len(keyword):]
    if keyword in stripped:
        # Embedded match: return from the keyword onwards.
        return stripped[stripped.index(keyword):]
    return None
|
If the first part of line (modulo blanks) matches keyword,
returns the end of that line. Otherwise checks if keyword is
anywhere in the line and returns that section, else returns None
|
entailment
|
def parse_cell(cell, rules):
    """ Applies the rules to the bunch of text describing a cell.
    @param string cell
        A network / cell from iwlist scan.
    @param dictionary rules
        A dictionary of parse rules.
    @return dictionary
        parsed networks. """
    # Each rule maps the raw cell text to one named field.
    return {name: extract(cell) for name, extract in rules.items()}
|
Applies the rules to the bunch of text describing a cell.
@param string cell
A network / cell from iwlist scan.
@param dictionary rules
A dictionary of parse rules.
@return dictionary
parsed networks.
|
entailment
|
def get_parsed_cells(iw_data, rules=None):
    """ Parses iwlist output into a list of networks.
    @param list iw_data
        Output from iwlist scan.
        A list of strings.
    @return list
        properties: Name, Address, Quality, Channel, Frequency, Encryption, Signal Level, Noise Level, Bit Rates, Mode.
    """
    # Column name -> extraction function applied to each cell's lines.
    rules = rules or {
        "Name": get_name,
        "Quality": get_quality,
        "Channel": get_channel,
        "Frequency": get_frequency,
        "Encryption": get_encryption,
        "Address": get_address,
        "Signal Level": get_signal_level,
        "Noise Level": get_noise_level,
        "Bit Rates": get_bit_rates,
        "Mode": get_mode,
    }
    # Group the raw lines per cell; the initial sentinel list collects
    # anything before the first "Cell " header and is discarded below.
    cells = [[]]
    for line in iw_data:
        header_tail = match(line, "Cell ")
        if header_tail is not None:
            cells.append([])
            # Keep only the tail of the header line (address part).
            line = header_tail[-27:]
        cells[-1].append(line.rstrip())
    parsed_cells = [parse_cell(cell, rules) for cell in cells[1:]]
    sort_cells(parsed_cells)
    return parsed_cells
|
Parses iwlist output into a list of networks.
@param list iw_data
Output from iwlist scan.
A list of strings.
@return list
properties: Name, Address, Quality, Channel, Frequency, Encryption, Signal Level, Noise Level, Bit Rates, Mode.
|
entailment
|
def request(
    self, method, path, data=None, files=None, json=None, params=None
):
    """Return the json content from the resource at ``path``.
    :param method: The request verb. E.g., get, post, put.
    :param path: The path of the request. This path will be combined with
        the ``oauth_url`` of the Requestor.
    :param data: Dictionary, bytes, or file-like object to send in the body
        of the request.
    :param files: Dictionary, mapping ``filename`` to file-like object.
    :param json: Object to be serialized to JSON in the body of the
        request.
    :param params: The query parameters to send with the request.
    Automatically refreshes the access token if it becomes invalid and a
    refresh token is available. Raises InvalidInvocation in such a case if
    a refresh token is not available.
    """
    # Work on copies so callers' dicts are never mutated.
    query = deepcopy(params) or {}
    query["raw_json"] = 1
    body = data
    if isinstance(body, dict):
        body = deepcopy(body)
        body["api_type"] = "json"
        body = sorted(body.items())
    full_url = urljoin(self._requestor.oauth_url, path)
    return self._request_with_retries(
        data=body,
        files=files,
        json=json,
        method=method,
        params=query,
        url=full_url,
    )
|
Return the json content from the resource at ``path``.
:param method: The request verb. E.g., get, post, put.
:param path: The path of the request. This path will be combined with
the ``oauth_url`` of the Requestor.
:param data: Dictionary, bytes, or file-like object to send in the body
of the request.
:param files: Dictionary, mapping ``filename`` to file-like object.
:param json: Object to be serialized to JSON in the body of the
request.
:param params: The query parameters to send with the request.
Automatically refreshes the access token if it becomes invalid and a
refresh token is available. Raises InvalidInvocation in such a case if
a refresh token is not available.
|
entailment
|
def main():
    """Provide the program's entry point when directly executed.

    Authenticates as a script app with credentials from the
    environment and prints the names of the account's friends.
    """
    requestor = prawcore.Requestor("prawcore_script_auth_example")
    authenticator = prawcore.TrustedAuthenticator(
        requestor,
        os.environ["PRAWCORE_CLIENT_ID"],
        os.environ["PRAWCORE_CLIENT_SECRET"],
    )
    authorizer = prawcore.ScriptAuthorizer(
        authenticator,
        os.environ["PRAWCORE_USERNAME"],
        os.environ["PRAWCORE_PASSWORD"],
    )
    authorizer.refresh()
    with prawcore.session(authorizer) as session:
        response = session.request("GET", "/api/v1/me/friends")
        for friend in response["data"]["children"]:
            print(friend["name"])
    return 0
|
Provide the program's entry point when directly executed.
|
entailment
|
def main():
    """Provide the program's entry point when directly executed.

    Fetches a user's trophies twice through a CachingSession and
    verifies that the cached response equals the original one.
    """
    if len(sys.argv) != 2:
        print("Usage: {} USERNAME".format(sys.argv[0]))
        return 1
    caching_requestor = prawcore.Requestor(
        "prawcore_device_id_auth_example", session=CachingSession()
    )
    authenticator = prawcore.TrustedAuthenticator(
        caching_requestor,
        os.environ["PRAWCORE_CLIENT_ID"],
        os.environ["PRAWCORE_CLIENT_SECRET"],
    )
    authorizer = prawcore.ReadOnlyAuthorizer(authenticator)
    authorizer.refresh()
    user = sys.argv[1]
    with prawcore.session(authorizer) as session:
        data1 = session.request("GET", "/api/v1/user/{}/trophies".format(user))
    # Same request again: should be served from the cache.
    with prawcore.session(authorizer) as session:
        data2 = session.request("GET", "/api/v1/user/{}/trophies".format(user))
    for trophy in data1["data"]["trophies"]:
        description = trophy["data"]["description"]
        print(
            "Original:",
            trophy["data"]["name"]
            + (" ({})".format(description) if description else ""),
        )
    for trophy in data2["data"]["trophies"]:
        description = trophy["data"]["description"]
        print(
            "Cached:",
            trophy["data"]["name"]
            + (" ({})".format(description) if description else ""),
        )
    # Bug fix: the comparison previously read ``data2 == data2``, which
    # trivially compared the cached response with itself and always
    # printed True; the intent is cached vs. original.
    print(
        "----\nCached == Original:",
        data1["data"]["trophies"] == data2["data"]["trophies"],
    )
    return 0
|
Provide the program's entry point when directly executed.
|
entailment
|
def request(self, method, url, params=None, **kwargs):
    """Perform a request, or return a cached response if available.

    Only GET responses are cached, keyed by (url, params).
    """
    cache_key = (url, tuple(params.items()) if params else ())
    is_get = method.upper() == "GET"
    if is_get and cache_key in self.get_cache:
        print("Returning cached response for:", method, url, params)
        return self.get_cache[cache_key]
    response = super().request(method, url, params, **kwargs)
    if is_get:
        self.get_cache[cache_key] = response
        print("Adding entry to the cache:", method, url, params)
    return response
|
Perform a request, or return a cached response if available.
|
entailment
|
def parse_routing_info(cls, records):
    """ Parse the records returned from a getServers call and
    return a new RoutingTable instance.
    """
    if len(records) != 1:
        raise RoutingProtocolError("Expected exactly one record")
    record = records[0]
    # Collect addresses per role; unknown roles are ignored.
    pools = {"ROUTE": [], "READ": [], "WRITE": []}
    try:
        for server in record["servers"]:
            parsed = [SocketAddress.parse(address, DEFAULT_PORT)
                      for address in server["addresses"]]
            role = server["role"]
            if role in pools:
                pools[role].extend(parsed)
        ttl = record["ttl"]
    except (KeyError, TypeError):
        raise RoutingProtocolError("Cannot parse routing info")
    return cls(pools["ROUTE"], pools["READ"], pools["WRITE"], ttl)
|
Parse the records returned from a getServers call and
return a new RoutingTable instance.
|
entailment
|
def is_fresh(self, access_mode):
    """ Indicator for whether routing information is still usable.
    """
    log_debug("[#0000] C: <ROUTING> Checking table freshness for %r", access_mode)
    expired = self.last_updated_time + self.ttl <= self.timer()
    # The table must hold at least one server for the requested mode.
    if access_mode == READ_ACCESS:
        has_server_for_mode = bool(self.readers)
    elif access_mode == WRITE_ACCESS:
        has_server_for_mode = bool(self.writers)
    else:
        has_server_for_mode = False
    log_debug("[#0000] C: <ROUTING> Table expired=%r", expired)
    log_debug("[#0000] C: <ROUTING> Table routers=%r", self.routers)
    log_debug("[#0000] C: <ROUTING> Table has_server_for_mode=%r", has_server_for_mode)
    return not expired and self.routers and has_server_for_mode
|
Indicator for whether routing information is still usable.
|
entailment
|
def update(self, new_routing_table):
    """ Update the current routing table with new routing information
    from a replacement table.
    """
    # Replace each server pool in place with the replacement's pool.
    for pool_name in ("routers", "readers", "writers"):
        getattr(self, pool_name).replace(getattr(new_routing_table, pool_name))
    self.last_updated_time = self.timer()
    self.ttl = new_routing_table.ttl
    log_debug("[#0000] S: <ROUTING> table=%r", self)
|
Update the current routing table with new routing information
from a replacement table.
|
entailment
|
def fetch_routing_info(self, address):
    """ Fetch raw routing info from a given router address.
    :param address: router address
    :return: list of routing records or
             None if no connection could be established
    :raise ServiceUnavailable: if the server does not support routing or
                               if routing support is broken
    """
    metadata = {}
    records = []
    # Failure callback: translate server-side failures of the routing
    # procedure into protocol errors.
    def fail(md):
        if md.get("code") == "Neo.ClientError.Procedure.ProcedureNotFound":
            raise RoutingProtocolError("Server {!r} does not support routing".format(address))
        else:
            raise RoutingProtocolError("Routing support broken on server {!r}".format(address))
    try:
        with self.acquire_direct(address) as cx:
            # Server agent string looks like "Neo4j/<version>".
            _, _, server_version = (cx.server.agent or "").partition("/")
            # TODO 2.0: remove old routing procedure
            if server_version and Version.parse(server_version) >= Version((3, 2)):
                # 3.2+ routing procedure accepts a routing context.
                log_debug("[#%04X] C: <ROUTING> query=%r", cx.local_port, self.routing_context or {})
                cx.run("CALL dbms.cluster.routing.getRoutingTable({context})",
                       {"context": self.routing_context}, on_success=metadata.update, on_failure=fail)
            else:
                # Legacy procedure for pre-3.2 servers.
                log_debug("[#%04X] C: <ROUTING> query={}", cx.local_port)
                cx.run("CALL dbms.cluster.routing.getServers", {}, on_success=metadata.update, on_failure=fail)
            cx.pull_all(on_success=metadata.update, on_records=records.extend)
            cx.sync()
        # Zip field names with each record's values into dicts.
        routing_info = [dict(zip(metadata.get("fields", ()), values)) for values in records]
        log_debug("[#%04X] S: <ROUTING> info=%r", cx.local_port, routing_info)
        return routing_info
    except RoutingProtocolError as error:
        raise ServiceUnavailable(*error.args)
    except ServiceUnavailable:
        # Connection failure: mark the router dead and report "no info".
        self.deactivate(address)
        return None
|
Fetch raw routing info from a given router address.
:param address: router address
:return: list of routing records or
None if no connection could be established
:raise ServiceUnavailable: if the server does not support routing or
if routing support is broken
|
entailment
|
def fetch_routing_table(self, address):
    """ Fetch a routing table from a given router address.
    :param address: router address
    :return: a new RoutingTable instance or None if the given router is
             currently unable to provide routing information
    :raise ServiceUnavailable: if no writers are available
    :raise ProtocolError: if the routing information received is unusable
    """
    raw_info = self.fetch_routing_info(address)
    if raw_info is None:
        return None
    table = RoutingTable.parse_routing_info(raw_info)
    # A missing writer likely indicates a temporary state such as a
    # leader switch, so it is only flagged, not treated as an error.
    self.missing_writer = (len(table.writers) == 0)
    if len(table.routers) == 0:
        raise RoutingProtocolError("No routing servers returned from server %r" % (address,))
    if len(table.readers) == 0:
        raise RoutingProtocolError("No read servers returned from server %r" % (address,))
    # At least one router and one reader exist, so the table is usable.
    return table
|
Fetch a routing table from a given router address.
:param address: router address
:return: a new RoutingTable instance or None if the given router is
currently unable to provide routing information
:raise ServiceUnavailable: if no writers are available
:raise ProtocolError: if the routing information received is unusable
|
entailment
|
def update_routing_table_from(self, *routers):
    """ Try to update routing tables with the given routers.

    :return: True if the routing table is successfully updated, otherwise False
    """
    for candidate in routers:
        table = self.fetch_routing_table(candidate)
        if table is None:
            # This router could not help; try the next one.
            continue
        self.routing_table.update(table)
        return True
    return False
|
Try to update routing tables with the given routers.
:return: True if the routing table is successfully updated, otherwise False
|
entailment
|
def update_routing_table(self):
    """ Update the routing table from the first router able to provide
    valid routing information.

    :raise ServiceUnavailable: if no known router yields a routing table
    """
    # Snapshot the router list: fetching can mutate the live table.
    known_routers = list(self.routing_table.routers)
    tried_initial_router = False
    if self.missing_writer:
        # Reading in absence of a writer: prefer the initial router first.
        tried_initial_router = True
        if self.update_routing_table_from(self.initial_address):
            return
    if self.update_routing_table_from(*known_routers):
        return
    if not tried_initial_router and self.initial_address not in known_routers:
        if self.update_routing_table_from(self.initial_address):
            return
    # Every candidate failed to provide routing information.
    raise ServiceUnavailable("Unable to retrieve routing information")
|
Update the routing table from the first router able to provide
valid routing information.
|
entailment
|
def ensure_routing_table_is_fresh(self, access_mode):
    """ Update the routing table if stale.

    This method performs two freshness checks, before and after acquiring
    the refresh lock. If the routing table is already fresh on entry, the
    method exits immediately; otherwise, the refresh lock is acquired and
    the second freshness check that follows determines whether an update
    is still required.

    This method is thread-safe.

    :return: `True` if an update was required, `False` otherwise.
    """
    # Fast path: no lock needed when the table is already fresh.
    if self.routing_table.is_fresh(access_mode):
        return False
    with self.refresh_lock:
        # Re-check under the lock: another thread may have refreshed
        # the table while we were waiting.
        still_stale = not self.routing_table.is_fresh(access_mode)
        if still_stale:
            self.update_routing_table()
            self.update_connection_pool()
            return True
        if access_mode == READ_ACCESS:
            # Reader is fresh but the writer list may not be; record
            # whether we are reading in the absence of a writer.
            self.missing_writer = not self.routing_table.is_fresh(WRITE_ACCESS)
        return False
|
Update the routing table if stale.
This method performs two freshness checks, before and after acquiring
the refresh lock. If the routing table is already fresh on entry, the
method exits immediately; otherwise, the refresh lock is acquired and
the second freshness check that follows determines whether an update
is still required.
This method is thread-safe.
:return: `True` if an update was required, `False` otherwise.
|
entailment
|
def deactivate(self, address):
    """ Deactivate an address from the connection pool,
    if present, remove from the routing table and also closing
    all idle connections to that address.

    :param address: the server address to deactivate
    """
    log_debug("[#0000] C: <ROUTING> Deactivating address %r", address)
    # discard (rather than remove) is a silent no-op when the address
    # has already been taken out of a given role set.
    for role_set in (self.routing_table.routers,
                     self.routing_table.readers,
                     self.routing_table.writers):
        role_set.discard(address)
    log_debug("[#0000] C: <ROUTING> table=%r", self.routing_table)
    # Let the base pool close idle connections to this address.
    super(RoutingConnectionPool, self).deactivate(address)
|
Deactivate an address from the connection pool,
if present, remove from the routing table and also closing
all idle connections to that address.
|
entailment
|
def remove_writer(self, address):
    """ Remove a writer address from the routing table, if present.

    :param address: the writer address to remove
    """
    log_debug("[#0000] C: <ROUTING> Removing writer %r", address)
    # discard (not remove) so a missing address is a silent no-op
    self.routing_table.writers.discard(address)
    log_debug("[#0000] C: <ROUTING> table=%r", self.routing_table)
|
Remove a writer address from the routing table, if present.
|
entailment
|
def handle(self, error, connection):
    """ Handle any cleanup or similar activity related to an error
    occurring on a pooled connection.

    :param error: the exception raised on the connection
    :param connection: the pooled connection the error occurred on
    """
    # Dispatch on the exact class (membership test, not isinstance),
    # mirroring the original behavior: subclasses are NOT matched.
    cls = error.__class__
    if cls in (ConnectionExpired, ServiceUnavailable, DatabaseUnavailableError):
        # Server looks unreachable or unusable; drop it entirely.
        self.deactivate(connection.address)
    elif cls in (NotALeaderError, ForbiddenOnReadOnlyDatabaseError):
        # Server can no longer accept writes; demote it from writer role.
        self.remove_writer(connection.address)
|
Handle any cleanup or similar activity related to an error
occurring on a pooled connection.
|
entailment
|
def point_type(name, fields, srid_map):
    """ Dynamically create a Point subclass.

    :param name: name for the new class
    :param fields: iterable of coordinate field names (e.g. "x", "y", "z")
    :param srid_map: mapping of dimensionality (coordinate count) to SRID
    :return: the newly created Point subclass
    """
    def srid(self):
        # SRID is derived from the point's dimensionality; None if the
        # dimensionality is not covered by srid_map.
        try:
            return srid_map[len(self)]
        except KeyError:
            return None
    attributes = {"srid": property(srid)}
    for index, subclass_field in enumerate(fields):
        def accessor(self, i=index, f=subclass_field):
            # Default arguments freeze the current loop values, avoiding
            # the late-binding closure pitfall.
            try:
                return self[i]
            except IndexError:
                raise AttributeError(f)
        for field_alias in {subclass_field, "xyz"[index]}:
            # Expose both the named field and its positional x/y/z alias;
            # the set literal dedupes when the two coincide.
            # NOTE(review): "xyz"[index] raises IndexError for more than
            # three fields — presumably fields never exceeds 3; confirm.
            attributes[field_alias] = property(accessor)
    cls = type(name, (Point,), attributes)
    with __srid_table_lock:
        # __srid_table / __srid_table_lock appear to be module-level
        # registries defined elsewhere, mapping SRID -> (class, dim).
        for dim, srid in srid_map.items():
            __srid_table[srid] = (cls, dim)
    return cls
|
Dynamically create a Point subclass.
|
entailment
|
def main():
    """Provide the program's entry point when directly executed."""
    if len(sys.argv) != 2:
        print("Usage: {} USERNAME".format(sys.argv[0]))
        return 1
    # Credentials come from the environment; the requestor string
    # identifies this example to Reddit.
    requestor = prawcore.Requestor("prawcore_read_only_example")
    authenticator = prawcore.TrustedAuthenticator(
        requestor,
        os.environ["PRAWCORE_CLIENT_ID"],
        os.environ["PRAWCORE_CLIENT_SECRET"],
    )
    authorizer = prawcore.ReadOnlyAuthorizer(authenticator)
    authorizer.refresh()
    username = sys.argv[1]
    with prawcore.session(authorizer) as session:
        response = session.request(
            "GET", "/api/v1/user/{}/trophies".format(username)
        )
        for trophy in response["data"]["trophies"]:
            details = trophy["data"]
            suffix = (
                " ({})".format(details["description"])
                if details["description"]
                else ""
            )
            print(details["name"] + suffix)
    return 0
|
Provide the program's entry point when directly executed.
|
entailment
|
def main():
    """Read a directory containing json files for Kibana panels,
    beautify them and replace size value in aggregations as specified
    through the corresponding parameters.

    Exits with status 1 if source and destination directories coincide.
    """
    args = parse_args()
    configure_logging(args.debug)
    src_path = args.src_path
    dest_path = args.dest_path
    # Both spellings (with and without a space after the colon) occur in
    # exported panels, so replace each of them.
    old_str1 = '\\"size\\":' + args.old_size
    old_str2 = '\\"size\\": ' + args.old_size
    new_str = '\\"size\\":' + args.new_size
    logging.info('Input path: %s', src_path)
    logging.info('Output path: %s', dest_path)
    logging.info('old str: %s', old_str1)
    logging.info('old str: %s', old_str2)
    logging.info('new str: %s', new_str)
    if os.path.abspath(src_path) == os.path.abspath(dest_path):
        logging.error('source and destination directiories must be different')
        sys.exit(1)
    # Iterate over input files
    json_files = [f for f in os.listdir(src_path) if f.endswith('.json')]
    for filename in json_files:
        # BUG FIX: the input path was computed twice (duplicated statement);
        # the redundant assignment has been removed.
        in_file_path = os.path.join(src_path, filename)
        out_file_path = os.path.join(dest_path, filename)
        logging.info('INPUT FILE: %s', in_file_path)
        logging.info('OUTPUT FILE: %s', out_file_path)
        # First beautify input
        pretty = utils.beautify(filename=in_file_path)
        # Then rewrite the aggregation sizes in the beautified JSON string
        pretty_replaced = utils.replace(pretty, old_str1, new_str)
        pretty_replaced = utils.replace(pretty_replaced, old_str2, new_str)
        with open(out_file_path, 'w') as output_file:
            output_file.write(pretty_replaced)
    logging.info('This is the end.')
|
Read a directory containing json files for Kibana panels,
beautify them and replace size value in aggregations as specified
through corresponding params params.
|
entailment
|
def parse_args():
    """Parse arguments from the command line.

    :return: an ``argparse.Namespace`` with src_path, dest_path,
             old_size, new_size and debug attributes
    """
    parser = argparse.ArgumentParser(description=TO_KIBANA5_DESC_MSG)
    parser.add_argument('-s', '--source', dest='src_path', required=True,
                        help='source directory')
    parser.add_argument('-d', '--dest', dest='dest_path', required=True,
                        help='destination directory')
    parser.add_argument('-o', '--old-size', dest='old_size', default='0',
                        help='aggregation old size')
    parser.add_argument('-n', '--new-size', dest='new_size', default='1000',
                        help='aggregation new size')
    parser.add_argument('-g', '--debug', dest='debug', action='store_true')
    return parser.parse_args()
|
Parse arguments from the command line
|
entailment
|
def configure_logging(debug=False):
    """Configure logging.

    The function configures log messages. By default, log messages
    are sent to stderr. Set the parameter `debug` to activate the
    debug mode.

    :param debug: set the debug mode
    """
    # Debug mode switches both the level and the message format.
    level = logging.DEBUG if debug else logging.INFO
    fmt = DEBUG_LOG_FORMAT if debug else LOG_FORMAT
    logging.basicConfig(level=level, format=fmt)
|
Configure logging
The function configures log messages. By default, log messages
are sent to stderr. Set the parameter `debug` to activate the
debug mode.
:param debug: set the debug mode
|
entailment
|
def signal_handler(signal_name, frame):
    """Quit signal handler.

    Flushes stdout around the farewell message so buffered output is not
    lost, then exits with status 0.

    :param signal_name: signal number (unused)
    :param frame: current stack frame (unused)
    """
    sys.stdout.flush()
    print("\nSIGINT in frame signal received. Quitting...")
    sys.stdout.flush()
    sys.exit(0)
|
Quit signal handler.
|
entailment
|
def graph_format(new_mem, old_mem, is_firstiteration=True):
    """Show changes graphically in memory consumption.

    :param new_mem: current memory usage
    :param old_mem: previous memory usage
    :param is_firstiteration: True when no previous sample exists
    :return: a short fixed-width marker string ('+'/'-' bars)
    """
    if is_firstiteration:
        # No previous sample to compare against.
        return " n/a "
    delta = new_mem - old_mem
    # Descending growth thresholds -> increasingly long '+' bars.
    growth_bars = (
        (50000000, " +++++"),
        (20000000, " ++++ "),
        (5000000, " +++ "),
        (1000000, " ++ "),
        (50000, " + "),
    )
    for threshold, bar in growth_bars:
        if delta > threshold:
            return bar
    # Descending shrink thresholds -> increasingly long '-' bars.
    shrink_bars = (
        (10000000, "--- "),
        (2000000, " -- "),
        (100000, " - "),
    )
    for threshold, bar in shrink_bars:
        if -delta > threshold:
            return bar
    # Change too small to display.
    return " "
|
Show changes graphically in memory consumption
|
entailment
|
def get_cur_mem_use():
    """Return utilization of memory.

    Parses /proc/meminfo (Linux-specific; see http://lwn.net/Articles/28345/).

    :return: tuple of (total memory in kB as float,
                       RAM occupancy percentage as str,
                       swap occupancy percentage as str)
    """
    emptySpace = re.compile('[ ]+')
    # Initialize so a missing field cannot raise UnboundLocalError below.
    memtotal = memfree = cached = swaptotal = swapfree = 0.0
    # BUG FIX: the file handle was previously opened and never closed;
    # a with-statement guarantees it is released.
    with open("/proc/meminfo", 'r') as meminfo:
        for line in meminfo:
            if "MemTotal" in line:
                memtotal = float(emptySpace.split(line)[1])
            if "SwapFree" in line:
                swapfree = float(emptySpace.split(line)[1])
            if "SwapTotal" in line:
                swaptotal = float(emptySpace.split(line)[1])
            if "MemFree" in line:
                memfree = float(emptySpace.split(line)[1])
            # "SwapCached" also contains "Cached", so exclude it explicitly.
            if "Cached" in line and not "SwapCached" in line:
                cached = float(emptySpace.split(line)[1])
    # Free + page-cache memory counts as available RAM.
    ramoccup = 1.0 - (memfree + cached) / memtotal
    if swaptotal == 0:
        # No swap configured at all.
        swapoccup = 0
    else:
        swapoccup = 1.0 - swapfree / swaptotal
    strramoccup = str(round(ramoccup * 100.0, 1))
    strswapoccup = str(round(swapoccup * 100.0, 1))
    return float(memtotal), strramoccup, strswapoccup
|
return utilization of memory
|
entailment
|
def check_py_version():
    """Check if a proper Python version is used.

    Returns silently on Python >= 2.7; otherwise prints an explanation
    and exits with status -1.
    """
    try:
        if sys.version_info >= (2, 7):
            return
    # BUG FIX: was a bare `except:`, which would also swallow
    # SystemExit/KeyboardInterrupt. Very old interpreters may lack
    # sys.version_info, hence the guard at all.
    except Exception:
        pass
    print(" ")
    print(" ERROR - memtop needs python version at least 2.7")
    print(("Chances are that you can install newer version from your "
           "repositories, or even that you have some newer version "
           "installed yet."))
    print("(one way to find out which versions are installed is to try "
          "following: 'which python2.7' , 'which python3' and so...)")
    print(" ")
    sys.exit(-1)
|
Check if a propper Python version is used.
|
entailment
|
def character(prompt=None, empty=False):
    """Prompt a single character.

    Parameters
    ----------
    prompt : str, optional
        Use an alternative prompt.
    empty : bool, optional
        Allow an empty response.

    Returns
    -------
    str or None
        A str if the user entered a single-character, non-empty string.
        None if the user pressed only Enter and ``empty`` was True.
    """
    # Keep asking until a valid (or permitted-empty) answer is given.
    while True:
        response = _prompt_input(prompt)
        if empty and not response:
            return None
        if len(response) == 1:
            return response
|
Prompt a single character.
Parameters
----------
prompt : str, optional
Use an alternative prompt.
empty : bool, optional
Allow an empty response.
Returns
-------
str or None
A str if the user entered a single-character, non-empty string.
None if the user pressed only Enter and ``empty`` was True.
|
entailment
|
def email(prompt=None, empty=False, mode="simple"):
    """Prompt an email address.

    This check is based on a simple regular expression and does not verify
    whether an email actually exists.

    Parameters
    ----------
    prompt : str, optional
        Use an alternative prompt.
    empty : bool, optional
        Allow an empty response.
    mode : {'simple'}, optional
        'simple' will use a simple regular expression.
        No other mode is implemented yet.

    Returns
    -------
    str or None
        A str if the user entered a likely email address.
        None if the user pressed only Enter and ``empty`` was True.
    """
    if mode != "simple":
        # Only the 'simple' mode is implemented.
        raise ValueError
    # Keep asking until a plausible address (or permitted-empty) is given.
    while True:
        response = _prompt_input(prompt)
        if empty and not response:
            return None
        if RE_EMAIL_SIMPLE.match(response):
            return response
|
Prompt an email address.
This check is based on a simple regular expression and does not verify
whether an email actually exists.
Parameters
----------
prompt : str, optional
Use an alternative prompt.
empty : bool, optional
Allow an empty response.
mode : {'simple'}, optional
'simple' will use a simple regular expression.
No other mode is implemented yet.
Returns
-------
str or None
A str if the user entered a likely email address.
None if the user pressed only Enter and ``empty`` was True.
|
entailment
|
def integer(prompt=None, empty=False):
    """Prompt an integer.

    Parameters
    ----------
    prompt : str, optional
        Use an alternative prompt.
    empty : bool, optional
        Allow an empty response.

    Returns
    -------
    int or None
        An int if the user entered a valid integer.
        None if the user pressed only Enter and ``empty`` was True.
    """
    # Keep asking until the input parses as an integer.
    while True:
        response = _prompt_input(prompt)
        if empty and not response:
            return None
        try:
            return int(response)
        except ValueError:
            continue
|
Prompt an integer.
Parameters
----------
prompt : str, optional
Use an alternative prompt.
empty : bool, optional
Allow an empty response.
Returns
-------
int or None
An int if the user entered a valid integer.
None if the user pressed only Enter and ``empty`` was True.
|
entailment
|
def real(prompt=None, empty=False):
    """Prompt a real number.

    Parameters
    ----------
    prompt : str, optional
        Use an alternative prompt.
    empty : bool, optional
        Allow an empty response.

    Returns
    -------
    float or None
        A float if the user entered a valid real number.
        None if the user pressed only Enter and ``empty`` was True.
    """
    # Keep asking until the input parses as a float.
    while True:
        response = _prompt_input(prompt)
        if empty and not response:
            return None
        try:
            return float(response)
        except ValueError:
            continue
|
Prompt a real number.
Parameters
----------
prompt : str, optional
Use an alternative prompt.
empty : bool, optional
Allow an empty response.
Returns
-------
float or None
A float if the user entered a valid real number.
None if the user pressed only Enter and ``empty`` was True.
|
entailment
|
def regex(pattern, prompt=None, empty=False, flags=0):
    """Prompt a string that matches a regular expression.

    Parameters
    ----------
    pattern : str
        A regular expression that must be matched.
    prompt : str, optional
        Use an alternative prompt.
    empty : bool, optional
        Allow an empty response.
    flags : int, optional
        Flags that will be passed to ``re.match``.

    Returns
    -------
    Match or None
        A match object if the user entered a matching string.
        None if the user pressed only Enter and ``empty`` was True.

    See Also
    --------
    re.match
    """
    # Keep asking until the input matches the pattern.
    while True:
        response = _prompt_input(prompt)
        if empty and not response:
            return None
        match = re.match(pattern, response, flags=flags)
        if match:
            return match
|
Prompt a string that matches a regular expression.
Parameters
----------
pattern : str
A regular expression that must be matched.
prompt : str, optional
Use an alternative prompt.
empty : bool, optional
Allow an empty response.
flags : int, optional
Flags that will be passed to ``re.match``.
Returns
-------
Match or None
A match object if the user entered a matching string.
None if the user pressed only Enter and ``empty`` was True.
See Also
--------
re.match
|
entailment
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.