Search is not available for this dataset
text stringlengths 75 104k |
|---|
def open(self, packet_received):
    """Open the listening port of this connection pool.

    :param packet_received: Callback invoked for every received packet;
        it is passed the peer, typename, and data.
    :returns: Deferred that fires once we are ready to receive.
    """
    logger.debug("Opening connection pool")
    self.packet_received = packet_received

    def store_port(listeningport):
        # Remember the port and derive our own identity from it.
        self._listeningport = listeningport
        self.ownid = self.ownid_factory(listeningport)
        logger.debug("Port opened. Own-ID:%s" % self.ownid)
        return None

    factory = PoolFactory(self, self._typenames)
    listening = self.stream_server_endpoint.listen(factory)
    listening.addCallback(store_port)
    return listening
def pre_connect(self, peer):
    """Ensure there is an open connection to the given peer.

    Returns (through a Deferred) the peer id. It should equal the given
    one, but may differ when e.g. the given peer was an IP address while
    the peer identifies itself with a host name. The returned peer is
    the real one that should be used, which is handy when we are not
    100% sure of the peer's identity.
    """
    if peer in self._connections:
        return defer.succeed(peer)
    connecting = self._connect(peer, exact_peer=False)
    connecting.addCallback(lambda proto: proto.peer)
    return connecting
def send(self, peer, typename, data):
    """Send a packet to a peer, connecting first when necessary."""
    def try_send(_):
        if peer in self._connections:
            self._connections[peer][0].send_packet(typename, data)
            return defer.succeed(None)
        connecting = self._connect(peer)
        connecting.addCallback(try_send)
        return connecting

    d = try_send(None)
    self._ongoing_sends.add(d)

    def forget(result):
        # The send may already have been removed by close(); discard
        # tolerates that.
        self._ongoing_sends.discard(d)
        return result

    d.addBoth(forget)
    return d
def close(self):
    """
    Stop listening for new connections and close all open connections.
    :returns: Deferred that calls back once everything is closed.
    """
    def cancel_sends(_):
        logger.debug("Closed port. Cancelling all on-going send operations...")
        while self._ongoing_sends:
            d = self._ongoing_sends.pop()
            d.cancel()
    def close_connections(_):
        # .values() instead of the Python-2-only .itervalues(): works on
        # both Python 2 and 3.
        all_connections = [c for conns in self._connections.values() for c in conns]
        logger.debug("Closing all connections (there are %s)..." % len(all_connections))
        for c in all_connections:
            c.transport.loseConnection()
        ds = [c.wait_for_close() for c in all_connections]
        d = defer.DeferredList(ds, fireOnOneErrback=True)
        def allclosed(_):
            logger.debug("All connections closed.")
        d.addCallback(allclosed)
        return d
    logger.debug("Closing connection pool...")
    d = defer.maybeDeferred(self._listeningport.stopListening)
    d.addCallback(cancel_sends)
    d.addCallback(close_connections)
    return d
def get_config_value(self, section, key, return_type: type):
    """Read a config value by section and key, converted via method_mapping.

    :param section: config file's section, e.g. ``[default]``
    :param key: config file's key under the section, e.g. ``packages_scan``
    :param return_type: return value type, str | int | bool.
    :raises ConfigError: if the section or the key does not exist.
    """
    try:
        # method_mapping selects the type-appropriate reader.
        return self.method_mapping[return_type](section, key)
    except (NoSectionError, NoOptionError) as e:
        # Both configparser errors are handled identically: one handler.
        raise ConfigError(e.message)
def nova(*arg):
    """
    Nova annotation for adding a function to process nova notifications.
    If event_type includes a wildcard, puts {pattern: function} into the
    wildcard process dict, else puts {event_type: function} into the
    process dict.
    :param arg: event_type of notification
    """
    check_event_type(Openstack.Nova, *arg)
    event_type = arg[0]
    def decorator(func):
        if event_type.find("*") != -1:
            event_type_pattern = pre_compile(event_type)
            nova_customer_process_wildcard[event_type_pattern] = func
        else:
            nova_customer_process[event_type] = func
        log.info("add function {0} to process event_type:{1}".format(func.__name__, event_type))
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            # Propagate the handler's return value (was silently dropped).
            return func(*args, **kwargs)
        return wrapper
    return decorator
def cinder(*arg):
    """
    Cinder annotation for adding a function to process cinder notifications.
    If event_type includes a wildcard, puts {pattern: function} into the
    wildcard process dict, else puts {event_type: function} into the
    process dict.
    :param arg: event_type of notification
    """
    check_event_type(Openstack.Cinder, *arg)
    event_type = arg[0]
    def decorator(func):
        if event_type.find("*") != -1:
            event_type_pattern = pre_compile(event_type)
            cinder_customer_process_wildcard[event_type_pattern] = func
        else:
            cinder_customer_process[event_type] = func
        log.info("add function {0} to process event_type:{1}".format(func.__name__, event_type))
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            # Propagate the handler's return value (was silently dropped).
            return func(*args, **kwargs)
        return wrapper
    return decorator
def neutron(*arg):
    """
    Neutron annotation for adding a function to process neutron notifications.
    If event_type includes a wildcard, puts {pattern: function} into the
    wildcard process dict, else puts {event_type: function} into the
    process dict.
    :param arg: event_type of notification
    """
    check_event_type(Openstack.Neutron, *arg)
    event_type = arg[0]
    def decorator(func):
        if event_type.find("*") != -1:
            event_type_pattern = pre_compile(event_type)
            neutron_customer_process_wildcard[event_type_pattern] = func
        else:
            neutron_customer_process[event_type] = func
        log.info("add function {0} to process event_type:{1}".format(func.__name__, event_type))
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            # Propagate the handler's return value (was silently dropped).
            return func(*args, **kwargs)
        return wrapper
    return decorator
def glance(*arg):
    """
    Glance annotation for adding a function to process glance notifications.
    If event_type includes a wildcard, puts {pattern: function} into the
    wildcard process dict, else puts {event_type: function} into the
    process dict.
    :param arg: event_type of notification
    """
    check_event_type(Openstack.Glance, *arg)
    event_type = arg[0]
    def decorator(func):
        if event_type.find("*") != -1:
            event_type_pattern = pre_compile(event_type)
            glance_customer_process_wildcard[event_type_pattern] = func
        else:
            glance_customer_process[event_type] = func
        log.info("add function {0} to process event_type:{1}".format(func.__name__, event_type))
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            # Propagate the handler's return value (was silently dropped).
            return func(*args, **kwargs)
        return wrapper
    return decorator
def swift(*arg):
    """
    Swift annotation for adding a function to process swift notifications.
    If event_type includes a wildcard, puts {pattern: function} into the
    wildcard process dict, else puts {event_type: function} into the
    process dict.
    :param arg: event_type of notification
    """
    check_event_type(Openstack.Swift, *arg)
    event_type = arg[0]
    def decorator(func):
        if event_type.find("*") != -1:
            event_type_pattern = pre_compile(event_type)
            swift_customer_process_wildcard[event_type_pattern] = func
        else:
            swift_customer_process[event_type] = func
        log.info("add function {0} to process event_type:{1}".format(func.__name__, event_type))
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            # Propagate the handler's return value (was silently dropped).
            return func(*args, **kwargs)
        return wrapper
    return decorator
def keystone(*arg):
    """
    Keystone annotation for adding a function to process keystone
    notifications (docstring previously said "Swift" — copy-paste error).
    If event_type includes a wildcard, puts {pattern: function} into the
    wildcard process dict, else puts {event_type: function} into the
    process dict.
    :param arg: event_type of notification
    """
    check_event_type(Openstack.Keystone, *arg)
    event_type = arg[0]
    def decorator(func):
        if event_type.find("*") != -1:
            event_type_pattern = pre_compile(event_type)
            keystone_customer_process_wildcard[event_type_pattern] = func
        else:
            keystone_customer_process[event_type] = func
        log.info("add function {0} to process event_type:{1}".format(func.__name__, event_type))
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            # Propagate the handler's return value (was silently dropped).
            return func(*args, **kwargs)
        return wrapper
    return decorator
def heat(*arg):
    """
    Heat annotation for adding a function to process heat notifications.
    If event_type includes a wildcard, puts {pattern: function} into the
    wildcard process dict, else puts {event_type: function} into the
    process dict.
    :param arg: event_type of notification
    """
    check_event_type(Openstack.Heat, *arg)
    event_type = arg[0]
    def decorator(func):
        if event_type.find("*") != -1:
            event_type_pattern = pre_compile(event_type)
            heat_customer_process_wildcard[event_type_pattern] = func
        else:
            heat_customer_process[event_type] = func
        log.info("add function {0} to process event_type:{1}".format(func.__name__, event_type))
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            # Propagate the handler's return value (was silently dropped).
            return func(*args, **kwargs)
        return wrapper
    return decorator
def addFactory(self, identifier, factory):
    """Register ``factory`` under ``identifier``.

    Remote clients will be able to connect to it afterwards. Calls
    ``factory.doStart``.
    """
    # Start first, so a failing doStart never leaves a half-registered
    # factory behind.
    factory.doStart()
    self._factories[identifier] = factory
def removeFactory(self, identifier):
    """Unregister and stop the factory stored under ``identifier``.

    Remote clients will no longer be able to connect to it. Calls the
    factory's ``doStop`` method and returns the removed factory.
    """
    removed = self._factories.pop(identifier)
    removed.doStop()
    return removed
def connect(self, factory):
    """Attempt a connection through the identified factory.

    Finds the requested factory and uses it to build a protocol as if
    the AMP protocol's peer were making the connection, wires the
    protocol to a fresh transport, stores it under a unique identifier,
    and returns that identifier.
    """
    try:
        target = self._factories[factory]
    except KeyError:
        raise NoSuchFactory()
    remote = self.getProtocol()
    proto = target.buildProtocol(remote.transport.getPeer())
    if proto is None:
        raise ConnectionRefused()
    identifier = uuid4().hex
    proto.makeConnection(MultiplexedTransport(identifier, remote))
    self._protocols[identifier] = proto
    return {"connection": identifier}
def receiveData(self, connection, data):
    """Feed ``data`` to the protocol registered for ``connection``."""
    try:
        target = self._protocols[connection]
    except KeyError:
        raise NoSuchConnection()
    target.dataReceived(data)
    return {}
def disconnect(self, connection):
    """Disconnect the given protocol and drop its registration."""
    dropped = self._protocols.pop(connection)
    dropped.transport = None
    return {}
def _callRemote(self, command, **kwargs):
"""Shorthand for ``callRemote``.
This uses the factory's connection to the AMP peer.
"""
return self.factory.remote.callRemote(command, **kwargs) |
def connectionMade(self):
    """Create a multiplexed stream connection.

    Connects to the AMP server's multiplexed factory using the
    identifier defined by this protocol's factory; once done, the
    connection reference is stored and buffered data is flushed.
    """
    log.msg("Creating multiplexed AMP connection...")
    identifier = self.factory.remoteFactoryIdentifier
    connecting = self._callRemote(Connect, factory=identifier)
    connecting.addCallback(self._multiplexedConnectionMade)
def _multiplexedConnectionMade(self, response):
    """Record the new connection and flush any buffered data.

    Registers this protocol on the factory under the connection
    identifier, sends currently buffered data, and discards the buffer
    afterwards.
    """
    conn = response["connection"]
    self.connection = conn
    self.factory.protocols[conn] = self
    log.msg("Multiplexed AMP connection ({!r}) made!".format(conn))
    buffered, self._buffer = self._buffer.getvalue(), None
    if not buffered:
        log.msg("No buffered data to send!")
        return
    log.msg("Sending {} bytes of buffered data...".format(len(buffered)))
    self._sendData(buffered)
def dataReceived(self, data):
    """Handle data arriving from the local side.

    Buffers while the multiplexed connection is still being set up;
    otherwise forwards the data over the multiplexed connection.
    """
    log.msg("{} bytes of data received locally".format(len(data)))
    if self.connection is not None:
        log.msg("Sending data...")
        self._sendData(data)
    else:
        # Still connecting; keep the data until the connection is made.
        log.msg("Connection not made yet, buffering...")
        self._buffer.write(data)
def _sendData(self, data):
    """Transmit ``data`` over the multiplexed connection, logging errors."""
    sending = self._callRemote(Transmit, connection=self.connection, data=data)
    sending.addErrback(log.err)
def connectionLost(self, reason):
    """Drop this protocol's registration from the factory, if any."""
    conn = self.connection
    if conn is not None:
        del self.factory.protocols[conn]
def getLocalProtocol(self, connectionIdentifier):
    """Look up a local protocol by connection identifier.

    Searches every local factory in order; raises ``NoSuchConnection``
    when no factory knows the identifier.
    """
    for candidate in self.localFactories:
        try:
            return candidate.protocols[connectionIdentifier]
        except KeyError:
            pass
    raise NoSuchConnection()
def remoteDataReceived(self, connection, data):
    """Replay data from the remote end onto the matching local protocol."""
    self.getLocalProtocol(connection).transport.write(data)
    return {}
def disconnect(self, connection):
    """Close the local protocol's transport at the other side's request."""
    target = self.getLocalProtocol(connection)
    target.transport.loseConnection()
    return {}
def enqueue(self, s):
    """Append ``s`` to the end of the queue.

    Equivalent to ``queue += s`` if ``queue`` were a regular string.
    """
    self._len += len(s)
    self._parts.append(s)
def dequeue(self, n):
    """Remove and return the first ``n`` characters of the queue.

    Equivalent to ``s = queue[:n]; queue = queue[n:]`` if ``queue``
    were a regular string.
    :raises ValueError: if the queue holds fewer than ``n`` characters.
    """
    if n > self._len:
        raise ValueError("Not enough bytes in the queue")
    self._len -= n
    taken = []
    remaining = n
    while remaining:
        chunk = self._parts.popleft()
        if len(chunk) <= remaining:
            taken.append(chunk)
            remaining -= len(chunk)
        else:
            # Split the chunk; the unconsumed tail goes back in front.
            taken.append(chunk[:remaining])
            self._parts.appendleft(chunk[remaining:])
            remaining = 0
    return "".join(taken)
def drop(self, n):
    """Discard the first ``n`` bytes of the queue.

    Equivalent to ``queue = queue[n:]`` if ``queue`` were a regular
    string.
    :raises ValueError: if the queue holds fewer than ``n`` characters.
    """
    if n > self._len:
        raise ValueError("Not enough bytes in the queue")
    self._len -= n
    remaining = n
    while remaining:
        chunk = self._parts.popleft()
        if len(chunk) > remaining:
            # Put the unconsumed tail back at the front.
            self._parts.appendleft(chunk[remaining:])
            break
        remaining -= len(chunk)
def peek(self, n):
    """Return the first ``n`` characters without removing them.

    Equivalent to ``s = queue[:n]`` if ``queue`` were a regular string.
    :raises ValueError: if the queue holds fewer than ``n`` characters.
    """
    if n > self._len:
        raise ValueError("Not enough bytes in the queue")
    collected = []
    remaining = n
    for chunk in self._parts:
        if remaining <= 0:
            break
        if len(chunk) <= remaining:
            collected.append(chunk)
            remaining -= len(chunk)
        else:
            collected.append(chunk[:remaining])
            remaining = 0
    return "".join(collected)
def centered(mystring, linewidth=None, fill=" "):
    '''Centre *mystring* within *linewidth* columns, padding both sides
    with *fill* (defaults to the terminal width minus one).'''
    if linewidth is None:
        linewidth = get_terminal_size().columns - 1
    # ANSI escape sequences do not count towards the visible width.
    slack = linewidth - length_no_ansi(mystring)
    pad = fill[:1]
    side = pad * (slack // 2)
    # Odd slack leaves one extra pad character on the right.
    return side + mystring + side + pad * (slack % 2)
def clock_on_right(mystring):
    '''Print *mystring* with the current time (HH:MM) right-aligned.'''
    used = length_no_ansi(mystring)
    # 5 columns are reserved for the "HH:MM" clock.
    gap = (get_terminal_size().columns - 1) - used - 5
    stamp = time.strftime("%I:%M", time.localtime())
    print(mystring + " " * gap + stamp)
def query_yes_no(question, default="yes"):
    '''Ask a yes/no question via input() and return the answer.

    "question" is presented to the user. "default" is the presumed
    answer when the user just hits <Enter>; it must be "yes" (the
    default), "no" or None (meaning an explicit answer is required).
    The return value is one of Answers.YES or Answers.NO.
    Copied (and modified) from
    http://stackoverflow.com/questions/3041986/python-command-line-yes-no-input
    '''
    valid = {"yes": Answers.YES, "y": Answers.YES, "ye": Answers.YES,
             "no": Answers.NO, "n": Answers.NO}
    if default is None:
        prompt = " [y/n] "
    elif default == "yes":
        prompt = " [Y/n] "
    elif default == "no":
        prompt = " [y/N] "
    else:
        raise ValueError("invalid default answer: '%s'" % default)
    while True:
        sys.stdout.write(question + prompt)
        choice = input().lower()
        if choice in valid:
            return valid[choice]
        # Empty input falls back to the default, when one exists.
        if choice == '' and default is not None:
            return valid[default]
        sys.stdout.write("Please respond with 'yes' or 'no' "
                         "(or 'y' or 'n').\n")
def query_yes_quit(question, default="quit"):
    '''Ask a yes/quit question via input() and return the answer.

    "question" is presented to the user. "default" is the presumed
    answer when the user just hits <Enter>; it must be "yes",
    "quit" (the default) or None (meaning an explicit answer is
    required). Returns Answers.YES or Answers.QUIT.
    Modified from
    http://stackoverflow.com/questions/3041986/python-command-line-yes-no-input
    '''
    valid = {"yes": Answers.YES, "y": Answers.YES, "ye": Answers.YES,
             "quit": Answers.QUIT, "q": Answers.QUIT}
    if default is None:
        prompt = " [y/q] "
    elif default == "yes":
        prompt = " [Y/q] "
    elif default == "quit":
        prompt = " [y/Q] "
    else:
        raise ValueError("invalid default answer: '%s'" % default)
    while True:
        sys.stdout.write(question + prompt)
        choice = input().lower()
        if choice in valid:
            return valid[choice]
        # Empty input falls back to the default, when one exists.
        if choice == '' and default is not None:
            return valid[default]
        sys.stdout.write("Please respond with 'yes' or 'quit' "
                         "(or 'y' or 'q').\n")
def wait(sec):
    '''
    Prints a countdown timer in the format 0:00 to the console,
    then clears the line when the timer is done.
    '''
    while sec > 0:
        sys.stdout.write('\r' + str(sec // 60).zfill(1) + ":" +
                         str(sec % 60).zfill(2) + ' ')
        # Without a flush the timer may never appear: we never write a
        # newline, so stdout's buffering can hold the output back.
        sys.stdout.flush()
        sec -= 1
        time.sleep(1)
    # Overwrite the whole "0:00 " field (5 chars); a single space only
    # cleared the first column and left ":00 " on screen.
    sys.stdout.write('\r' + ' ' * 5 + '\r')
    sys.stdout.flush()
def version_number_str(major, minor=0, patch=0, prerelease=None, build=None):
    """
    Takes the parts of a semantic version number, and returns a nicely
    formatted string.

    :param prerelease: pre-release identifier; a leading '-' is added when
        missing. Non-string values are coerced with str() (previously only
        the non-'-' branch coerced, so e.g. an int crashed on .startswith).
    :param build: build metadata; a leading '+' is added when missing.
        Coerced with str() for the same reason.
    """
    version = "{0}.{1}.{2}".format(major, minor, patch)
    if prerelease:
        prerelease = str(prerelease)
        if not prerelease.startswith('-'):
            prerelease = '-' + prerelease
        version += prerelease
    if build:
        build = str(build)
        if not build.startswith('+'):
            build = '+' + build
        version += build
    return version
def get_terminal_size():
    """Return the terminal dimensions.

    :return: ``(columns, lines)``; when there's no terminal to be
        found, ``(80, 24)`` is returned instead.
    """
    try:
        try:
            # Part of the standard library since Python 3.3.
            from shutil import get_terminal_size as _get_terminal_size  # pylint: disable=no-name-in-module
        except ImportError:
            # Backport package for older interpreters.
            from backports.shutil_get_terminal_size import get_terminal_size as _get_terminal_size  # pylint: disable=import-error
        return _get_terminal_size()
    except ValueError:
        # Can result from the 'underlying buffer being detached', which
        # occurs when running the unittests on Windows (but not Linux?).
        fallback = namedtuple('Terminal_Size', 'columns lines')
        return fallback(80, 24)
def identify_unit_framework(target_unit):
    """
    Identify whether the user is requesting unit validation against
    astropy.units, pint, or quantities.
    """
    if HAS_ASTROPY:
        from astropy.units import UnitBase
        if isinstance(target_unit, UnitBase):
            return ASTROPY
    if HAS_PINT:
        from pint.unit import UnitsContainer
        # Pint units are recognized structurally via 'dimensionality'.
        dims = getattr(target_unit, 'dimensionality', None)
        if isinstance(dims, UnitsContainer):
            return PINT
    if HAS_QUANTITIES:
        from quantities.unitquantity import IrreducibleUnit
        from quantities import Quantity
        if isinstance(target_unit, (IrreducibleUnit, Quantity)):
            return QUANTITIES
    raise TraitError("Could not identify unit framework for target unit of type {0}".format(type(target_unit).__name__))
def assert_unit_convertability(name, value, target_unit, unit_framework):
    """
    Check that a value has physical type consistent with user-specified units
    Note that this does not convert the value, only check that the units have
    the right physical dimensionality.
    Parameters
    ----------
    name : str
        The name of the value to check (used for error messages).
    value : `numpy.ndarray` or instance of `numpy.ndarray` subclass
        The value to check.
    target_unit : unit
        The unit that the value should be convertible to.
    unit_framework : str
        The unit framework to use
    Raises
    ------
    TraitError
        If ``value`` is not a quantity of the selected framework, or its
        units are not convertible to ``target_unit``.
    """
    if unit_framework == ASTROPY:
        from astropy.units import Quantity
        if not isinstance(value, Quantity):
            raise TraitError("{0} should be given as an Astropy Quantity instance".format(name))
        # Equivalence check only -- the value itself is never converted.
        if not target_unit.is_equivalent(value.unit):
            raise TraitError("{0} should be in units convertible to {1}".format(name, target_unit))
    elif unit_framework == PINT:
        from pint.unit import UnitsContainer
        # Pint quantities are recognized structurally by their
        # 'dimensionality' attribute rather than by class.
        if not (hasattr(value, 'dimensionality') and isinstance(value.dimensionality, UnitsContainer)):
            raise TraitError("{0} should be given as a Pint Quantity instance".format(name))
        if value.dimensionality != target_unit.dimensionality:
            raise TraitError("{0} should be in units convertible to {1}".format(name, target_unit))
    elif unit_framework == QUANTITIES:
        from quantities import Quantity
        if not isinstance(value, Quantity):
            raise TraitError("{0} should be given as a quantities Quantity instance".format(name))
        # 'simplified' reduces both sides to base units before comparing.
        if value.dimensionality.simplified != target_unit.dimensionality.simplified:
            raise TraitError("{0} should be in units convertible to {1}".format(name, target_unit.dimensionality.string))
def pad(data_to_pad, block_size, style='pkcs7'):
    """Apply standard padding.
    :Parameters:
      data_to_pad : byte string
        The data that needs to be padded.
      block_size : integer
        The block boundary to use for padding. The output length is
        guaranteed to be a multiple of ``block_size``.
      style : string
        Padding algorithm. It can be *'pkcs7'* (default), *'iso7816'* or
        *'x923'*.
    :Return:
        The original data with the appropriate padding added at the end.
    """
    # A full block of padding is added when the data is already aligned.
    padding_len = block_size - len(data_to_pad) % block_size
    if style == 'pkcs7':
        # Every pad byte carries the pad length.
        padding = bchr(padding_len) * padding_len
    elif style == 'x923':
        # Zero fill; only the last byte carries the pad length.
        padding = bchr(0) * (padding_len - 1) + bchr(padding_len)
    elif style == 'iso7816':
        # A 0x80 marker followed by zero fill.
        padding = bchr(128) + bchr(0) * (padding_len - 1)
    else:
        raise ValueError("Unknown padding style")
    return data_to_pad + padding
def unpad(padded_data, block_size, style='pkcs7'):
    """Remove standard padding.
    :Parameters:
      padded_data : byte string
        A piece of data with padding that needs to be stripped.
      block_size : integer
        The block boundary to use for padding. The input length
        must be a multiple of ``block_size``.
      style : string
        Padding algorithm. It can be *'pkcs7'* (default), *'iso7816'* or
        *'x923'*.
    :Return:
        Data without padding.
    :Raises ValueError:
        if the padding is incorrect.
    """
    pdata_len = len(padded_data)
    if pdata_len % block_size:
        raise ValueError("Input data is not padded")
    if pdata_len == 0:
        # An empty input passed the modulo check and then crashed with an
        # IndexError at padded_data[-1]; it is invalid padding, not a crash.
        raise ValueError("Zero-length input cannot be unpadded")
    if style in ('pkcs7', 'x923'):
        # In both schemes the last byte carries the padding length.
        padding_len = bord(padded_data[-1])
        if padding_len < 1 or padding_len > min(block_size, pdata_len):
            raise ValueError("Padding is incorrect.")
        if style == 'pkcs7':
            if padded_data[-padding_len:] != bchr(padding_len) * padding_len:
                raise ValueError("PKCS#7 padding is incorrect.")
        else:
            if padded_data[-padding_len:-1] != bchr(0) * (padding_len - 1):
                raise ValueError("ANSI X.923 padding is incorrect.")
    elif style == 'iso7816':
        # Padding starts at the last 0x80 marker byte.
        padding_len = pdata_len - padded_data.rfind(bchr(128))
        if padding_len < 1 or padding_len > min(block_size, pdata_len):
            raise ValueError("Padding is incorrect.")
        if padding_len > 1 and padded_data[1 - padding_len:] != bchr(0) * (padding_len - 1):
            raise ValueError("ISO 7816-4 padding is incorrect.")
    else:
        raise ValueError("Unknown padding style")
    return padded_data[:-padding_len]
def make_federation_entity(config, eid='', httpcli=None, verify_ssl=True):
    """
    Construct a :py:class:`fedoidcmsg.entity.FederationEntity` instance based
    on given configuration.
    :param config: Federation entity configuration
    :param eid: Entity ID
    :param httpcli: A http client instance to use when sending HTTP requests
    :param verify_ssl: Whether TLS/SSL certificates should be verified
    :return: A :py:class:`fedoidcmsg.entity.FederationEntity` instance
    """
    args = {}
    # Fall back to the configured entity id when none was given explicitly.
    if not eid:
        try:
            eid = config['entity_id']
        except KeyError:
            pass
    if 'self_signer' in config:
        self_signer = make_internal_signing_service(config['self_signer'],
                                                    eid)
        args['self_signer'] = self_signer
    try:
        bundle_cnf = config['fo_bundle']
    except KeyError:
        pass
    else:
        # Only the key-jar related settings (KJ_SPECS) feed init_key_jar.
        _args = dict([(k, v) for k, v in bundle_cnf.items() if k in KJ_SPECS])
        if _args:
            _kj = init_key_jar(**_args)
        else:
            _kj = None
        if 'dir' in bundle_cnf:
            # File-system backed bundle; keys are URL-quoted to be path safe.
            jb = FSJWKSBundle(eid, _kj, bundle_cnf['dir'],
                              key_conv={'to': quote_plus, 'from': unquote_plus})
        else:
            jb = JWKSBundle(eid, _kj)
        args['fo_bundle'] = jb
    # Copy plain configuration values straight into the constructor args.
    for item in ['context', 'entity_id', 'fo_priority', 'mds_owner']:
        try:
            args[item] = config[item]
        except KeyError:
            pass
    if 'entity_id' not in args:
        args['entity_id'] = eid
    # These are mutually exclusive
    if 'sms_dir' in config:
        args['sms_dir'] = config['sms_dir']
        return FederationEntityOOB(httpcli, iss=eid, **args)
    elif 'mds_service' in config:
        args['verify_ssl'] = verify_ssl
        args['mds_service'] = config['mds_service']
        return FederationEntityAMS(httpcli, iss=eid, **args)
    elif 'mdss_endpoint' in config:
        args['verify_ssl'] = verify_ssl
        # These are mandatory for this type of entity
        for key in ['mdss_endpoint', 'mdss_owner', 'mdss_keys']:
            args[key] = config[key]
        return FederationEntitySwamid(httpcli, iss=eid, **args)
    # NOTE(review): returns None implicitly when none of sms_dir /
    # mds_service / mdss_endpoint is configured -- confirm callers handle it.
def pick_signed_metadata_statements_regex(self, pattern, context):
    """
    Pick signed metadata statements based on ISS pattern matching
    :param pattern: A regular expression to match the iss against
    :param context: Which operation the statements are to be used for
    :return: list of tuples (FO ID, signed metadata statement)
    """
    comp_pat = re.compile(pattern)
    sms_dict = self.signer.metadata_statements[context]
    res = []
    for iss, vals in sms_dict.items():
        if comp_pat.search(iss):
            # append, not extend: extend() flattened the pair into two
            # separate list items, contradicting the documented return
            # value of a list of (FO ID, statement) tuples.
            res.append((iss, vals))
    return res
def pick_signed_metadata_statements(self, fo, context):
    """
    Pick signed metadata statements issued by a specific Federation Operator.
    :param fo: Federation operator's ID
    :param context: In connect with which operation (one of the values in
        :py:data:`fedoidc.CONTEXTS`).
    :return: list of tuples (FO ID, signed metadata statement)
    """
    sms_dict = self.signer.metadata_statements[context]
    res = []
    for iss, vals in sms_dict.items():
        if iss == fo:
            # append, not extend: extend() flattened the pair into two
            # separate list items, contradicting the documented return
            # value of a list of (FO ID, statement) tuples.
            res.append((iss, vals))
    return res
def get_metadata_statement(self, input, cls=MetadataStatement,
                           context=''):
    """
    Unpack and evaluate a compound metadata statement, going through the
    three necessary steps:
    * unpack the metadata statement
    * verify that the given statements are expected to be used in this context
    * evaluate the metadata statements (= flatten)
    :param input: The metadata statement as a JSON document, a Message
        instance or a dictionary
    :param cls: The class the response should be typed into
    :param context: In which context the metadata statement should be used.
    :return: A list of :py:class:`fedoidc.operator.LessOrEqual` instances
    """
    logger.debug('Incoming metadata statement: {}'.format(input))
    # Normalize the three accepted input forms to a plain dictionary.
    if isinstance(input, dict):
        data = input
    elif isinstance(input, Message):
        data = input.to_dict()
    else:
        data = json.loads(input)
    _pi = self.unpack_metadata_statement(ms_dict=data, cls=cls)
    if not _pi.result:
        return []
    logger.debug('Managed to unpack the metadata statement')
    _cms = self.correct_usage(_pi.result, context) if context else _pi.result
    logger.debug('After filtering for correct usage: {}'.format(_cms))
    if not _cms:
        return []
    return self.evaluate_metadata_statement(_cms)
def self_sign(self, req, receiver='', aud=None):
    """
    Sign the extended request.
    :param req: Request, a :py:class:`fedoidcmsg.MetadataStatement' instance
    :param receiver: The intended user of this metadata statement
    :param aud: The audience, a list of receivers.
    :return: An augmented set of request arguments
    """
    # Prefer the entity id as issuer when one is configured.
    if self.entity_id:
        _iss = self.entity_id
    else:
        _iss = self.iss
    creq = req.copy()
    if not 'metadata_statement_uris' in creq and not \
            'metadata_statements' in creq:
        # Leaf case: no inner statements, so sign the whole request once,
        # keyed under our own issuer id.
        _copy = creq.copy()
        _jws = self.self_signer.sign(_copy, receiver=receiver, iss=_iss,
                                     aud=aud)
        sms_spec = {'metadata_statements': {self.iss: _jws}}
    else:
        # The inner statements themselves must not be part of what is
        # signed, so strip them from the working copy first.
        for ref in ['metadata_statement_uris', 'metadata_statements']:
            try:
                del creq[ref]
            except KeyError:
                pass
        sms_spec = {'metadata_statements': Message()}
        # Create and sign one statement per federation operator (foid),
        # each containing only that operator's inner statement.
        for ref in ['metadata_statement_uris', 'metadata_statements']:
            if ref not in req:
                continue
            for foid, value in req[ref].items():
                _copy = creq.copy()
                _copy[ref] = Message()
                _copy[ref][foid] = value
                _jws = self.self_signer.sign(_copy, receiver=receiver,
                                             iss=_iss, aud=aud)
                sms_spec['metadata_statements'][foid] = _jws
    creq.update(sms_spec)
    return creq
def update_metadata_statement(self, metadata_statement, receiver='',
                              federation=None, context=''):
    """
    Update a metadata statement by:
    * adding signed metadata statements or uris pointing to signed
      metadata statements,
    * adding the entity's signing keys, and
    * creating one metadata statement per signed statement or uri,
      signing these and adding them to the metadata statement.
    :param metadata_statement: A :py:class:`fedoidcmsg.MetadataStatement`
        instance
    :param receiver: The intended receiver of the metadata statement
    :param federation:
    :param context:
    :return: An augmented metadata statement
    """
    self.add_sms_spec_to_request(metadata_statement, federation=federation,
                                 context=context)
    self.add_signing_keys(metadata_statement)
    signed = self.self_sign(metadata_statement, receiver)
    # The signing keys are unprotected here, so they may as well go.
    del signed['signing_keys']
    return signed
def add_sms_spec_to_request(self, req, federation='', loes=None,
                            context=''):
    """
    Update a request with signed metadata statements.
    :param req: The request
    :param federation: Federation Operator ID, or a list of them
    :param loes: List of :py:class:`fedoidc.operator.LessOrEqual` instances
    :param context:
    :return: The updated request
    """
    if federation:
        # A specific federation, or list of federations.
        fos = federation if isinstance(federation, list) else [federation]
        req.update(self.gather_metadata_statements(fos, context=context))
    elif loes:
        # The federations the given LessOrEqual instances belong to.
        fos = list([le.fo for le in loes])
        req.update(self.gather_metadata_statements(fos, context=context))
    else:
        # All federations I belong to.
        req.update(self.gather_metadata_statements(context=context))
    return req
def gather_metadata_statements(self, fos=None, context=''):
    """
    Only gathers metadata statements and returns them.
    :param fos: Signed metadata statements from these Federation Operators
        should be added.
    :param context: context of the metadata exchange
    :return: Dictionary with signed Metadata Statements as values
    """
    if not context:
        context = self.context
    _res = {}
    if self.metadata_statements:
        try:
            cms = self.metadata_statements[context]
        except KeyError:
            if self.metadata_statements == {
                'register': {},
                'discovery': {},
                'response': {}
            }:
                # No superior so an FO then. Nothing to add ..
                pass
            else:
                logger.error(
                    'No metadata statements for this context: {}'.format(
                        context))
                raise ValueError('Wrong context "{}"'.format(context))
        else:
            if cms != {}:
                # Default to every operator we have statements for.
                if fos is None:
                    fos = list(cms.keys())
                for f in fos:
                    try:
                        val = cms[f]
                    except KeyError:
                        continue
                    # URLs are passed by reference, everything else by value.
                    if val.startswith('http'):
                        value_type = 'metadata_statement_uris'
                    else:
                        value_type = 'metadata_statements'
                    try:
                        _res[value_type][f] = val
                    except KeyError:
                        # First entry of this kind: create the container.
                        _res[value_type] = Message()
                        _res[value_type][f] = val
    return _res
def add_sms_spec_to_request(self, req, federation='', loes=None,
                            context='', url=''):
    """
    Add signed metadata statements to the request.

    Fetches the signed metadata statement collection from the MDS service,
    verifies the signature and the issuer, and merges the statements into
    the request (URLs under 'metadata_statement_uris', inline JWTs under
    'metadata_statements').

    :param req: The request so far
    :param federation: If only signed metadata statements from a specific
        set of federations should be included this is the set.
    :param loes: - not used -
    :param context: What kind of request/response it is: 'registration',
        'discovery' or 'response'. The later being registration response.
    :param url: Just for testing !!
    :return: A possibly augmented request.
    :raises ConnectionError: on an HTTP error from the MDS service.
    :raises KeyError: if the response issuer is not the expected MDS owner.
    """
    # fetch the signed metadata statement collection
    if federation:
        if not isinstance(federation, list):
            federation = [federation]
    if not url:
        url = "{}/getms/{}/{}".format(self.mds_service, context,
                                      quote_plus(self.entity_id))
    http_resp = self.httpcli(method='GET', url=url, verify=self.verify_ssl)
    if http_resp.status_code >= 400:
        raise ConnectionError('HTTP Error: {}'.format(http_resp.text))
    # verify signature on response
    msg = JsonWebToken().from_jwt(http_resp.text,
                                  keyjar=self.jwks_bundle[self.mds_owner])
    if msg['iss'] != self.mds_owner:
        raise KeyError('Wrong iss')
    if federation:
        # Keep only the statements for the requested federations.
        # NOTE(review): the comprehension variable shadows the outer _ms.
        _ms = dict(
            [(fo, _ms) for fo, _ms in msg.items() if fo in federation])
    else:
        _ms = msg.extra()
    # 'kid' is a JWT header artefact, not a federation entry.
    try:
        del _ms['kid']
    except KeyError:
        pass
    _sms = {}
    _smsu = {}
    for fo, item in _ms.items():
        # Split references (URIs) from embedded statements.
        if item.startswith('https://') or item.startswith('http://'):
            _smsu[fo] = item
        else:
            _sms[fo] = item
    if _sms:
        req.update({'metadata_statements': _sms})
    if _smsu:
        req.update({'metadata_statement_uris': _smsu})
    return req
def add_sms_spec_to_request(self, req, federation='', loes=None,
                            context='', url=''):
    """
    Add signed metadata statements to the request.

    Fetches the signed metadata statement collection from the MDSS
    endpoint, verifies the issuer, and merges the statements into the
    request under 'metadata_statement_uris'.

    :param req: The request so far
    :param federation: If only signed metadata statements from a specific
        set of federations should be included this is the set.
    :param loes: - not used -
    :param context: What kind of request/response it is: 'registration',
        'discovery' or 'response'. The later being registration response.
    :param url: Just for testing !!
    :return: A possibly augmented request.
    :raises ConnectionError: on an HTTP error from the MDSS endpoint.
    :raises KeyError: if the response issuer is not the expected MDSS owner.
    """
    # fetch the signed metadata statement collection
    if federation:
        if not isinstance(federation, list):
            federation = [federation]
    if not url:
        url = "{}/getsmscol/{}/{}".format(self.mdss_endpoint, context,
                                          quote_plus(self.entity_id))
    http_resp = self.httpcli(method='GET', url=url, verify=self.verify_ssl)
    if http_resp.status_code >= 400:
        raise ConnectionError('HTTP Error: {}'.format(http_resp.text))
    msg = JsonWebToken().from_jwt(http_resp.text, keyjar=self.mdss_keys)
    if msg['iss'] != self.mdss_owner:
        raise KeyError('Wrong iss')
    if federation:
        # Keep only the statements for the requested federations.
        _sms = dict(
            [(fo, _ms) for fo, _ms in msg.items() if fo in federation])
    else:
        _sms = msg.extra()
    # 'kid' is a JWT header artefact, not a federation entry.
    try:
        del _sms['kid']
    except KeyError:
        pass
    # NOTE(review): everything goes under 'metadata_statement_uris' here,
    # unlike the sibling implementation that splits URLs from inline JWTs —
    # presumably this MDSS variant only ever returns URIs; confirm.
    req.update({'metadata_statement_uris': _sms})
    return req
def pretty_print(input_word, anagrams, by_length=False):
    """Prints the anagram results sorted by score to stdout.

    Args:
        input_word: the base word we searched on
        anagrams: generator of (word, score) from anagrams_in_word
        by_length: a boolean to declare printing by length instead of score
    """
    groups = {}
    if by_length:
        noun = "tiles"
        # Group by word length, keeping the score visible next to each word.
        for word, score in anagrams:
            groups.setdefault(len(word), []).append(
                "{0} ({1:d})".format(word, score))
    else:
        noun = "points"
        # Group by score.
        for word, score in anagrams:
            groups.setdefault(score, []).append(word)
    print("Anagrams for {0}{1}:".format(input_word, " (score)" * by_length))
    if not valid_scrabble_word(input_word):
        print("{0} is not possible in Scrabble.".format(input_word))
    # Highest group first.
    for key, words in sorted(groups.items(), reverse=True):
        print("{0:d} {1}: {2}".format(key, noun, ", ".join(words)))
def argument_parser(args):
    """Argparse logic, command line options.

    Args:
        args: sys.argv[1:], everything passed to the program after its name

    Returns:
        A tuple of:
            a list of words/letters to search
            a boolean to declare if we want to use the sowpods words file
            a boolean to declare if we want to output anagrams by length
            a string of starting characters to find anagrams based on
            a string of ending characters to find anagrams based on

    Raises:
        SystemExit if the user passes invalid arguments, --version or --help
    """
    # add_help=False: help is implemented manually so the module docstring
    # can be shown instead of the generated argparse help text.
    parser = argparse.ArgumentParser(
        prog="nagaram",
        description="Finds Scabble anagrams.",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        add_help=False,
    )
    parser.add_argument(
        "-h", "--help",
        dest="help",
        action="store_true",
        default=False,
    )
    parser.add_argument(
        "--sowpods",
        dest="sowpods",
        action="store_true",
        default=False,
    )
    parser.add_argument(
        "--length",
        "-l",
        dest="length",
        action="store_true",
        default=False,
    )
    parser.add_argument(
        "--starts-with",
        "-s",
        dest="starts_with",
        metavar="chars",
        default="",
        nargs=1,
        type=str,
    )
    parser.add_argument(
        "--ends-with",
        "-e",
        dest="ends_with",
        metavar="chars",
        default="",
        nargs=1,
        type=str,
    )
    parser.add_argument(
        "--version",
        "-v",
        action="version",
        version="Nagaram {0} (Released: {1})".format(
            nagaram.__version__,
            nagaram.__release_date__,
        )
    )
    # Everything left over is the letters/words to search with.
    parser.add_argument(
        dest="wordlist",
        metavar="letters to find anagrams with (? for anything, _ for blanks)",
        nargs=argparse.REMAINDER,
    )
    settings = parser.parse_args(args)
    if settings.help:
        raise SystemExit(nagaram.__doc__.strip())
    if not settings.wordlist:
        raise SystemExit(parser.print_usage())
    # nargs=1 wraps the value in a single-element list; unwrap it.
    if settings.starts_with:
        settings.starts_with = settings.starts_with[0]
    if settings.ends_with:
        settings.ends_with = settings.ends_with[0]
    return (settings.wordlist, settings.sowpods, settings.length,
            settings.starts_with, settings.ends_with)
def main(arguments=None):
    """Main command line entry point.

    Args:
        arguments: argument list to parse; defaults to sys.argv[1:]
    """
    if not arguments:
        arguments = sys.argv[1:]
    wordlist, sowpods, by_length, start, end = argument_parser(arguments)
    for word in wordlist:
        pretty_print(
            word,
            anagrams_in_word(word, sowpods, start, end),
            by_length,
        )
def register_type(self, typename):
    """
    Registers a type name so that it may be used to send and receive packages.

    :param typename: Name of the packet type. A method with the same name and a
        "on_" prefix should be added to handle incomming packets.
    :raises ValueError: If there is a hash code collision.
    """
    key = typehash(typename)
    if key in self._type_register:
        # Two distinct names hashing to the same key would be ambiguous
        # on the wire, so refuse the registration outright.
        raise ValueError("Type name collision. Type %s has the same hash." % repr(self._type_register[key]))
    self._type_register[key] = typename
def dataReceived(self, data):
    """
    Do not overwrite this method. Instead implement `on_...` methods for the
    registered typenames to handle incomming packets.

    Buffers incoming bytes and extracts as many complete
    (header + payload) frames as are available, dispatching each to
    `packet_received` (or `on_unregistered_type` for unknown type keys).
    """
    self._unprocessed_data.enqueue(data)
    # Drain the buffer one complete frame at a time.
    while True:
        if len(self._unprocessed_data) < self._header.size:
            return  # not yet enough data
        # Peek (don't consume) the header until the full frame has arrived.
        hdr_data = self._unprocessed_data.peek(self._header.size)
        packet_length, typekey = self._header.unpack(hdr_data)
        total_length = self._header.size + packet_length
        if len(self._unprocessed_data) < total_length:
            return  # not yet enough data
        # Full frame available: consume header, then payload.
        self._unprocessed_data.drop(self._header.size)
        packet = self._unprocessed_data.dequeue(packet_length)
        self._start_receive = None
        typename = self._type_register.get(typekey, None)
        if typename is None:
            self.on_unregistered_type(typekey, packet)
        else:
            self.packet_received(typename, packet)
def send_packet(self, typename, packet):
    """
    Send a packet.

    :param typename: A previously registered typename.
    :param packet: String with the content of the packet.
    :raises ValueError: if `typename` was never registered.
    """
    key = typehash(typename)
    registered = self._type_register.get(key, None)
    if registered != typename:
        raise ValueError("Cannot send packet with unregistered type %s." % repr(typename))
    # Frame = fixed-size header (length + type key) followed by the payload.
    header = self._header.pack(len(packet), key)
    self.transport.writeSequence([header, packet])
def on_unregistered_type(self, typekey, packet):
    """
    Invoked if a packet with an unregistered type was received.
    Default behaviour is to log and close the connection.

    :param typekey: hash key of the unknown packet type
    :param packet: the raw payload that could not be dispatched
    """
    log.msg("Missing handler for typekey %s in %s. Closing connection." % (typekey, type(self).__name__))
    self.transport.loseConnection()
def create_tcp_rpc_system(hostname=None, port_range=(0,), ping_interval=1, ping_timeout=0.5):
    """
    Creates a TCP based :class:`RPCSystem`.

    :param hostname: Name this host advertises to peers; defaults to the FQDN.
    :param port_range: List of ports to try. If `[0]`, an arbitrary free
        port will be used.
    :param ping_interval: Seconds between keep-alive pings.
    :param ping_timeout: Seconds to wait for a pong before giving up.
    """
    if hostname is None:
        hostname = socket.getfqdn()

    def ownid_factory(listeningport):
        # Peer id is "<hostname>:<actual bound port>".
        return "%s:%s" % (hostname, listeningport.getHost().port)

    def make_client_endpoint(peer):
        host, port = peer.split(":")
        # Talk to ourselves via loopback instead of the public name.
        if host == socket.getfqdn():
            host = "localhost"
        return endpoints.TCP4ClientEndpoint(reactor, host, int(port), timeout=5)

    server_endpoint = TCP4ServerRangeEndpoint(reactor, port_range)
    pool = connectionpool.ConnectionPool(server_endpoint, make_client_endpoint, ownid_factory)
    return RPCSystem(pool, ping_interval=ping_interval, ping_timeout=ping_timeout)
def open(self):
    """
    Opens the port.

    Opens the underlying connection pool and, once that succeeds, marks the
    system as open and starts the periodic ping loop.

    :returns: Deferred that callbacks when we are ready to make and receive calls.
    """
    logging.debug("Opening rpc system")
    d = self._connectionpool.open(self._packet_received)
    def opened(_):
        logging.debug("RPC system is open")
        self._opened = True
        logging.debug("Starting ping loop")
        # now=False: first ping only after a full interval has elapsed.
        self._ping_loop.start(self._ping_interval, now=False)
    d.addCallback(opened)
    return d
def close(self):
    """
    Stop listing for new connections and close all open connections.

    Stops the ping loop (cancelling any in-flight ping iteration) and then
    closes the connection pool.

    :returns: Deferred that calls back once everything is closed.
    """
    assert self._opened, "RPC System is not opened"
    logger.debug("Closing rpc system. Stopping ping loop")
    self._ping_loop.stop()
    if self._ping_current_iteration:
        self._ping_current_iteration.cancel()
    return self._connectionpool.close()
def get_function_url(self, function):
    """
    Registers the given callable in the system (if it isn't already)
    and returns the URL that can be used to invoke the given function from remote.

    :param function: the callable to expose
    :return: an "anycall://<ownid>/functions/<uuid>" URL string
    """
    assert self._opened, "RPC System is not opened"
    logging.debug("get_function_url(%s)" % repr(function))
    # NOTE(review): `~self._functions` and the `[:function]` slice look like
    # the legacy bidict inverse-view / inverse-lookup syntax — confirm that
    # self._functions is a bidict; a plain dict does not support this.
    if function in ~self._functions:
        functionid = self._functions[:function]
    else:
        functionid = uuid.uuid1()
        self._functions[functionid] = function
    return "anycall://%s/functions/%s" % (self._connectionpool.ownid, functionid.hex)
def create_function_stub(self, url):
    """
    Create a callable that will invoke the given remote function.
    The stub will return a deferred even if the remote function does not.

    :param url: an "anycall://<peer>/functions/<uuid>" URL as produced by
        :meth:`get_function_url`
    :return: a `_RPCFunctionStub` bound to the peer and function id
    :raises ValueError: if the URL is not a valid anycall function URL
    """
    assert self._opened, "RPC System is not opened"
    logging.debug("create_function_stub(%s)" % repr(url))
    # NOTE(review): `urlparse.urlparse` is the Python 2 module layout —
    # presumably imported as `urllib.parse as urlparse` on Python 3; confirm.
    parseresult = urlparse.urlparse(url)
    scheme = parseresult.scheme
    # path of a valid URL is "/functions/<uuid>" -> ["", "functions", "<uuid>"]
    path = parseresult.path.split("/")
    if scheme != "anycall":
        raise ValueError("Not an anycall URL: %s" % repr(url))
    if len(path) != 3 or path[0] != "" or path[1] != "functions":
        raise ValueError("Not an URL for a remote function: %s" % repr(url))
    try:
        functionid = uuid.UUID(path[2])
    except ValueError:
        raise ValueError("Not a valid URL for a remote function: %s" % repr(url))
    return _RPCFunctionStub(parseresult.netloc, functionid, self)
def _ping_loop_iteration(self):
    """
    Called every `ping_interval` seconds.
    Invokes `_ping()` remotely for every ongoing call.

    :returns: DeferredList that fires when every ping of this iteration
        has been answered or has failed.
    """
    pings = []
    for peerid, callid in list(self._local_to_remote):
        if (peerid, callid) not in self._local_to_remote:
            continue  # call finished in the meantime
        logger.debug("sending ping")
        ping_d = self._invoke_function(
            peerid, self._PING, (self._connectionpool.ownid, callid), {})
        #twistit.timeout_deferred(ping_d, self._ping_timeout, "Lost communication to peer during call.")

        # BUGFIX: bind peerid/callid as default arguments. The original
        # closure captured the loop variables by reference, so by the time
        # a ping deferred fired it always saw the values of the *last*
        # iteration and could errback the wrong call.
        def failed(failure, peerid=peerid, callid=callid):
            if (peerid, callid) in self._local_to_remote:
                call_d = self._local_to_remote.pop((peerid, callid))
                call_d.errback(failure)

        def success(value):
            logger.debug("received pong")
            return value

        ping_d.addCallbacks(success, failed)
        pings.append(ping_d)
    iteration_d = defer.DeferredList(pings)
    def done(result):
        # Iteration finished; nothing left for close() to cancel.
        self._ping_current_iteration = None
    self._ping_current_iteration = iteration_d
    iteration_d.addBoth(done)
    return iteration_d
def _ping(self, peerid, callid):
    """
    Called from remote to ask if a call made to here is still in progress.

    :param peerid: id of the peer that made the original call
    :param callid: id of the call being checked
    """
    if (peerid, callid) not in self._remote_to_local:
        # logger.warning (logger.warn is deprecated) with lazy %-args;
        # also fixes the "unfoutunate" typo in the message.
        logger.warning(
            "No remote call %s from %s. Might just be unfortunate timing.",
            callid, peerid)
def register_app_for_error_handling(wsgi_app, app_name, app_logger, custom_logging_service=None):
    """Wraps a WSGI app and handles uncaught exceptions and defined exception and outputs a the exception in a
    structured format.

    Parameters:
    - wsgi_app is the app.wsgi_app of flask,
    - app_name should in correct format e.g. APP_NAME_1,
    - app_logger is the logger object
    - custom_logging_service optionally replaces the default LoggingService

    Returns the wrapping WSGI callable.
    """
    logging_service = LoggingService(app_logger) if custom_logging_service is None else custom_logging_service
    exception_manager = ExceptionHandler(app_name, logging_service)
    def wrapper(environ, start_response):
        # Happy path returns immediately; both except branches fall through
        # to build a structured JSON error response below.
        try:
            return wsgi_app(environ, start_response)
        except RootException as e:
            app_request = Request(environ)
            # Known error type: only the last line of the traceback is kept.
            stack_trace = traceback.format_exc().splitlines()[-1]
            exception_manager.update_with_exception_data(e, app_request, stack_trace)
        except Exception:
            app_request = Request(environ)
            # Unknown error: keep the full traceback and wrap it in a
            # generic fatal RootException.
            stack_trace = traceback.format_exc()
            e = RootException("FATAL_000", {}, {}, {}, status_code=500)
            e.error_message = "Unknown System Error"
            exception_manager.update_with_exception_data(e, app_request, stack_trace)
        error_details = exception_manager.construct_error_details()
        http_status_code = exception_manager.get_http_status_code()
        response = Response(json.dumps(error_details), status=http_status_code, content_type='application/json')
        return response(environ, start_response)
    return wrapper
def _cmdRegex(self, cmd_grp=None):
    """Get command regex string and completer dict.

    :param cmd_grp: regex group name prefix for this command; defaults
        to "cmd".
    :return: tuple of (regex string, {group name: WordCompleter}) covering
        the command name/aliases, its options and the help options.
    """
    cmd_grp = cmd_grp or "cmd"
    help_opts = ("-h", "--help")
    cmd = self.name()
    # Match the command name or any of its aliases.
    names = "|".join([re.escape(cmd)] +
                     [re.escape(a) for a in self.aliases()])
    opts = []
    # Collect all option strings except the help options.
    for action in self.parser._actions:
        opts += [a for a in action.option_strings
                 if a not in help_opts]
    opts_re = "|".join([re.escape(o) for o in opts])
    if opts_re:
        opts_re = rf"(\s+(?P<{cmd_grp}_opts>{opts_re}))*"
    help_re = "|".join([re.escape(o) for o in help_opts])
    help_re = rf"(\s+(?P<HELP_OPTS>{help_re}))*"
    completers = {}
    if opts_re:
        completers[f"{cmd_grp}_opts"] = WordCompleter(opts)
    # Single Help completer added elsewhere
    return tuple([
        rf"""(?P<{cmd_grp}>{names}){opts_re}{help_re}""",
        completers
    ])
def fromStringProto(self, inString, proto):
    """
    Defers to `amp.AmpList`, then gets the element from the list.

    The tuple-unpack asserts that the decoded list holds exactly one item.
    """
    value, = amp.AmpList.fromStringProto(self, inString, proto)
    return value
def toStringProto(self, inObject, proto):
    """
    Wraps the object in a list, and then defers to ``amp.AmpList``.
    """
    return amp.AmpList.toStringProto(self, [inObject], proto)
def unfurl(jwt):
    """
    Return the body of a signed JWT, without verifying the signature.

    :param jwt: A signed JWT
    :return: The body of the JWT parsed from JSON (a dict)
    """
    _rp_jwt = factory(jwt)
    # part[1] is the base64url-decoded payload segment of the JWS.
    return json.loads(_rp_jwt.jwt.part[1].decode('utf8'))
def keyjar_from_metadata_statements(iss, msl):
    """
    Builds a keyJar instance based on the information in the 'signing_keys'
    claims in a list of metadata statements.

    :param iss: Owner of the signing keys
    :param msl: List of :py:class:`MetadataStatement` instances.
    :return: A :py:class:`oidcmsg.key_jar.KeyJar` instance
    """
    keyjar = KeyJar()
    # All statements' keys are imported under the same owner.
    for ms in msl:
        keyjar.import_jwks(ms['signing_keys'], iss)
    return keyjar
def read_jwks_file(jwks_file):
    """
    Reads a file containing a JWKS and populates a
    :py:class:`oidcmsg.key_jar.KeyJar` from it.

    :param jwks_file: file name of the JWKS file
    :return: A :py:class:`oidcmsg.key_jar.KeyJar` instance
    :raises OSError: if the file cannot be read
    :raises ValueError: if the file is not valid JSON
    """
    # Use a context manager so the file handle is closed deterministically;
    # the original left it to garbage collection.
    with open(jwks_file, 'r') as fp:
        _jwks = json.load(fp)
    _kj = KeyJar()
    # '' -> keys are imported without a specific owner.
    _kj.import_jwks(_jwks, '')
    return _kj
def is_lesser(a, b):
    """
    Verify that an item *a* is <= then an item *b*.

    Strings and booleans must be equal; numbers compare with <=; every
    element of a list must be lesser than some element of the other list;
    a dict's keys and values must both be lesser. Mixed types (including
    int vs float) are never lesser.

    :param a: An item
    :param b: Another item
    :return: True or False
    """
    # Differing concrete types can never satisfy the relation.
    if type(a) != type(b):
        return False
    if isinstance(a, (str, bool)):
        return a == b
    if isinstance(a, list):
        # Every element of a must be lesser than at least one element of b.
        return all(
            any(is_lesser(item, candidate) for candidate in b)
            for item in a
        )
    if isinstance(a, dict):
        if not is_lesser(list(a.keys()), list(b.keys())):
            return False
        return all(is_lesser(val, b[key]) for key, val in a.items())
    if isinstance(a, (int, float)):
        return a <= b
    return False
def verify(self, **kwargs):
    """
    Verifies that an instance of this class adheres to the given
    restrictions.

    Checks that 'signing_keys' and 'signing_keys_uri' are not both present,
    that 'signing_keys' is a proper JWKS, and that no federation appears in
    both 'metadata_statements' and 'metadata_statement_uris'.

    :param kwargs: A set of keyword arguments
    :return: True if it verifies OK otherwise False.
    :raises VerificationError: if one of the checks above fails.
    """
    super(MetadataStatement, self).verify(**kwargs)
    if "signing_keys" in self:
        if 'signing_keys_uri' in self:
            raise VerificationError(
                'You can only have one of "signing_keys" and '
                '"signing_keys_uri" in a metadata statement')
        else:
            # signing_keys MUST be a JWKS
            kj = KeyJar()
            try:
                kj.import_jwks(self['signing_keys'], '')
            except Exception:
                raise VerificationError('"signing_keys" not a proper JWKS')
    if "metadata_statements" in self and "metadata_statement_uris" in self:
        # The same federation must not be referenced both inline and by URI.
        s = set(self['metadata_statements'].keys())
        t = set(self['metadata_statement_uris'].keys())
        if s.intersection(t):
            raise VerificationError(
                'You should not have the same key in "metadata_statements" '
                'and in "metadata_statement_uris"')
    return True
def _parse_remote_response(self, response):
    """
    Parse simple JWKS or signed JWKS from the HTTP response.

    :param response: HTTP response from the 'jwks_uri' or 'signed_jwks_uri'
        endpoint
    :return: response parsed as JSON or None (None also when the
        Content-Type header is missing or the JSON is invalid)
    :raises ValueError: if the Content-Type is neither JSON nor JWT
    """
    # Check if the content type is the right one.
    # NOTE(review): this is an exact string match — a header such as
    # "application/json; charset=utf-8" would not match; confirm intent.
    try:
        if response.headers["Content-Type"] == 'application/json':
            logger.debug(
                "Loaded JWKS: %s from %s" % (response.text, self.source))
            try:
                return json.loads(response.text)
            except ValueError:
                return None
        elif response.headers["Content-Type"] == 'application/jwt':
            logger.debug(
                "Signed JWKS: %s from %s" % (response.text, self.source))
            # Verify the JWS signature with our configured keys.
            _jws = factory(response.text)
            _resp = _jws.verify_compact(
                response.text, keys=self.verify_keys.get_signing_key())
            return _resp
        else:
            logger.error('Wrong content type: {}'.format(
                response.headers['Content-Type']))
            raise ValueError('Content-type mismatch')
    except KeyError:
        # No Content-Type header at all: silently return None.
        pass
def dump(filename, dbname, username=None, password=None, host=None,
         port=None, tempdir='/tmp', pg_dump_path='pg_dump', format='p'):
    """Performs a pg_dump backup.

    It runs with the current systemuser's privileges, unless you specify
    username and password.
    By default pg_dump connects to the value given in the PGHOST environment
    variable.
    You can either specify "hostname" and "port" or a socket path.
    pg_dump expects the pg_dump-utility to be on $PATH.
    Should that not be case you are allowed to specify a custom location with
    "pg_dump_path"
    Format is p (plain / default), c = custom, d = directory, t=tar

    returns statuscode and shelloutput
    """
    filepath = os.path.join(tempdir, filename)
    cmd = pg_dump_path
    cmd += ' --format %s' % format
    # Fixed: reuse the precomputed filepath instead of joining the path a
    # second time (the original computed `filepath` but never used it).
    cmd += ' --file ' + filepath
    if username:
        cmd += ' --username %s' % username
    if host:
        cmd += ' --host %s' % host
    if port:
        cmd += ' --port %s' % port
    cmd += ' ' + dbname
    # Export the password via env var so pg_dump does not prompt.
    # NOTE(review): PGPASSWORD remains set in this process afterwards.
    if password:
        os.environ["PGPASSWORD"] = password
    # run pg_dump
    return sh(cmd)
def _connection(username=None, password=None, host=None, port=None, db=None):
"returns a connected cursor to the database-server."
c_opts = {}
if username: c_opts['user'] = username
if password: c_opts['password'] = password
if host: c_opts['host'] = host
if port: c_opts['port'] = port
if db: c_opts['database'] = db
dbc = psycopg2.connect(**c_opts)
dbc.autocommit = True
return dbc |
def db_list(username=None, password=None, host=None, port=None,
            maintain_db='postgres'):
    """Return a list with the names of all databases on this server."""
    conn = _connection(username=username, password=password, host=host,
                       port=port, db=maintain_db)
    cur = conn.cursor()
    cur.execute('SELECT DATNAME from pg_database')
    # First (and only) column of each row is the database name.
    names = [row[0] for row in cur.fetchall()]
    conn.close()
    return names
def _get_local_files(self, path):
    """Return a mapping of path-relative filename -> MD5 for every file
    under *path*.

    Missing keys look up as None (defaultdict), which simplifies the
    comparison against the S3 listing.

    :raises ValueError: if *path* is empty.
    """
    if not path:
        raise ValueError("No path specified")
    files = defaultdict(lambda: None)
    # +1 strips the path separator following the base directory.
    strip = len(path) + 1
    for root, _dirs, filenames in os.walk(path):
        for filename in filenames:
            absolute = join(root, filename)
            files[absolute[strip:]] = compute_md5(absolute)
    return files
def sync_folder(self, path, bucket):
    """Syncs a local directory with an S3 bucket.

    Currently does not delete files from S3 that are not in the local directory.

    path: The path to the directory to sync to S3
    bucket: The name of the bucket on S3
    """
    bucket = self.conn.get_bucket(bucket)
    local_files = self._get_local_files(path)
    s3_files = self._get_s3_files(bucket)
    # .items() instead of the Python-2-only .iteritems(); also avoid
    # shadowing the builtin `hash`.
    for filename, md5_hash in local_files.items():
        s3_key = s3_files[filename]
        if s3_key is None:
            s3_key = Key(bucket)
            s3_key.key = filename
            # Sentinel etag that can never equal a real MD5 digest,
            # forcing the upload below.
            s3_key.etag = '"!"'
        # etag is quoted; md5_hash[0] is presumably the hex digest from
        # compute_md5 — TODO confirm against compute_md5's return value.
        if s3_key.etag[1:-1] != md5_hash[0]:
            s3_key.set_contents_from_filename(join(path, filename), md5=md5_hash)
def sync(self, folders):
    """Syncs a list of folders to their assicated buckets.

    folders: A list of 2-tuples in the form (folder, bucket)

    Raises ValueError when the list is empty.
    """
    if not folders:
        raise ValueError("No folders to sync given")
    for pair in folders:
        # Each pair unpacks to (folder, bucket).
        self.sync_folder(*pair)
def login_required(function=None, redirect_field_name=REDIRECT_FIELD_NAME,
                   login_url=None):
    """
    Decorator for views that checks that the user is logged in, redirecting
    to the log-in page if necessary.

    "Logged in" means the session holds a 'user_token'. Can be used either
    as @login_required or as login_required(login_url=...).
    """
    actual_decorator = request_passes_test(
        lambda r: r.session.get('user_token'),
        login_url=login_url,
        redirect_field_name=redirect_field_name
    )
    if function:
        return actual_decorator(function)
    return actual_decorator
def permission_required(function=None, permission=None, object_id=None,
                        redirect_field_name=REDIRECT_FIELD_NAME,
                        login_url=None):
    """
    Decorator for views that checks that the user has the given permission
    (optionally on a specific object), redirecting to the log-in page if
    necessary.

    Permissions are read from the session's 'user_permissions'. Can be used
    either as @permission_required(...) or applied directly to a function.
    """
    actual_decorator = request_passes_test(
        lambda r: has_permission(r.session.get('user_permissions'), permission, object_id),  # noqa
        login_url=login_url,
        redirect_field_name=redirect_field_name
    )
    if function:
        return actual_decorator(function)
    return actual_decorator
def tokens_required(service_list):
    """
    Ensure the user has the necessary tokens for the specified services.

    Redirects to the 'denied' view as soon as any required service token
    is missing from the session.
    """
    def decorator(func):
        @wraps(func)
        def inner(request, *args, **kwargs):
            available = request.session["user_tokens"]
            if any(service not in available for service in service_list):
                return redirect('denied')
            return func(request, *args, **kwargs)
        return inner
    return decorator
def login(request, template_name='ci/login.html',
          redirect_field_name=REDIRECT_FIELD_NAME,
          authentication_form=AuthenticationForm):
    """
    Displays the login form and handles the login action.

    On a valid POST, populates the session with the authenticated user's
    token, email, permissions, id, user list, dashboards (unless hidden)
    and per-service access tokens, then redirects. Otherwise renders the
    login template.
    """
    # Redirect target may come from POST or from the query string.
    redirect_to = request.POST.get(redirect_field_name,
                                   request.GET.get(redirect_field_name, ''))
    if request.method == "POST":
        form = authentication_form(request, data=request.POST)
        if form.is_valid():
            # Ensure the user-originating redirection url is safe.
            if not is_safe_url(url=redirect_to, host=request.get_host()):
                redirect_to = resolve_url(settings.LOGIN_REDIRECT_URL)
            # Okay, security check complete. Get the user object from auth api.
            user = form.get_user()
            request.session['user_token'] = user["token"]
            request.session['user_email'] = user["email"]
            request.session['user_permissions'] = user["permissions"]
            request.session['user_id'] = user["id"]
            request.session['user_list'] = user["user_list"]
            if not settings.HIDE_DASHBOARDS:
                # Set user dashboards because they are slow to change
                dashboards = ciApi.get_user_dashboards(user["id"])
                dashboard_list = list(dashboards['results'])
                if len(dashboard_list) > 0:
                    request.session['user_dashboards'] = \
                        dashboard_list[0]["dashboards"]
                    request.session['user_default_dashboard'] = \
                        dashboard_list[0]["default_dashboard"]["id"]
                else:
                    request.session['user_dashboards'] = []
                    request.session['user_default_dashboard'] = None
            # Get the user access tokens too and format for easy access
            tokens = ciApi.get_user_service_tokens(
                params={"user_id": user["id"]})
            token_list = list(tokens['results'])
            user_tokens = {}
            if len(token_list) > 0:
                for token in token_list:
                    # Keyed by service name -> token + API base url.
                    user_tokens[token["service"]["name"]] = {
                        "token": token["token"],
                        "url": token["service"]["url"] + "/api/v1"
                    }
            request.session['user_tokens'] = user_tokens
            return HttpResponseRedirect(redirect_to)
    else:
        form = authentication_form(request)
    current_site = get_current_site(request)
    context = {
        'form': form,
        redirect_field_name: redirect_to,
        'site': current_site,
        'site_name': current_site.name,
    }
    return TemplateResponse(request, template_name, context)
def build(cli, path, package):
    """Build CLI dynamically based on the package structure.

    Recursively walks *package*: sub-packages become command groups
    (from the module's `group` attribute), plain modules become commands
    (from the module's `command` attribute).

    :param cli: the click group to attach commands/groups to
    :param path: the package's __path__ to scan
    :param package: dotted package name used for relative imports
    """
    for _, name, ispkg in iter_modules(path):
        module = import_module(f'.{name}', package)
        if ispkg:
            # Register the sub-package as a nested group and recurse into it.
            build(cli.group(name)(module.group),
                  module.__path__,
                  module.__package__)
        else:
            cli.command(name)(module.command)
def readonly(cls, *args, **kwargs):
    """
    Return an already closed read-only instance of Fridge.
    Arguments are the same as for the constructor.

    Closing immediately means the data is loaded but will never be
    written back; the instance remains usable as a plain dictionary.
    """
    fridge = cls(*args, **kwargs)
    fridge.close()
    return fridge
def load(self):
    """
    Force reloading the data from the file.
    All data in the in-memory dictionary is discarded.

    This method is called automatically by the constructor, normally you
    don't need to call it.

    :raises ValueError: if the file holds valid JSON whose root is not
        a dictionary.
    """
    self._check_open()
    try:
        data = json.load(self.file, **self.load_args)
    except ValueError:
        # Empty or corrupt file: start from an empty dictionary.
        data = {}
    if not isinstance(data, dict):
        raise ValueError('Root JSON type must be dictionary')
    self.clear()
    self.update(data)
def save(self):
    """
    Force saving the dictionary to the file.
    All data in the file is discarded.

    This method is called automatically by :meth:`close`.
    """
    self._check_open()
    # Truncate and rewind so the new JSON fully replaces the old content.
    self.file.truncate(0)
    self.file.seek(0)
    json.dump(self, self.file, **self.dump_args)
def close(self):
    """
    Close the fridge.

    Calls :meth:`save` and closes the underlying file object unless
    an already open file was passed to the constructor.

    This method has no effect if the object is already closed.
    After the fridge is closed :meth:`save` and :meth:`load` will raise an exception
    but you will still be able to use it as an ordinary dictionary.
    """
    if not self.closed:
        self.save()
        # Only close the file if we opened it ourselves.
        if self.close_file:
            self.file.close()
        self.closed = True
def self_sign_jwks(keyjar, iss, kid='', lifetime=3600):
    """
    Create a signed JWT containing a JWKS. The JWT is signed by one of the
    keys in the JWKS.

    :param keyjar: A KeyJar instance with at least one private signing key
    :param iss: issuer of the JWT, should be the owner of the keys
    :param kid: A key ID if a special key should be used otherwise one
        is picked at random.
    :param lifetime: The lifetime of the signed JWT
    :return: A signed JWT
    """
    _jwt = JWT(keyjar, iss=iss, lifetime=lifetime)
    # The signed payload carries the issuer's public JWKS.
    jwks = keyjar.export_jwks(issuer=iss)
    return _jwt.pack(payload={'jwks': jwks}, owner=iss, kid=kid)
def verify_self_signed_jwks(sjwt):
    """
    Verify the signature of a signed JWT containing a JWKS.
    The JWT is signed by one of the keys in the JWKS.
    In the JWT the JWKS is stored using this format ::

        'jwks': {
            'keys': [ ]
        }

    :param sjwt: Signed Jason Web Token
    :return: Dictionary containing 'jwks' (the JWKS) and 'iss' (the issuer of
        the JWT)
    """
    _jws = factory(sjwt)
    # Read the unverified payload to learn which keys to verify against.
    _json = _jws.jwt.part[1]
    _body = json.loads(as_unicode(_json))
    iss = _body['iss']
    _jwks = _body['jwks']
    # Build a key jar from the embedded JWKS itself (self-signed).
    _kj = jwks_to_keyjar(_jwks, iss)
    try:
        _kid = _jws.jwt.headers['kid']
    except KeyError:
        _keys = _kj.get_signing_key(owner=iss)
    else:
        _keys = _kj.get_signing_key(owner=iss, kid=_kid)
    _ver = _jws.verify_compact(sjwt, _keys)
    return {'jwks': _ver['jwks'], 'iss': iss}
def request_signed_by_signing_keys(keyjar, msreq, iss, lifetime, kid=''):
    """
    A metadata statement signing request with 'signing_keys' signed by one
    of the keys in 'signing_keys'.

    :param keyjar: A KeyJar instance with the private signing key
    :param msreq: Metadata statement signing request. A MetadataStatement
        instance.
    :param iss: Issuer of the signing request also the owner of the signing
        keys.
    :param lifetime: Lifetime of the resulting signed JWT
    :param kid: Key ID to sign with; picked automatically when empty
    :return: Signed JWT where the body is the metadata statement
    """
    # If the request carries no usable 'signing_keys', fill it in with the
    # issuer's exported JWKS.
    try:
        jwks_to_keyjar(msreq['signing_keys'], iss)
    except KeyError:
        jwks = keyjar.export_jwks(issuer=iss)
        msreq['signing_keys'] = jwks
    _jwt = JWT(keyjar, iss=iss, lifetime=lifetime)
    return _jwt.pack(owner=iss, kid=kid, payload=msreq.to_dict())
def verify_request_signed_by_signing_keys(smsreq):
    """
    Verify that a JWT is signed with a key that is inside the JWT.

    :param smsreq: Signed Metadata Statement signing request
    :return: Dictionary containing 'ms' (the signed request) and 'iss' (the
        issuer of the JWT).
    """
    _jws = factory(smsreq)
    # Read the unverified payload to learn which keys to verify against.
    _json = _jws.jwt.part[1]
    _body = json.loads(as_unicode(_json))
    iss = _body['iss']
    _jwks = _body['signing_keys']
    # Build a key jar from the embedded signing keys (self-signed request).
    _kj = jwks_to_keyjar(_jwks, iss)
    try:
        _kid = _jws.jwt.headers['kid']
    except KeyError:
        _keys = _kj.get_signing_key(owner=iss)
    else:
        _keys = _kj.get_signing_key(owner=iss, kid=_kid)
    _ver = _jws.verify_compact(smsreq, _keys)
    # remove the JWT specific claims
    for k in JsonWebToken.c_param.keys():
        try:
            del _ver[k]
        except KeyError:
            pass
    try:
        del _ver['kid']
    except KeyError:
        pass
    return {'ms': MetadataStatement(**_ver), 'iss': iss}
def card(func):
    """
    A decorator for providing a unittesting function/method with every card in
    a librarian card library database when it is called.
    """
    @wraps(func)
    def passthrough(*args, **kwargs):
        """Transparent wrapper."""
        return func(*args, **kwargs)
    # Collect the wrapped test so the runner can execute it per card.
    TESTS.append(passthrough)
    return passthrough
def library(func):
    """
    A decorator for providing a unittest with a library and have it called only
    once.
    """
    @wraps(func)
    def passthrough(*args, **kwargs):
        """Transparent wrapper."""
        return func(*args, **kwargs)
    # Collect the wrapped test so the runner executes it exactly once.
    SINGLES.append(passthrough)
    return passthrough
def descovery(testdir):
    """Descover and load greencard tests.

    Imports every *.py file found under *testdir* so that its @card/@library
    decorated tests register themselves.

    :param testdir: directory to scan; returns None if missing or not a dir
    """
    from os.path import join, exists, isdir, splitext, sep
    if not testdir or not exists(testdir) or not isdir(testdir):
        return None
    from os import walk
    import fnmatch
    import importlib.util
    import sys
    for root, _, filenames in walk(testdir):
        for filename in fnmatch.filter(filenames, '*.py'):
            path = join(root, filename)
            modulepath = splitext(root)[0].replace(sep, '.')
            # imp.load_source was removed in Python 3.12; use importlib.
            spec = importlib.util.spec_from_file_location(modulepath, path)
            module = importlib.util.module_from_spec(spec)
            # Register before executing, as imp.load_source used to do.
            sys.modules[modulepath] = module
            spec.loader.exec_module(module)
def main(clargs=None):
    """Command line entry point.

    Parses arguments, discovers tests, runs them against the given library
    and exits with the number of failures as the status code.

    :param clargs: argument list; defaults to sys.argv[1:] via argparse
    """
    from argparse import ArgumentParser
    from librarian.library import Library
    import sys
    parser = ArgumentParser(
        description="A test runner for each card in a librarian library.")
    parser.add_argument("library", help="Library database")
    parser.add_argument("-t", "--tests", default="test/",
                        help="Test directory")
    args = parser.parse_args(clargs)
    descovery(args.tests)
    library = Library(args.library)
    cardcount, passes, failures = execute_tests(library)
    print(RESULTS.format(len(SINGLES), len(TESTS), cardcount, passes,
                         failures))
    # Exit status mirrors the failure count (0 == success).
    sys.exit(failures)
def letter_score(letter):
    """Returns the Scrabble score of a letter.

    Args:
        letter: a single character string (case-insensitive)

    Returns:
        the integer Scrabble point value of the letter

    Raises:
        TypeError if a non-Scrabble character is supplied
    """
    score_map = {
        1: ["a", "e", "i", "o", "u", "l", "n", "r", "s", "t"],
        2: ["d", "g"],
        3: ["b", "c", "m", "p"],
        4: ["f", "h", "v", "w", "y"],
        5: ["k"],
        8: ["j", "x"],
        10: ["q", "z"],
    }
    for score, letters in score_map.items():
        if letter.lower() in letters:
            return score
    # Fixed: the original passed `letter` as a second TypeError argument
    # instead of formatting it into the message.
    raise TypeError("Invalid letter: %s" % letter)
def word_score(word, input_letters, questions=0):
    """Checks the Scrabble score of a single word.

    Args:
        word: a string to check the Scrabble score of
        input_letters: the letters in our rack
        questions: integer of the tiles already on the board to build on

    Returns:
        an integer Scrabble score amount for the word
    """
    score = 0
    bingo = 0          # count of letters actually drawn from the rack
    filled_by_blanks = []
    rack = list(input_letters)  # make a copy to speed up find_anagrams()
    for letter in word:
        if letter in rack:
            bingo += 1
            score += letter_score(letter)
            rack.remove(letter)
        else:
            # Letter not in rack: must be covered by a ? or a blank (_).
            filled_by_blanks.append(letter_score(letter))
    # we can have both ?'s and _'s in the word. this will apply the ?s to the
    # highest scrabble score value letters and leave the blanks for low points.
    for blank_score in sorted(filled_by_blanks, reverse=True):
        if questions > 0:
            score += blank_score
            questions -= 1
    # 50 bonus points for using all the tiles in your rack
    if bingo > 6:
        score += 50
    return score
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.