code
string
signature
string
docstring
string
loss_without_docstring
float64
loss_with_docstring
float64
factor
float64
def stopFactory(self):
    """Stop the factory.

    See the documentation of
    `twisted.internet.protocol.ReconnectingClientFactory` for details.
    """
    # Halt the active client, if any, before letting the base class tear down.
    if self._client:
        yield self._client.halt()
    protocol.ReconnectingClientFactory.stopFactory(self)
9.191732
7.111704
1.29248
def when_connected(self):
    """Retrieve the currently-connected Protocol, or the next one to connect.

    Returns:
        defer.Deferred: A Deferred that fires with a connected
            :class:`FedoraMessagingProtocolV2` instance. This is similar to
            the whenConnected method from the Twisted endpoints APIs, which
            sadly isn't available before 16.1.0, which isn't available
            in EL7.
    """
    client = self._client
    # An open client can be handed back immediately; otherwise callers wait
    # on the Deferred for the next connection.
    if client and not client.is_closed:
        return defer.succeed(client)
    return self._client_deferred
4.280875
4.499693
0.95137
def consume(self, callback, bindings, queues):
    """Start a consumer that lasts across individual connections.

    Args:
        callback (callable): A callable object that accepts one positional
            argument, a :class:`Message`, or a class object that implements
            the ``__call__`` method. The class will be instantiated before
            use.
        bindings (dict or list of dict): Bindings to declare before
            consuming. This should be the same format as the
            :ref:`conf-bindings` configuration.
        queues (dict): The queues to declare and consume from. Each key is a
            queue name and each value is its settings dictionary with the
            "durable", "auto_delete", "exclusive", and "arguments" keys.
            Refer to :ref:`conf-queues` for details on their meanings.

    Returns:
        defer.Deferred: A deferred that fires with the list of one or more
            :class:`fedora_messaging.twisted.consumer.Consumer` objects.
            These can be passed to :meth:`FedoraMessagingFactoryV2.cancel`
            to halt them. Each consumer's ``result`` Deferred fires or
            errors when the consumer halts; it may errback with a
            BadDeclaration if the user lacks permission to consume from
            the queue.
    """
    # Expand every binding with N routing keys into N single-key bindings,
    # grouped by the queue they apply to.
    bindings_by_queue = collections.defaultdict(list)
    for binding in bindings:
        for key in binding["routing_keys"]:
            single = binding.copy()
            del single["routing_keys"]
            single["routing_key"] = key
            bindings_by_queue[single["queue"]].append(single)

    # Flatten the queue mapping into declaration dictionaries.
    queue_configs = []
    for name, settings in queues.items():
        queue_config = {"queue": name}
        queue_config.update(settings)
        queue_configs.append(queue_config)

    protocol = yield self.when_connected()
    consumers = []
    for queue_config in queue_configs:
        yield protocol.declare_queues([queue_config])
        queue_bindings = bindings_by_queue.get(queue_config["queue"], [])
        yield protocol.bind_queues(queue_bindings)
        consumer = yield protocol.consume(callback, queue_config["queue"])
        # Remember everything needed to recreate the consumer on reconnect.
        self._consumers[queue_config["queue"]] = (
            consumer,
            queue_config,
            queue_bindings,
        )
        consumers.append(consumer)
    defer.returnValue(consumers)
2.904286
2.727041
1.064995
def cancel(self, consumers):
    """Cancel a consumer that was previously started with consume.

    Args:
        consumers (list of fedora_messaging.api.Consumer): The consumers to
            cancel.
    """
    for consumer in consumers:
        # Forget the consumer first so it is not recreated on reconnect.
        del self._consumers[consumer.queue]
        protocol = yield self.when_connected()
        yield protocol.cancel(consumer)
7.831071
9.502551
0.824102
def cancel(self):
    """Cancel the consumer and clean up resources associated with it.

    Consumers that are canceled are allowed to finish processing any
    messages before halting.

    Returns:
        defer.Deferred: A deferred that fires when the consumer has finished
            processing any message it was in the middle of and has been
            successfully canceled.
    """
    # Remove it from protocol and factory so it doesn't restart later.
    try:
        del self._protocol._consumers[self.queue]
    except (KeyError, AttributeError):
        pass
    try:
        del self._protocol.factory._consumers[self.queue]
    except (KeyError, AttributeError):
        pass
    # Signal to the _read loop it's time to stop and wait for it to finish
    # with whatever message it might be working on, then wait for the
    # deferred to fire which indicates it is done.
    self._running = False
    yield self._read_loop
    try:
        yield self._channel.basic_cancel(consumer_tag=self._tag)
    except pika.exceptions.AMQPChannelError:
        # Consumers are tied to channels, so if this channel is dead the
        # consumer should already be canceled (and we can't get to it anyway)
        pass
    try:
        yield self._channel.close()
    except pika.exceptions.AMQPChannelError:
        pass
    if not self.result.called:
        self.result.callback(self)
5.060888
4.847458
1.044029
def validate_bindings(bindings):
    """Validate the bindings configuration.

    Raises:
        exceptions.ConfigurationException: If the configuration provided is
            of an invalid format.
    """
    if not isinstance(bindings, (list, tuple)):
        raise exceptions.ConfigurationException(
            "bindings must be a list or tuple of dictionaries, "
            "but was a {}".format(type(bindings))
        )

    for binding in bindings:
        missing_keys = [
            key
            for key in ("queue", "exchange", "routing_keys")
            if key not in binding
        ]
        if missing_keys:
            raise exceptions.ConfigurationException(
                "a binding is missing the following keys from its settings "
                "value: {}".format(missing_keys)
            )
        if not isinstance(binding["routing_keys"], (list, tuple)):
            raise exceptions.ConfigurationException(
                "routing_keys must be a list or tuple, but was a {}".format(
                    type(binding["routing_keys"])
                )
            )
2.143945
2.12375
1.009509
def validate_queues(queues):
    """Validate the queues configuration.

    Raises:
        exceptions.ConfigurationException: If the configuration provided is
            of an invalid format.
    """
    if not isinstance(queues, dict):
        raise exceptions.ConfigurationException(
            "'queues' must be a dictionary mapping queue names to settings."
        )

    for queue, settings in queues.items():
        if not isinstance(settings, dict):
            raise exceptions.ConfigurationException(
                "the {} queue in the 'queues' setting has a value of type {}, but it "
                "should be a dictionary of settings.".format(queue, type(settings))
            )
        missing_keys = [
            key
            for key in ("durable", "auto_delete", "exclusive", "arguments")
            if key not in settings
        ]
        if missing_keys:
            raise exceptions.ConfigurationException(
                "the {} queue is missing the following keys from its settings "
                "value: {}".format(queue, missing_keys)
            )
2.544159
2.490266
1.021641
def validate_client_properties(props):
    """Validate the client properties setting.

    This will add the "version", "information", and "product" keys if they
    are missing. All other keys are application-specific.

    Raises:
        exceptions.ConfigurationException: If any of the basic keys are
            overridden.
    """
    for key in ("version", "information", "product"):
        # Nested dictionaries are not merged, so the key can be missing.
        props.setdefault(key, DEFAULTS["client_properties"][key])
        # Don't let users override these as they identify this library in AMQP
        if props[key] != DEFAULTS["client_properties"][key]:
            raise exceptions.ConfigurationException(
                '"{}" is a reserved keyword in client_properties'.format(key)
            )
8.747397
6.499147
1.34593
def _validate(self):
    """Perform checks on the configuration to assert its validity.

    Raises:
        ConfigurationException: If the configuration is invalid.
    """
    # Reject keys that are not part of the known configuration schema.
    for key in self:
        if key not in DEFAULTS:
            raise exceptions.ConfigurationException(
                'Unknown configuration key "{}"! Valid configuration keys are'
                " {}".format(key, list(DEFAULTS.keys()))
            )
    validate_queues(self["queues"])
    validate_bindings(self["bindings"])
    validate_client_properties(self["client_properties"])
5.336932
5.034889
1.05999
def load_config(self, config_path=None):
    """Load application configuration from a file and merge it with the
    default configuration.

    If the ``FEDORA_MESSAGING_CONF`` environment variable is set to a
    filesystem path, the configuration will be loaded from that location.
    Otherwise, the path defaults to ``/etc/fedora-messaging/config.toml``.
    """
    self.loaded = True
    config = copy.deepcopy(DEFAULTS)
    if config_path is None:
        if "FEDORA_MESSAGING_CONF" in os.environ:
            config_path = os.environ["FEDORA_MESSAGING_CONF"]
        else:
            config_path = "/etc/fedora-messaging/config.toml"
    if os.path.exists(config_path):
        _log.info("Loading configuration from {}".format(config_path))
        with open(config_path) as fd:
            try:
                file_config = toml.load(fd)
                # Configuration keys are case-insensitive; normalize them.
                for key in file_config:
                    config[key.lower()] = file_config[key]
            except toml.TomlDecodeError as e:
                msg = "Failed to parse {}: error at line {}, column {}: {}".format(
                    config_path, e.lineno, e.colno, e.msg
                )
                raise exceptions.ConfigurationException(msg)
    else:
        _log.info("The configuration file, {}, does not exist.".format(config_path))

    self.update(config)
    self._validate()
    return self
2.362332
2.067073
1.142839
def _ssl_context_factory(parameters):
    """Produce a Twisted SSL context object from a pika connection parameter
    object. This is necessary as Twisted manages the connection, not Pika.

    Args:
        parameters (pika.ConnectionParameters): The connection parameters
            built from the fedora_messaging configuration.
    """
    client_cert = None
    ca_cert = None
    key = config.conf["tls"]["keyfile"]
    cert = config.conf["tls"]["certfile"]
    ca_file = config.conf["tls"]["ca_cert"]
    if ca_file:
        with open(ca_file, "rb") as fd:
            # Open it in binary mode since otherwise Twisted will immediately
            # re-encode it as ASCII, which won't work if the cert bundle has
            # comments that can't be encoded with ASCII.
            ca_cert = ssl.Certificate.loadPEM(fd.read())
    if key and cert:
        # Note that _configure_tls_parameters sets the auth mode to EXTERNAL
        # if both key and cert are defined, so we don't need to do that here.
        with open(key) as fd:
            client_keypair = fd.read()
        with open(cert) as fd:
            client_keypair += fd.read()
        client_cert = ssl.PrivateCertificate.loadPEM(client_keypair)

    hostname = parameters.host
    if not isinstance(hostname, six.text_type):
        # Twisted requires the hostname as decoded text, which it isn't in
        # Python 2. Decode with the system encoding since this came from the
        # config file. Die, Python 2, die.
        hostname = hostname.decode(locale.getdefaultlocale()[1])

    try:
        context_factory = ssl.optionsForClientTLS(
            hostname,
            trustRoot=ca_cert or ssl.platformTrust(),
            clientCertificate=client_cert,
            extraCertificateOptions={"raiseMinimumTo": ssl.TLSVersion.TLSv1_2},
        )
    except AttributeError:
        # Twisted 12.2 path for EL7 :(
        context_factory = ssl.CertificateOptions(
            certificate=client_cert.original,
            privateKey=client_cert.privateKey.original,
            # BUG FIX: this was ``[ca_cert.original] or ssl.platformTrust()``.
            # A one-element list is always truthy so the fallback could never
            # fire, and when ca_cert was None the attribute access raised
            # AttributeError before the ``or`` was even evaluated.
            # NOTE(review): with verify=True Twisted requires CA certs, so a
            # missing ca_cert still fails on this legacy path; client_cert may
            # also be None here -- confirm both are mandatory for EL7 deploys.
            caCerts=[ca_cert.original] if ca_cert else None,
            verify=True,
            requireCertificate=True,
            verifyOnce=False,
            enableSessions=False,
        )
    return context_factory
5.13344
5.37981
0.954205
def stopService(self):
    """Gracefully stop the service.

    Returns:
        defer.Deferred: a Deferred which is triggered when the service has
            finished shutting down.
    """
    factory = self._service.factory
    # Disable reconnect attempts before tearing the factory down.
    factory.stopTrying()
    yield factory.stopFactory()
    yield service.MultiService.stopService(self)
8.934968
9.70587
0.920574
def _configure_tls_parameters(parameters):
    """Configure the pika connection parameters for TLS based on the
    configuration.

    This modifies the object provided to it. This accounts for whether or not
    the new API based on the standard library's SSLContext is available for
    pika.

    Args:
        parameters (pika.ConnectionParameters): The connection parameters to
            apply TLS connection settings to.
    """
    cert = config.conf["tls"]["certfile"]
    key = config.conf["tls"]["keyfile"]
    if cert and key:
        _log.info(
            "Authenticating with server using x509 (certfile: %s, keyfile: %s)",
            cert,
            key,
        )
        parameters.credentials = pika.credentials.ExternalCredentials()
    else:
        cert, key = None, None

    if SSLOptions is None:
        # Old pika API: a plain dict of ssl options.
        parameters.ssl = True
        parameters.ssl_options = {
            "keyfile": key,
            "certfile": cert,
            "ca_certs": config.conf["tls"]["ca_cert"],
            "cert_reqs": ssl.CERT_REQUIRED,
            "ssl_version": ssl.PROTOCOL_TLSv1_2,
        }
    else:
        # New pika API: a standard-library SSLContext wrapped in SSLOptions.
        ssl_context = ssl.create_default_context()
        if config.conf["tls"]["ca_cert"]:
            try:
                ssl_context.load_verify_locations(cafile=config.conf["tls"]["ca_cert"])
            except ssl.SSLError as e:
                raise ConfigurationException(
                    'The "ca_cert" setting in the "tls" section is invalid ({})'.format(
                        e
                    )
                )
        # Restrict the connection to TLSv1.2 and newer.
        ssl_context.options |= ssl.OP_NO_SSLv2
        ssl_context.options |= ssl.OP_NO_SSLv3
        ssl_context.options |= ssl.OP_NO_TLSv1
        ssl_context.options |= ssl.OP_NO_TLSv1_1
        ssl_context.verify_mode = ssl.CERT_REQUIRED
        ssl_context.check_hostname = True
        if cert and key:
            try:
                ssl_context.load_cert_chain(cert, key)
            except ssl.SSLError as e:
                raise ConfigurationException(
                    'The "keyfile" setting in the "tls" section is invalid ({})'.format(
                        e
                    )
                )
        parameters.ssl_options = SSLOptions(
            ssl_context, server_hostname=parameters.host
        )
1.997144
2.00875
0.994222
def publish(self, message, exchange=None):
    """Publish a :class:`fedora_messaging.message.Message` to an exchange on
    the message broker.

    Args:
        message (message.Message): The message to publish.
        exchange (str): The name of the AMQP exchange to publish to; defaults
            to :ref:`conf-publish-exchange`.

    Raises:
        PublishReturned: If the published message is rejected by the broker.
        ConnectionException: If a connection error occurs while publishing.
    """
    message.validate()
    try:
        self._connect_and_publish(exchange, message)
    except (pika_errs.NackError, pika_errs.UnroutableError) as e:
        _log.warning("Message was rejected by the broker (%s)", str(e))
        raise PublishReturned(reason=e)
    except (pika_errs.ConnectionClosed, pika_errs.AMQPChannelError):
        # Because this is a blocking connection (and thus can't heartbeat)
        # we might need to restart the connection.
        _log.info("Resetting connection to %s", self._parameters.host)
        self._connection = self._channel = None
        try:
            # One retry with a fresh connection before giving up.
            self._connect_and_publish(exchange, message)
        except (pika_errs.NackError, pika_errs.UnroutableError) as e:
            _log.warning("Message was rejected by the broker (%s)", str(e))
            raise PublishReturned(reason=e)
        except pika_errs.AMQPError as e:
            _log.error(str(e))
            if self._connection and self._connection.is_open:
                self._connection.close()
            raise ConnectionException(reason=e)
    except pika_errs.AMQPError as e:
        if self._connection and self._connection.is_open:
            self._connection.close()
        raise ConnectionException(reason=e)
2.685561
2.580285
1.0408
def _shutdown(self):
    """Gracefully shut down the consumer and exit."""
    if self._channel:
        _log.info("Halting %r consumer sessions", self._channel.consumer_tags)
    self._running = False
    if self._connection and self._connection.is_open:
        self._connection.close()

    # Restore the default signal handlers.
    for signum in (signal.SIGTERM, signal.SIGINT):
        signal.signal(signum, signal.SIG_DFL)
4.39327
3.909187
1.123832
def _on_cancelok(self, cancel_frame):
    """Called when the server acknowledges a cancel request.

    Args:
        cancel_frame (pika.spec.Basic.CancelOk): The cancelok frame from
            the server.
    """
    _log.info("Consumer canceled; returning all unprocessed messages to the queue")
    # delivery_tag=0 with multiple=True nacks every unacknowledged delivery.
    self._channel.basic_nack(delivery_tag=0, multiple=True, requeue=True)
6.362356
7.762676
0.819609
def _on_channel_open(self, channel):
    """Callback used when a channel is opened.

    This registers all the channel callbacks.

    Args:
        channel (pika.channel.Channel): The channel that successfully opened.
    """
    channel.add_on_close_callback(self._on_channel_close)
    channel.add_on_cancel_callback(self._on_cancel)
    # Setting QoS continues the setup chain via _on_qosok.
    channel.basic_qos(callback=self._on_qosok, **config.conf["qos"])
3.465525
3.887413
0.891473
def _on_qosok(self, qosok_frame):
    """Callback invoked when the server acknowledges the QoS settings.

    Asserts or creates the exchanges and queues exist.

    Args:
        qosok_frame (pika.spec.Basic.Qos): The frame send from the server.
    """
    for name, args in self._exchanges.items():
        self._channel.exchange_declare(
            exchange=name,
            exchange_type=args["type"],
            durable=args["durable"],
            auto_delete=args["auto_delete"],
            arguments=args["arguments"],
            # passive declares only assert existence rather than create.
            passive=config.conf["passive_declares"],
            callback=self._on_exchange_declareok,
        )
    for name, args in self._queues.items():
        self._channel.queue_declare(
            queue=name,
            durable=args["durable"],
            auto_delete=args["auto_delete"],
            exclusive=args["exclusive"],
            arguments=args["arguments"],
            passive=config.conf["passive_declares"],
            callback=self._on_queue_declareok,
        )
1.952169
1.893331
1.031076
def _on_channel_close(self, channel, reply_code_or_reason, reply_text=None):
    """Callback invoked when the channel is closed.

    Args:
        channel (pika.channel.Channel): The channel that got closed.
        reply_code_or_reason (int|Exception): The reason why the channel
            was closed. In older versions of pika, this is the AMQP code.
        reply_text (str): The human-readable reason for the channel's
            closure (only in older versions of pika).
    """
    # Newer pika passes a ChannelClosed exception; older pika passes the
    # AMQP code and text as two separate arguments.
    if isinstance(reply_code_or_reason, pika_errs.ChannelClosed):
        reply_code = reply_code_or_reason.reply_code
        reply_text = reply_code_or_reason.reply_text
    elif isinstance(reply_code_or_reason, int):
        reply_code = reply_code_or_reason
    else:
        reply_code = 0
        reply_text = str(reply_code_or_reason)

    _log.info("Channel %r closed (%d): %s", channel, reply_code, reply_text)
    self._channel = None
1.966923
2.002985
0.981996
def _on_connection_open(self, connection):
    """Callback invoked when the connection is successfully established.

    Args:
        connection (pika.connection.SelectConnection): The newly-established
            connection.
    """
    _log.info("Successfully opened connection to %s", connection.params.host)
    # Continue setup by opening a channel; _on_channel_open takes over.
    self._channel = connection.channel(on_open_callback=self._on_channel_open)
4.01585
4.178466
0.961082
def _on_connection_close(self, connection, reply_code_or_reason, reply_text=None):
    """Callback invoked when a previously-opened connection is closed.

    Args:
        connection (pika.connection.SelectConnection): The connection that
            was just closed.
        reply_code_or_reason (int|Exception): The reason why the connection
            was closed. In older versions of pika, this is the AMQP code.
        reply_text (str): The human-readable reason the connection was
            closed (only in older versions of pika).
    """
    self._channel = None

    # Newer pika passes a ConnectionClosed exception; older pika passes the
    # AMQP code and text as two separate arguments.
    if isinstance(reply_code_or_reason, pika_errs.ConnectionClosed):
        reply_code = reply_code_or_reason.reply_code
        reply_text = reply_code_or_reason.reply_text
    elif isinstance(reply_code_or_reason, int):
        reply_code = reply_code_or_reason
    else:
        reply_code = 0
        reply_text = str(reply_code_or_reason)

    if reply_code == 200:
        # Normal shutdown, exit the consumer.
        _log.info("Server connection closed (%s), shutting down", reply_text)
        connection.ioloop.stop()
    else:
        _log.warning(
            "Connection to %s closed unexpectedly (%d): %s",
            connection.params.host,
            reply_code,
            reply_text,
        )
        self.call_later(1, self.reconnect)
2.558162
2.57708
0.992659
def _on_connection_error(self, connection, error_message):
    """Callback invoked when the connection failed to be established.

    Args:
        connection (pika.connection.SelectConnection): The connection that
            failed to open.
        error_message (str): The reason the connection couldn't be opened.
    """
    self._channel = None
    # AMQPConnectionError wraps the underlying error in args[0]; unwrap it
    # for a readable log line.
    if isinstance(error_message, pika_errs.AMQPConnectionError):
        error_message = repr(error_message.args[0])
    _log.error(error_message)
    self.call_later(1, self.reconnect)
4.677793
5.030271
0.929929
def _on_queue_declareok(self, frame):
    """Callback invoked when a queue is successfully declared.

    Binds the declared queue per the configured bindings and starts a
    consumer on it.

    Args:
        frame (pika.frame.Method): The message sent from the server.
    """
    _log.info("Successfully declared the %s queue", frame.method.queue)
    for binding in self._bindings:
        if binding["queue"] == frame.method.queue:
            for key in binding["routing_keys"]:
                _log.info(
                    "Asserting %s is bound to %s with the %s key",
                    binding["queue"],
                    binding["exchange"],
                    key,
                )
                self._channel.queue_bind(
                    callback=None,
                    queue=binding["queue"],
                    exchange=binding["exchange"],
                    routing_key=key,
                )
    bc_args = dict(queue=frame.method.queue)
    # pika renamed the callback keyword in 1.0.
    if _pika_version < pkg_resources.parse_version("1.0.0b1"):
        bc_args["consumer_callback"] = self._on_message
    else:
        bc_args["on_message_callback"] = self._on_message
    tag = self._channel.basic_consume(**bc_args)
    # BUG FIX: this previously recorded ``binding["queue"]``, but ``binding``
    # is the *last* loop variable (or undefined when there are no bindings),
    # so queues without a matching binding were mapped to the wrong name.
    # The declared queue's own name is what _on_message looks up by tag.
    self._consumers[tag] = frame.method.queue
2.828974
2.821345
1.002704
def call_later(self, delay, callback):
    """Schedule a one-shot timeout given delay seconds.

    This method is only useful for compatibility with older versions of pika.

    Args:
        delay (float): Non-negative number of seconds from now until
            expiration
        callback (method): The callback method, having the signature
            ``callback()``
    """
    ioloop = self._connection.ioloop
    if hasattr(ioloop, "call_later"):
        # Newer pika ioloop API.
        ioloop.call_later(delay, callback)
    else:
        # Older pika only exposes add_timeout.
        ioloop.add_timeout(delay, callback)
2.664868
2.787262
0.956088
def reconnect(self):
    """Will be invoked by the IOLoop timer if the connection is closed.

    See the _on_connection_close method.
    """
    # This is the old connection instance, stop its ioloop.
    self._connection.ioloop.stop()
    if self._running:
        # Create a new connection, then run its fresh ioloop.
        self.connect()
        self._connection.ioloop.start()
7.141473
5.547825
1.287256
def consume(self, callback, bindings=None, queues=None, exchanges=None):
    """Consume messages from a message queue.

    Simply define a callable to be used as the callback when messages are
    delivered and specify the queue bindings. This call blocks. The callback
    signature should accept a single positional argument which is an
    instance of a :class:`Message` (or a sub-class of it).

    Args:
        callback (callable): The callable to pass the message to when one
            arrives.
        bindings (list of dict): A list of dictionaries describing bindings
            for queues. Refer to the :ref:`conf-bindings` configuration
            documentation for the format.
        queues (dict): A dictionary of queues to ensure exist. Refer to the
            :ref:`conf-queues` configuration documentation for the format.
        exchanges (dict): A dictionary of exchanges to ensure exist. Refer
            to the :ref:`conf-exchanges` configuration documentation for the
            format.

    Raises:
        HaltConsumer: Raised when the consumer halts.
        ValueError: If the callback isn't a callable object or a class with
            __call__ defined.
    """
    self._bindings = bindings or config.conf["bindings"]
    self._queues = queues or config.conf["queues"]
    self._exchanges = exchanges or config.conf["exchanges"]

    # A class is instantiated first; the resulting object (or the callback
    # itself) must be callable.
    if inspect.isclass(callback):
        callback = callback()
    if not callable(callback):
        raise ValueError(
            "Callback must be a class that implements __call__ or a function."
        )
    self._consumer_callback = callback

    self._running = True
    self.connect()
    self._connection.ioloop.start()
2.554862
2.389493
1.069207
def _on_message(self, channel, delivery_frame, properties, body):
    """Callback when a message is received from the server.

    This method wraps a user-registered callback for message delivery. It
    decodes the message body, determines the message schema to validate the
    message with, and validates the message before passing it on to the
    user callback. This also handles acking, nacking, and rejecting messages
    based on exceptions raised by the consumer callback.

    Args:
        channel (pika.channel.Channel): The channel from which the message
            was received.
        delivery_frame (pika.spec.Deliver): The delivery frame which
            includes details about the message like content encoding and
            its delivery tag.
        properties (pika.spec.BasicProperties): The message properties like
            the message headers.
        body (bytes): The message payload.

    Raises:
        HaltConsumer: Raised when the consumer halts.
    """
    _log.debug("Message arrived with delivery tag %s", delivery_frame.delivery_tag)
    try:
        message = get_message(delivery_frame.routing_key, properties, body)
        message.queue = self._consumers[delivery_frame.consumer_tag]
    except ValidationError:
        # Invalid messages are rejected without requeueing.
        channel.basic_nack(delivery_tag=delivery_frame.delivery_tag, requeue=False)
        return

    try:
        _log.info(
            'Consuming message from topic "%s" (id %s)',
            message.topic,
            properties.message_id,
        )
        self._consumer_callback(message)
        channel.basic_ack(delivery_tag=delivery_frame.delivery_tag)
    except Nack:
        _log.info("Returning message id %s to the queue", properties.message_id)
        channel.basic_nack(delivery_tag=delivery_frame.delivery_tag, requeue=True)
    except Drop:
        _log.info("Dropping message id %s", properties.message_id)
        channel.basic_nack(delivery_tag=delivery_frame.delivery_tag, requeue=False)
    except HaltConsumer as e:
        _log.info(
            "Consumer requested halt on message id %s with requeue=%s",
            properties.message_id,
            e.requeue,
        )
        channel.basic_nack(
            delivery_tag=delivery_frame.delivery_tag, requeue=e.requeue
        )
        self._shutdown()
        if e.exit_code != 0:
            raise
    except Exception as e:
        # Any unexpected callback failure returns all unacked messages and
        # halts the consumer with a non-zero exit.
        _log.exception("Received unexpected exception from consumer callback")
        channel.basic_nack(delivery_tag=0, multiple=True, requeue=True)
        self._shutdown()
        raise HaltConsumer(exit_code=1, reason=e, requeue=True)
2.291983
2.224107
1.030518
def get_avatar(from_header, size=64, default="retro"):
    """Get the avatar URL from the email's From header.

    Args:
        from_header (str): The email's From header. May contain the sender's
            full name.

    Returns:
        str: The URL to that sender's avatar.
    """
    # OrderedDict keeps the query-string parameter order deterministic.
    query = parse.urlencode(OrderedDict([("s", size), ("d", default)]))
    # parseaddr extracts the bare address from e.g. "Jane Doe <jd@x.org>".
    address = email.utils.parseaddr(from_header)[1]
    digest = sha256(address.encode("utf-8")).hexdigest()
    return "https://seccdn.libravatar.org/avatar/{}?{}".format(digest, query)
3.578527
3.811915
0.938774
def url(self):
    """An URL to the email in HyperKitty

    Returns:
        str or None: A relevant URL.
    """
    base_url = "https://lists.fedoraproject.org/archives"
    archived_at = self._get_archived_at()
    # The Archived-At header value may be wrapped in angle brackets.
    if archived_at and archived_at.startswith("<"):
        archived_at = archived_at[1:]
    if archived_at and archived_at.endswith(">"):
        archived_at = archived_at[:-1]
    if not archived_at:
        return None
    if archived_at.startswith("http"):
        return archived_at
    # A relative path: anchor it on the HyperKitty archive root.
    return base_url + archived_at
2.871679
2.728276
1.052562
def get_requirements(requirements_file="requirements.txt"):
    """Get the contents of a file listing the requirements.

    Args:
        requirements_file (str): The path to the requirements file, relative
            to this file.

    Returns:
        list: the list of requirements, or an empty list if
            ``requirements_file`` could not be opened or read.
    """
    # BUG FIX: the docstring promises an empty list when the file cannot be
    # opened or read, but open() raised instead.
    try:
        with open(requirements_file) as fd:
            lines = fd.readlines()
    except (IOError, OSError):
        return []

    dependencies = []
    for line in lines:
        maybe_dep = line.strip()
        if maybe_dep.startswith("#"):
            # Skip pure comment lines
            continue
        if maybe_dep.startswith("git+"):
            # VCS reference for dev purposes, expect a trailing comment
            # with the normal requirement
            __, __, maybe_dep = maybe_dep.rpartition("#")
        else:
            # Ignore any trailing comment
            maybe_dep, __, __ = maybe_dep.partition("#")
        # Remove any whitespace and assume non-empty results are dependencies
        maybe_dep = maybe_dep.strip()
        if maybe_dep:
            dependencies.append(maybe_dep)
    return dependencies
4.400269
4.635305
0.949294
def user_avatar_url(username, size=64, default="retro"):
    """Get the avatar URL of the provided Fedora username.

    The URL is returned from the Libravatar service.

    Args:
        username (str): The username to get the avatar of.
        size (int): Size of the avatar in pixels (it's a square).
        default (str): Default avatar to return if not found.

    Returns:
        str: The URL to the avatar image.
    """
    # Fedora accounts are identified by their FAS OpenID URL.
    fedora_openid = "http://{}.id.fedoraproject.org/".format(username)
    return libravatar_url(openid=fedora_openid, size=size, default=default)
6.230294
7.93616
0.785051
def libravatar_url(email=None, openid=None, size=64, default="retro"):
    """Get the URL to an avatar from libravatar.

    Either the user's email or openid must be provided.

    If you want to use Libravatar federation (through DNS), you should
    install and use the ``libravatar`` library instead. Check out the
    ``libravatar.libravatar_url()`` function.

    Args:
        email (str): The user's email
        openid (str): The user's OpenID
        size (int): Size of the avatar in pixels (it's a square).
        default (str): Default avatar to return if not found.

    Returns:
        str: The URL to the avatar image.

    Raises:
        ValueError: If neither email nor openid are provided.
    """
    # We use an OrderedDict here to make testing easier (URL strings become
    # predictable).
    query = parse.urlencode(collections.OrderedDict([("s", size), ("d", default)]))
    # The email takes precedence over the openid when both are given.
    value = email or openid
    if not value:
        raise ValueError("You must provide either the email or the openid.")
    idhash = sha256(value.encode("utf-8")).hexdigest()
    return "https://seccdn.libravatar.org/avatar/%s?%s" % (idhash, query)
3.697143
4.143764
0.892218
def get_parameters(parser, token):
    """{% get_parameters except_field %}"""
    args = token.split_contents()
    # args[0] is the tag name itself; at least one argument must follow.
    if len(args) < 2:
        raise template.TemplateSyntaxError(
            "get_parameters tag takes at least 1 argument"
        )
    return GetParametersNode(args[1].strip())
2.246624
1.991538
1.128085
def get_all_fields(obj):
    """Returns a list of all field names on the instance."""
    fields = []
    for field in obj._meta.fields:
        fname = field.name
        # Prefer the human-readable display value for fields with choices.
        display_getter = "get_" + fname + "_display"
        if hasattr(obj, display_getter):
            value = getattr(obj, display_getter)()
        else:
            try:
                value = getattr(obj, fname)
            except Exception:
                value = None
        if isinstance(value, list):
            value = ",".join(str(item) for item in value)
        # Only expose editable fields that actually carry a value.
        if field.editable and value and field.name:
            fields.append(
                {"label": field.verbose_name, "name": field.name, "value": value}
            )
    return fields
2.496426
2.451432
1.018354
def query_string(context, **kwargs):
    """Add param to the given query string"""
    # Copy the request's GET parameters so the original QueryDict stays
    # untouched, then overwrite/insert each keyword argument.
    query = context["request"].GET.copy()
    for name, value in kwargs.items():
        query[name] = value
    return "?" + query.urlencode()
2.805518
2.461036
1.139974
def query_string_ordering(context, value, **kwargs):
    """Add ordering param to the given query string

    :param context: template context
    :param value: examples would be '-id' or 'id'. A minus indicates that
        the default sorting is descending
    :param kwargs: not used
    :return: Adjusted query string, starting with '?'
    """
    params = context["request"].GET.copy()
    # A leading '-' marks the field's default ordering as descending.
    core_value, default_order = value, "asc"
    if core_value[0] == "-":
        core_value = value[1:]
        default_order = "desc"

    current_value = ""
    # by preference get the current ordering value from the filter
    # so that even if no explicit ordering is in the URL, we still
    # get the implicit ordering, the page's default
    # See generics.filters.FilterSet
    if "filter" in context:
        current_value = context["filter"].ordered_value()
    elif "ordering" in params:
        current_value = params["ordering"]

    # Same field as the current ordering: flip the direction. A different
    # field: fall back to that field's default direction.
    if current_value == core_value:
        order_prefix = "-"
    elif current_value == "-" + core_value:
        order_prefix = ""
    else:
        order_prefix = "-" if default_order == "desc" else ""

    params["ordering"] = order_prefix + core_value
    return "?" + params.urlencode()
4.57199
4.467266
1.023443
def arctic_url(context, link, *args, **kwargs):
    """Resolves links into urls with optional arguments set in self.urls.

    Please check the get_urls method in View. We could tie this to
    check_url_access() to check for permissions, including object-level.
    """

    def reverse_mutable_url_args(url_args):
        mutated_url_args = []
        for arg in url_args:
            # listview item, and argument is a string
            if "item" in context and type(arg) == str:
                # try to get attribute of this object
                try:
                    arg = getattr(context["v"], arg.split(".")[-1])
                # if not found fallback to row pk, which is always first column
                except Exception:
                    arg = context["item"][0]
            mutated_url_args.append(arg)
        return reverse(link, args=mutated_url_args, kwargs=None)

    url_args = args
    # set arguments defined in urls if provided
    if type(link) in (tuple, list):
        context["urls"][link[0]] = list(link[1:])
        link = link[0]
    if link in context["urls"]:
        # for where the params directly given. e.g. ('article-detail',
        # (self.object.pk,))
        url_args = context["urls"][link]
    # list given, which means it's mutable!
    if isinstance(url_args, list):
        return reverse_mutable_url_args(url_args)
    return reverse(link, args=url_args, kwargs=None)
5.655836
5.514128
1.025699
def get_role_model():
    """Returns the Role model that is active in this project."""
    app_model = getattr(settings, "ARCTIC_ROLE_MODEL", "arctic.Role")
    try:
        return django_apps.get_model(app_model)
    except ValueError:
        # get_model raises ValueError for malformed "app.Model" strings.
        raise ImproperlyConfigured(
            "ARCTIC_ROLE_MODEL must be of the form 'app_label.model_name'"
        )
    except LookupError:
        raise ImproperlyConfigured(
            "ARCTIC_ROLE_MODEL refers to model '%s' that has not been "
            "installed" % settings.ARCTIC_ROLE_MODEL
        )
2.154368
2.158788
0.997953
def get_user_role_model():
    """Return the UserRole model class active in this project.

    Reads ``ARCTIC_USER_ROLE_MODEL`` from settings (default
    ``'arctic.UserRole'``) and resolves it through the app registry.
    """
    label = getattr(settings, "ARCTIC_USER_ROLE_MODEL", "arctic.UserRole")
    try:
        return django_apps.get_model(label)
    except ValueError:
        # label was not 'app_label.model_name'
        raise ImproperlyConfigured(
            "ARCTIC_USER_ROLE_MODEL must be of the "
            "form 'app_label.model_name'"
        )
    except LookupError:
        # model exists syntactically but is not installed
        raise ImproperlyConfigured(
            "ARCTIC_USER_ROLE_MODEL refers to model '%s' that has not been "
            "installed" % settings.ARCTIC_USER_ROLE_MODEL
        )
2.096695
2.075481
1.010221
def dispatch(self, request, *args, **kwargs):
    """Require an authenticated user before dispatching.

    Most views in a CMS require a login, so this is the default; set
    ``requires_login = False`` on the view to disable it. Anonymous
    users are redirected to LOGIN_URL with a ``next`` parameter.
    """
    if not self.requires_login:
        return super(View, self).dispatch(request, *args, **kwargs)

    if settings.LOGIN_URL is None or settings.LOGOUT_URL is None:
        raise ImproperlyConfigured(
            "LOGIN_URL and LOGOUT_URL "
            "has to be defined if requires_login is True"
        )
    if not request.user.is_authenticated:
        login_url = resolve_url(settings.LOGIN_URL)
        next_param = quote(request.get_full_path())
        return redirect("%s?next=%s" % (login_url, next_param))
    return super(View, self).dispatch(request, *args, **kwargs)
2.386909
2.080566
1.14724
def get_breadcrumbs(self):
    """Return the permitted breadcrumbs as [{'name': ..., 'url': ...}].

    Breadcrumb format: (('name', 'url'), ...) or None if not used.
    Entries whose named url the current user may not access are dropped.
    """
    if not self.breadcrumbs:
        return None

    allowed = []
    for crumb in self.breadcrumbs:
        target = crumb[1]
        # check permission based on named_url
        if target is not None and not view_from_url(target).has_permission(
            self.request.user
        ):
            continue
        obj = self.object if hasattr(self, "object") else self
        url = reverse_url(target, obj) if target else None
        allowed.append({"name": crumb[0], "url": url})
    return allowed
3.666537
3.333188
1.100009
def get_tabs(self):
    """Return the permitted tabs as dicts with name/active/url.

    Tabs format: (('name', 'url'), ...) or None if tabs are not used.
    Tabs whose view the current user may not access are dropped.
    """
    if not self.tabs:
        return None

    allowed = []
    for tab in self.tabs:
        # check permission based on named_url
        if not view_from_url(tab[1]).has_permission(self.request.user):
            continue
        obj = self.object if hasattr(self, "object") else self
        url = reverse_url(tab[1], obj)
        allowed.append(
            {
                "name": tab[0],
                "active": self.request.path == url,
                "url": self.in_modal(url),
            }
        )
    return allowed
4.127637
3.877006
1.064646
def media(self):
    """Collect all media required to render this view: common assets,
    view-level Media, plus extra assets from ``get_media_assets()``."""
    combined = self._get_common_media()
    combined = combined + self._get_view_media()
    combined = combined + self.get_media_assets()
    return combined
6.343915
4.46135
1.421972
def _get_view_media(self):
    """Gather view-level media assets from the optional inner Media class."""
    # getattr with defaults covers both a missing Media class and missing
    # css/js attributes on it.
    media_cls = getattr(self, "Media", None)
    css = getattr(media_cls, "css", {})
    js = getattr(media_cls, "js", [])
    return Media(css=css, js=js)
3.331502
3.042572
1.094963
def get_list_header(self):
    """Build the table header for the list view.

    Creates a list of dictionaries with the field names, labels, field
    links, field css classes, order_url and order_direction, which
    simplifies the creation of a table in a template.
    """
    model = self.object_list.model
    result = []
    if not self.get_fields():
        # no explicit fields: single column labelled with the model name
        result.append(
            {"name": "", "verbose": str(model._meta.verbose_name)}
        )
    else:
        prefix = self.get_prefix()
        ordering_fields = self.get_ordering_fields()
        for field_name in self.get_fields():
            item = {}
            if isinstance(field_name, tuple):
                # custom property that is not a field of the model
                name = field_name[0]
                item["label"] = field_name[1]
            else:
                name = field_name
                try:
                    field_meta = find_field_meta(model, field_name)
                    if field_meta._verbose_name:  # noqa
                        # explicitly set on the model, so don't change
                        item["label"] = field_meta._verbose_name  # noqa
                    else:
                        # title-case the field name (issue #80)
                        item["label"] = field_meta.verbose_name.title()
                except FieldDoesNotExist:
                    # not a model field (e.g. an annotation): raw name
                    item["label"] = field_name
                except AttributeError:
                    item["label"] = field_name
            item["name"] = prefix + name
            if name in ordering_fields:
                # sortable column: toggle url + current direction
                item["order_url"], item[
                    "order_direction"
                ] = self.ordering_url(name)
            result.append(item)
    return result
3.155604
3.009231
1.048642
def get_ordering(self):
    """Return the ordering used for queryset filtering (prefix stripped)."""
    if self.sorting_field:
        return [self.sorting_field]

    prefix = self.get_prefix()
    candidates = self.get_ordering_with_prefix()
    if self.prefix:
        # drop the first occurrence of the prefix from each field
        candidates = [name.replace(prefix, "", 1) for name in candidates]
    allowed = self.get_ordering_fields_lookups()
    return [name for name in candidates if name.lstrip("-") in allowed]
3.972371
3.590687
1.106298
def get_list_header(self):
    """Build the list-view header: one dict per field with name and
    label and, for sortable fields, order_url / order_direction."""
    header = []
    for field in self.get_fields():
        if isinstance(field, tuple):
            # custom property that is not a field of the model
            entry = {"name": field[0], "label": field[1]}
        else:
            entry = {"name": field, "label": field.title()}
        if entry["name"] in self.get_ordering_fields():
            entry["order_url"], entry["order_direction"] = self.ordering_url(
                entry["name"]
            )
        header.append(entry)
    return header
2.908836
2.428332
1.197874
def get_paginator(
    self, dataset, per_page, orphans=0, allow_empty_first_page=True, **kwargs
):
    """Return the paginator instance used by this view."""
    paginator_kwargs = dict(
        orphans=orphans,
        allow_empty_first_page=allow_empty_first_page,
        **kwargs
    )
    return IndefinitePaginator(dataset, per_page, **paginator_kwargs)
2.192082
2.166812
1.011662
def get(self, request, *args, **kwargs):
    """Render the delete page, catching protected relations first.

    A Collector dry-run detects ProtectedError; when deletion is
    blocked, the protected objects and an explanatory message are shown
    to the user instead of deleting.
    """
    self.object = self.get_object()
    can_delete = True
    protected_objects = []
    collector_message = None
    # NOTE(review): hard-coded "default" db alias — assumes the object
    # lives on the default database; confirm for multi-db setups.
    collector = Collector(using="default")
    try:
        collector.collect([self.object])
    except ProtectedError as e:
        collector_message = (
            "Cannot delete %s because it has relations "
            "that depends on it." % self.object
        )
        protected_objects = e.protected_objects
        can_delete = False
    if can_delete and self.redirect:
        messages.success(request, self.get_success_message(self.object))
        # delegate actual deletion + redirect to delete()
        return self.delete(request, *args, **kwargs)
    context = self.get_context_data(
        object=self.object,
        can_delete=can_delete,
        collector_message=collector_message,
        protected_objects=protected_objects,
    )
    return self.render_to_response(context)
2.596308
2.473756
1.049541
def get_modal_link(self, url, obj={}):
    """Return the modal-link metadata for ``url``, or None if absent.

    The message and title templates are formatted with the row's field
    data. A shallow copy of the configured link is formatted and
    returned, so the templates in ``self.modal_links`` stay intact
    between calls — the previous version formatted the stored dict in
    place, so every later row reused the first row's substituted data.

    :param url: url to look up in ``self.modal_links``
    :param obj: row object or dict used to fill the templates
    :raises ImproperlyConfigured: on a missing or unsupported attribute
    """
    if not (url in self.modal_links.keys()):
        return None
    try:
        if type(obj) != dict:
            obj.obj = str(obj)
            obj = vars(obj)
        # format a copy so the configured templates are not overwritten
        link = dict(self.modal_links[url])
        if link["type"] == "confirm":
            link["message"] = link["message"].format(**obj)
            link["title"] = link["title"].format(**obj)
            link["ok"]  # triggers a KeyError exception if not existent
            link["cancel"]
        elif link["type"] == "iframe":
            link.setdefault("size", "medium")
        else:
            raise ImproperlyConfigured(
                "modal_links type: " + link["type"] + " is unsupported"
            )
        return link
    except KeyError as e:
        raise ImproperlyConfigured(
            "modal_links misses the following attribute: " + str(e)
        )
    except AttributeError:
        return None
3.541514
3.429415
1.032688
def get_success_url(self):
    """Return the URL to redirect to after processing a valid form."""
    if self.success_url:
        return self.in_modal(str(self.success_url))
    # no success_url: inside a modal we can still bounce to the parent
    if self.request.GET.get("inmodal"):
        return reverse("arctic:redirect_to_parent")
    raise ImproperlyConfigured(
        "No URL to redirect to. Provide a success_url."
    )
5.074633
4.421107
1.147819
for index, field in has_no_column.items(): if index == len(has_no_column): field_name = "{field}|{col_last}".format( field=field, col_last=col_last ) has_no_column[index] = self._return_field(field_name, fields) else: field_name = "{field}|{col_avg}".format( field=field, col_avg=col_avg ) has_no_column[index] = self._return_field(field_name, fields) return has_no_column
def _set_has_no_columns(self, has_no_column, col_avg, col_last, fields)
Regenerate has_no_column by adding the amount of columns at the end
2.14854
2.092182
1.026937
collapsible = None description = None try: # Make sure strings with numbers work as well, do this int(str(fieldset)) title = None except ValueError: if fieldset.count("|") > 1: raise ImproperlyConfigured( "The fieldset name does not " "support more than one | sign. " "It's meant to separate a " "fieldset from its description." ) title = fieldset if "|" in fieldset: title, description = fieldset.split("|") if fieldset and (fieldset[0] in "-+"): if fieldset[0] == "-": collapsible = "closed" else: collapsible = "open" title = title[1:] return { "title": title, "description": description, "collapsible": collapsible, }
def _return_fieldset(self, fieldset)
This function became a bit messy, since it needs to deal with two cases. 1) No fieldset, which is represented as an integer 2) A fieldset
4.45836
4.427407
1.006991
sum_no_columns = len(has_no_column) columns_left = self.ALLOWED_COLUMNS - sum_existing_columns if sum_no_columns == 0: columns_avg = columns_left else: columns_avg = int(columns_left / sum_no_columns) remainder = columns_left - (columns_avg * sum_no_columns) columns_for_last_element = columns_avg + remainder return columns_avg, columns_for_last_element
def _calc_avg_and_last_val(self, has_no_column, sum_existing_columns)
Calculate the average of all columns and return a rounded down number. Store the remainder and add it to the last row. Could be implemented better. If the enduser wants more control, he can also just add the amount of columns. Will work fine with small number (<4) of items in a row. :param has_no_column: :param sum_existing_columns: :return: average, columns_for_last_element
3.003583
2.770628
1.08408
field_items = field.split("|") if len(field_items) == 2: return field_items[0], field_items[1] elif len(field_items) == 1: return field_items[0], None
def _split_str(self, field)
Split title|7 into (title, 7)
2.211271
1.830982
1.207697
def ordering_url(self, field_name):
    """Create a url link that toggles sorting on ``field_name``.

    The direction will be ascending if the field is not yet sorted,
    otherwise the opposite of the current sorting.

    :return: (url, direction) tuple; direction is '', 'asc' or 'desc'.
        NOTE(review): ``direction`` appears to describe the field's
        *current* sort direction rather than the new one — confirm
        against the template that consumes it.
    """
    path = self.request.path
    direction = ""
    query_params = self.request.GET.copy()
    ordering = self.request.GET.get("order", "").split(",")
    field = self._get_ordering_field_lookup(field_name)

    if not ordering:
        ordering = self.get_default_ordering()
    # start from the active ordering, then append every other orderable
    # field (in either direction) not already present
    merged_ordering = list(ordering)  # copy the list

    for ordering_field in self.get_ordering_fields_lookups():
        if (ordering_field.lstrip("-") not in ordering) and (
            ("-" + ordering_field.lstrip("-")) not in ordering
        ):
            merged_ordering.append(ordering_field)

    new_ordering = []
    for item in merged_ordering:
        if item.lstrip("-") == field.lstrip("-"):
            # toggle the matched field and move it to the front of the
            # ordering expression
            if (item[0] == "-") or not (item in ordering):
                if item in ordering:
                    direction = "desc"
                new_ordering.insert(0, item.lstrip("-"))
            else:
                direction = "asc"
                new_ordering.insert(0, "-" + item)

    query_params["order"] = ",".join(new_ordering)

    return (path + "?" + query_params.urlencode(safe=","), direction)
2.818602
2.769476
1.017738
def get_fields(self, strip_labels=False):
    """Hook to dynamically change the fields that will be displayed;
    ``strip_labels`` reduces (name, label) entries to just the name."""
    if not strip_labels:
        return self.fields
    return [
        field[0] if type(field) in (tuple, list) else field
        for field in self.fields
    ]
3.205717
3.207055
0.999583
def get_ordering_fields_lookups(self):
    """Resolve the orderable fields to real model field lookups."""
    return [
        self._get_ordering_field_lookup(name)
        for name in self.get_ordering_fields()
    ]
2.710453
2.372362
1.142512
field = field_name get_field = getattr(self, "get_%s_ordering_field" % field_name, None) if get_field: field = get_field() return field
def _get_ordering_field_lookup(self, field_name)
get real model field to order by
3.552691
2.81877
1.260369
def get_advanced_search_form(self, data):
    """Hook to dynamically change the advanced search form.

    Returns None when no advanced search form class is configured;
    previously this raised AttributeError because the cached
    ``_advanced_search_form`` attribute was never set in that case.
    """
    form_class = self.get_advanced_search_form_class()
    if form_class:
        self._advanced_search_form = form_class(data=data)
    return getattr(self, "_advanced_search_form", None)
2.559804
2.264011
1.13065
def sync(cls):
    """Synchronise the Role table with ``settings.ARCTIC_ROLES``.

    Saves roles defined in settings that are not yet in the db (needed
    for the user -> role foreign key), re-activates known roles, makes
    sure the reserved admin role exists, and deactivates roles no
    longer listed in settings.

    :raises ImproperlyConfigured: when the reserved admin role is
        defined in settings.
    """
    try:
        settings_roles = set(settings.ARCTIC_ROLES.keys())
    except AttributeError:
        # ARCTIC_ROLES not configured: treat as empty
        settings_roles = set()

    saved_roles = set(Role.objects.values_list("name", flat=True))
    unsaved_roles = settings_roles - saved_roles
    unused_roles = saved_roles - settings_roles - set([cls.ADMIN])

    # ensure that admin is not defined in settings
    if cls.ADMIN in settings_roles:
        raise ImproperlyConfigured(
            '"' + cls.ADMIN + '" role is reserved '
            "and cannot be defined in settings"
        )

    # ensure that admin exists in the database
    if cls.ADMIN not in saved_roles:
        Role(name=cls.ADMIN, is_active=True).save()

    # check if the role defined in settings already exists in the database
    # and if it does ensure it is enabled.
    for role in saved_roles:
        if role in settings_roles:
            saved_role = Role.objects.get(name=role)
            if not saved_role.is_active:
                saved_role.is_active = True
                saved_role.save()

    for role in unsaved_roles:
        Role(name=role).save()

    for role in unused_roles:
        unused_role = Role.objects.get(name=role)
        unused_role.is_active = False
        unused_role.save()
2.53638
2.302583
1.101537
def get_permission_required(cls):
    """Return the required permissions as an iterable.

    :raises ImproperlyConfigured: when ``permission_required`` is unset.
    """
    if cls.permission_required is None:
        raise ImproperlyConfigured(
            "{0} is missing the permission_required attribute. "
            "Define {0}.permission_required, or override "
            "{0}.get_permission_required().".format(cls.__name__)
        )
    # a single string becomes a one-element tuple ('' means: no perms)
    if not isinstance(cls.permission_required, six.string_types):
        return cls.permission_required
    if cls.permission_required == "":
        return ()
    return (cls.permission_required,)
1.896577
1.857229
1.021187
def has_permission(cls, user):
    """Check view access for ``user`` through the arctic role system."""
    # views that do not require a login are always accessible
    if not cls.requires_login:
        return True
    # anonymous users never pass
    if not user.is_authenticated:
        return False
    # mandatory attribute, returns a tuple of permission names
    perms = cls.get_permission_required()
    # an empty permission tuple means: skip checking
    if not perms:
        return True
    role = user.urole.role.name
    # the admin role implicitly has every permission
    if role == cls.ADMIN:
        return True
    # at least one valid permission grants access
    return any(cls.check_permission(role, perm) for perm in perms)
5.524145
5.317029
1.038953
def check_permission(cls, role, permission):
    """Check whether ``role`` grants ``permission``.

    When granted and a method with the permission's name exists on the
    class, defer to it for an object-level permission check.
    """
    granted = permission in settings.ARCTIC_ROLES[role]
    if granted:
        try:
            return getattr(cls, permission)(role)
        except AttributeError:
            # no object-level checker defined: keep the role result
            pass
    return granted
7.353319
7.362122
0.998804
def str_to_bool(val):
    """Interpret 'true'/'on'/'yes' (any case) and boolean True as True;
    everything else is False."""
    lowered = val.lower() if isinstance(val, str) else val
    return lowered in ["true", "on", "yes", True]
3.939082
4.559678
0.863895
def arctic_paginate(parser, token):
    """Render a Page object with a pagination bar.

    Example::

        {% arctic_paginate page_obj paginator=page_obj.paginator range=10 %}

    Named parameters: ``range`` (max number of page links shown at once;
    default None shows all pages) and ``show_first_last``
    ("true"/"false", default "false").
    """
    bits = token.split_contents()
    if len(bits) < 2:
        raise TemplateSyntaxError(
            "'%s' takes at least one argument"
            " (Page object reference)" % bits[0]
        )
    page = parser.compile_filter(bits[1])
    kwarg_pattern = re.compile(r"(\w+)=(.+)")
    kwargs = {}
    for bit in bits[2:]:
        match = kwarg_pattern.match(bit)
        if not match:
            raise TemplateSyntaxError(
                "Malformed arguments to bootstrap_pagination paginate tag"
            )
        name, value = match.groups()
        kwargs[name] = parser.compile_filter(value)
    return PaginationNode(page, kwargs)
2.419367
2.691207
0.89899
def menu(menu_config=None, **kwargs):
    """Transform a menu definition into a dictionary which is a
    friendlier format to parse in a template.

    Expects ``request`` and ``user`` in kwargs; falls back to
    ``settings.ARCTIC_MENU`` when no config is given. Entries whose
    view the user may not access are skipped; submenus are processed
    recursively and the result is normalised by ``menu_clean``.
    """
    request = kwargs.pop("request", None)
    user = kwargs.pop("user", None)
    # namespaced name of the current url, used to detect active items
    url_full_name = ":".join(
        [request.resolver_match.namespace, request.resolver_match.url_name]
    )
    if not menu_config:
        menu_config = settings.ARCTIC_MENU

    menu_dict = OrderedDict()
    for menu_entry in menu_config:
        if type(menu_entry) in (list, tuple):
            # check permission based on named_url
            path = None
            if menu_entry[1]:
                if not view_from_url(menu_entry[1]).has_permission(user):
                    continue
                path = reverse(menu_entry[1])

            # icons and collapse are optional
            icon = None
            if (len(menu_entry) >= 3) and (
                not type(menu_entry[2]) in (list, tuple)
            ):
                icon = menu_entry[2]
            # longer paths win when several items match (see menu_clean)
            active_weight = len(path) if path else 0
            menu_dict[menu_entry[0]] = {
                "url": menu_entry[1],
                "icon": icon,
                "submenu": None,
                "active": is_active(menu_entry, url_full_name),
                "active_weight": active_weight,
            }

            # check if the last item in a menu entry is a submenu
            submenu = _get_submenu(menu_entry)
            if submenu:
                menu_dict[menu_entry[0]]["submenu"] = menu(
                    submenu, user=user, request=request
                )
    return menu_clean(menu_dict)
3.067284
3.04002
1.008968
def menu_clean(menu_config):
    """Keep only the most specific active menu item.

    A parent becomes active when one of its children is; when several
    items are active, only the one with the largest weight survives.
    """
    heaviest = -1
    for entry in menu_config.values():
        submenu = entry["submenu"]
        if submenu:
            for child in submenu.values():
                if child["active"]:
                    # parent inherits the weight of the active child
                    entry["active"] = True
                    entry["active_weight"] = child["active_weight"]
        if entry["active"]:
            heaviest = max(entry["active_weight"], heaviest)

    if heaviest > 0:
        # one of the items is active: deactivate items with lesser weight
        for entry in menu_config.values():
            if entry["active"] and entry["active_weight"] < heaviest:
                entry["active"] = False
    return menu_config
3.593677
3.152963
1.139778
def view_from_url(named_url):  # noqa
    """Find and return the view class registered for a named url.

    Accepts either a (possibly namespaced) url name or a
    ('name', args...) tuple/list; local urls starting with '#' get a
    dummy view whose permission check always passes.
    """
    # code below is `stolen` from django's reverse method.
    resolver = get_resolver(get_urlconf())
    if type(named_url) in (list, tuple):
        named_url = named_url[0]
    parts = named_url.split(":")
    parts.reverse()
    view = parts[0]
    path = parts[1:]
    current_path = None
    resolved_path = []
    ns_pattern = ""
    ns_converters = {}
    # if it's a local url permission already given, so we just return true
    if named_url.startswith("#"):

        class LocalUrlDummyView:
            @staticmethod
            def has_permission(user):
                return True

        return LocalUrlDummyView

    # walk the namespace chain, narrowing the resolver at each level
    while path:
        ns = path.pop()
        current_ns = current_path.pop() if current_path else None
        # Lookup the name to see if it could be an app identifier
        try:
            app_list = resolver.app_dict[ns]
            # Yes! Path part matches an app in the current Resolver
            if current_ns and current_ns in app_list:
                # If we are reversing for a particular app,
                # use that namespace
                ns = current_ns
            elif ns not in app_list:
                # The name isn't shared by one of the instances
                # (i.e., the default) so just pick the first instance
                # as the default.
                ns = app_list[0]
        except KeyError:
            pass

        if ns != current_ns:
            current_path = None

        try:
            extra, resolver = resolver.namespace_dict[ns]
            resolved_path.append(ns)
            ns_pattern = ns_pattern + extra
            try:
                ns_converters.update(resolver.pattern.converters)
            except Exception:
                # older django versions have no pattern converters
                pass
        except KeyError as key:
            if resolved_path:
                raise NoReverseMatch(
                    "%s is not a registered namespace inside '%s'"
                    % (key, ":".join(resolved_path))
                )
            else:
                raise NoReverseMatch("%s is not a registered namespace" % key)
    if ns_pattern:
        try:
            resolver = get_ns_resolver(
                ns_pattern, resolver, tuple(ns_converters.items())
            )
        except Exception:
            # fallback for django versions whose get_ns_resolver does
            # not take converters
            resolver = get_ns_resolver(ns_pattern, resolver)

    # custom code, get view from reverse_dict: find the callback that
    # shares the url entry with `view` and import its class
    reverse_dict = resolver.reverse_dict.dict()
    for key, url_obj in reverse_dict.items():
        if url_obj == reverse_dict[view] and key != view:
            module = importlib.import_module(key.__module__)
            return getattr(module, key.__name__)
4.167199
4.104356
1.015311
def find_attribute(obj, value):
    """Resolve a '__'-separated attribute chain, one hop at a time.

    For example when model x has a foreign key to model y and y has an
    attribute a, ``find_attribute(x, 'y__a')`` returns x.y.a.
    """
    head, sep, rest = value.partition("__")
    if sep:
        return find_attribute(get_attribute(obj, head), rest)
    return get_attribute(obj, value)
2.41571
2.590055
0.932687
def get_attribute(obj, value):
    """Fetch ``value`` from ``obj``.

    Listview rows are usually model objects, but GROUP_BY querysets
    (via 'values') yield dicts — plain dicts get a key lookup
    (returning None for a missing key), everything else attribute
    access.
    """
    if type(obj) is dict:
        return obj.get(value)
    return getattr(obj, value)
2.876035
3.211599
0.895515
def find_field_meta(obj, value):
    """Return the model field instance at the end of a '__'-separated
    relation chain, e.g. 'y__a' gives the 'a' field on related model y.

    NOTE(review): relies on ``field.rel.to``, which only exists on
    older Django versions (removed in 2.0 in favour of
    ``remote_field.model``) — confirm the supported Django range.
    """
    if "__" in value:
        value_list = value.split("__")
        child_obj = obj._meta.get_field(value_list[0]).rel.to
        return find_field_meta(child_obj, "__".join(value_list[1:]))
    return obj._meta.get_field(value)
2.315398
2.185216
1.059574
def get_field_class(qs, field_name):
    """Return the class name of ``field_name`` on the queryset's model,
    or None when the field does not exist (e.g. an annotation)."""
    try:
        field = qs.model._meta.get_field(field_name)
    except FieldDoesNotExist:
        # while annotating, it's possible that the field does not exist
        return None
    return field.__class__.__name__
5.913273
5.406785
1.093676
def reverse_url(url, obj, fallback_field=None):
    """Reverse a named url, with arguments pulled from ``obj``.

    Also accepts ('named_url', 'field1', 'field2', ...), using the
    value of the listed fields as url arguments. When ``fallback_field``
    is given it becomes the single argument if none others are
    available; local urls starting with '#' pass through unchanged.
    """
    if type(url) in (tuple, list):
        named_url = url[0]
        args = [
            obj[attr] if type(obj) is dict else find_attribute(obj, attr)
            for attr in url[1:]
        ]
    else:
        if url.startswith("#"):  # local url
            return url
        named_url = url
        args = []
        if obj and fallback_field:
            if type(obj) is dict:
                args = [obj[fallback_field]]
            else:
                args = [get_attribute(obj, fallback_field)]

    # Instead of giving NoReverseMatch exception it's more desirable,
    # for field_links in listviews, to just ignore the link.
    if fallback_field and not args:
        return ""
    return reverse(named_url, args=args, kwargs=None)
4.412506
4.2915
1.028197
def arctic_setting(setting_name, valid_options=None):
    """Get a setting from the django settings, falling back to the one
    defined in defaults.py; optionally validate the configured value
    against ``valid_options``."""
    try:
        value = getattr(settings, setting_name)
    except AttributeError:
        # not configured: the defaults fallback below applies
        pass
    else:
        if valid_options and value not in valid_options:
            error_message = "Invalid value for {}, must be one of: {}".format(
                setting_name, str(valid_options)
            )
            raise ImproperlyConfigured(error_message)
    return getattr(settings, setting_name, getattr(defaults, setting_name))
2.261729
2.167823
1.043318
def offset_limit(func):
    """Decorator converting python slice arguments (start, stop) into
    (offset, limit) before calling ``func``.

    Now preserves the wrapped function's metadata via functools.wraps
    (the plain wrapper hid ``__name__``/``__doc__``).
    """
    import functools  # local import keeps the decorator self-contained

    @functools.wraps(func)
    def func_wrapper(self, start, stop):
        # slice semantics: offset = start, limit = number of items
        offset = start
        limit = stop - start
        return func(self, offset, limit)

    return func_wrapper
4.300247
3.667785
1.172437
def is_list_of_list(item):
    """Check whether ``item`` is a list (or tuple) whose first element
    is itself a list or tuple."""
    # strict type() check on the container is intentional (matches
    # only genuine list/tuple values, not subclasses)
    if type(item) not in (list, tuple) or len(item) == 0:
        return False
    return isinstance(item[0], (list, tuple))
2.610093
2.572101
1.014771
def generate_id(*s):
    """Generate a slug id from one or more strings.

    English is forced as the base language so that translated strings
    still produce consistent ids.
    """
    with translation.override("en"):
        return slugify("-".join(str(part) for part in s))
5.327662
5.959187
0.894025
def append_query_parameter(url, parameters, ignore_if_exists=True):
    """Quick and dirty appending of query parameters to a url.

    :param url: base url, with or without an existing query string
    :param parameters: dict of parameter name -> value (both strings);
        mutated when ``ignore_if_exists`` drops keys
    :param ignore_if_exists: skip parameters already present in the url
    :return: url with the parameters appended

    Iterates over a snapshot of the keys: deleting from a dict while
    iterating its live key view raises RuntimeError.
    """
    if ignore_if_exists:
        for key in list(parameters):
            if key + "=" in url:
                del parameters[key]
    parameters_str = "&".join(k + "=" + v for k, v in parameters.items())
    append_token = "&" if "?" in url else "?"
    return url + append_token + parameters_str
2.54353
2.477298
1.026735
def render(self, name, value, attrs=None, renderer=None):
    """Render the widget; keeps a manual template-render path for
    django 1.10 compatibility."""
    if django.VERSION >= (1, 11):
        return super(BetterFileInput, self).render(name, value, attrs)
    # pre-1.11: render the template by hand and mark it safe
    context = self.get_context(name, value, attrs)
    html = render_to_string(template_name=self.template_name, context=context)
    return mark_safe(html)
2.648113
2.442164
1.08433
def get_table(ports):
    """Build the pretty table used to display the port results.

    :param ports: list of found ports
    :return: the table to display
    """
    table = PrettyTable(["Name", "Port", "Protocol", "Description"])
    # left-align the text columns, keep default padding of one space
    table.align["Name"] = "l"
    table.align["Description"] = "l"
    table.padding_width = 1
    for row in ports:
        table.add_row(row)
    return table
2.212041
2.227879
0.992891
def run(port, like, use_json, server):
    """Search port names and numbers, or serve the API when a server
    address is given."""
    if not port and not server[0]:
        raise click.UsageError("Please specify a port")

    if server[0]:
        # server mode: hand control to the web app
        app.run(host=server[0], port=server[1])
        return

    ports = get_ports(port, like)
    if not ports:
        sys.stderr.write("No ports found for '{0}'\n".format(port))
        return

    if use_json:
        print(json.dumps(ports, indent=4))
    else:
        print(get_table(ports))
2.738591
2.659744
1.029645
def get_ports(port, like=False):
    """Query the ports database.

    Numeric input searches the ``port`` column, otherwise ``name``;
    the ``like`` option switches to a substring search.

    :param port: the specified port
    :param like: the --like option
    :return: all ports matching the given ``port``
    :rtype: list
    """
    column = "port" if port.isdigit() else "name"
    if like:
        matches = __DB__.search(where(column).search(port))
    else:
        matches = __DB__.search(where(column) == port)
    return [Port(**match) for match in matches]
4.241195
5.443014
0.7792
def get_account(self, address, id=None, endpoint=None):
    """Look up an account on the blockchain by its address.

    Args:
        address: (str) address to look up (e.g. 'AXjaFSP23Jkbe6Pk9pPGT6NBDs1HVdqaXK')
        id: (int, optional) id to use for response tracking
        endpoint: (RPCEndpoint, optional) endpoint to use
    Returns:
        json object of the result or the error encountered in the RPC call
    """
    params = [address]
    return self._call_endpoint(GET_ACCOUNT_STATE, params=params, id=id, endpoint=endpoint)
6.884741
8.095849
0.850404
def get_height(self, id=None, endpoint=None):
    """Get the current height of the blockchain.

    Args:
        id: (int, optional) id to use for response tracking
        endpoint: (RPCEndpoint, optional) endpoint to use
    Returns:
        json object of the result or the error encountered in the RPC call
    """
    response = self._call_endpoint(GET_BLOCK_COUNT, id=id, endpoint=endpoint)
    return response
8.50351
8.931376
0.952094
def get_asset(self, asset_hash, id=None, endpoint=None):
    """Get an asset by its hash.

    Args:
        asset_hash: (str) asset to look up, e.g. 'c56f33fc6ecfcd0c225c4ab356fee59390af8560be0e930faebe74a6daff7c9b'
        id: (int, optional) id to use for response tracking
        endpoint: (RPCEndpoint, optional) endpoint to use
    Returns:
        json object of the result or the error encountered in the RPC call
    """
    params = [asset_hash]
    return self._call_endpoint(GET_ASSET_STATE, params=params, id=id, endpoint=endpoint)
5.48932
7.059467
0.777583
def get_balance(self, asset_hash, id=None, endpoint=None):
    """Get the balance for an asset by its hash.

    Args:
        asset_hash: (str) asset to look up, e.g. 'c56f33fc6ecfcd0c225c4ab356fee59390af8560be0e930faebe74a6daff7c9b'
        id: (int, optional) id to use for response tracking
        endpoint: (RPCEndpoint, optional) endpoint to use
    Returns:
        json object of the result or the error encountered in the RPC call
    """
    params = [asset_hash]
    return self._call_endpoint(GET_BALANCE, params=params, id=id, endpoint=endpoint)
4.17048
5.69984
0.731684
def get_best_blockhash(self, id=None, endpoint=None):
    """Get the hash of the highest block.

    Args:
        id: (int, optional) id to use for response tracking
        endpoint: (RPCEndpoint, optional) endpoint to use
    Returns:
        json object of the result or the error encountered in the RPC call
    """
    response = self._call_endpoint(GET_BEST_BLOCK_HASH, id=id, endpoint=endpoint)
    return response
4.242662
5.594081
0.75842
def get_block(self, height_or_hash, id=None, endpoint=None):
    """Look up a block by height or by hash.

    Args:
        height_or_hash: (int or str) block height, or its hash in the form
            '1e67372c158a4cfbb17b9ad3aaae77001a4247a00318e354c62e53b56af4006f'
        id: (int, optional) id to use for response tracking
        endpoint: (RPCEndpoint, optional) endpoint to use
    Returns:
        json object of the result or the error encountered in the RPC call
    """
    # the trailing 1 asks the node for the verbose (json) representation
    params = [height_or_hash, 1]
    return self._call_endpoint(GET_BLOCK, params=params, id=id, endpoint=endpoint)
4.921315
7.821525
0.629201
def get_block_hash(self, height, id=None, endpoint=None):
    """Get the hash of a block by its height.

    Args:
        height: (int) height of the block to look up
        id: (int, optional) id to use for response tracking
        endpoint: (RPCEndpoint, optional) endpoint to use
    Returns:
        json object of the result or the error encountered in the RPC call
    """
    params = [height]
    return self._call_endpoint(GET_BLOCK_HASH, params=params, id=id, endpoint=endpoint)
4.62675
6.12743
0.755088
def get_block_header(self, block_hash, id=None, endpoint=None):
    """Get the block header for the specified block hash.

    Args:
        block_hash: (str) block scripthash (e.g. 'a5508c9b6ed0fc09a531a62bc0b3efcb6b8a9250abaf72ab8e9591294c1f6957')
        id: (int, optional) id to use for response tracking
        endpoint: (RPCEndpoint, optional) endpoint to use
    Returns:
        json object of the result or the error encountered in the RPC call
    """
    # the trailing 1 asks the node for the verbose (json) representation
    params = [block_hash, 1]
    return self._call_endpoint(GET_BLOCK_HEADER, params=params, id=id, endpoint=endpoint)
4.772054
6.154856
0.775332
def get_block_sysfee(self, height, id=None, endpoint=None):
    """Get the system fee of a block by height (used for gas claims).

    Args:
        height: (int) height of the block to look up
        id: (int, optional) id to use for response tracking
        endpoint: (RPCEndpoint, optional) endpoint to use
    Returns:
        json object of the result or the error encountered in the RPC call
    """
    params = [height]
    return self._call_endpoint(GET_BLOCK_SYS_FEE, params=params, id=id, endpoint=endpoint)
4.138081
5.31978
0.777867
def get_connection_count(self, id=None, endpoint=None):
    """Get the number of nodes connected to the endpoint.

    Args:
        id: (int, optional) id to use for response tracking
        endpoint: (RPCEndpoint, optional) endpoint to use
    Returns:
        json object of the result or the error encountered in the RPC call
    """
    response = self._call_endpoint(GET_CONNECTION_COUNT, id=id, endpoint=endpoint)
    return response
4.893146
5.846667
0.836912
def get_contract_state(self, contract_hash, id=None, endpoint=None):
    """Get a contract state object by its hash.

    Args:
        contract_hash: (str) contract hash, e.g. 'd7678dd97c000be3f33e9362e673101bac4ca654'
        id: (int, optional) id to use for response tracking
        endpoint: (RPCEndpoint, optional) endpoint to use
    Returns:
        json object of the result or the error encountered in the RPC call
    """
    params = [contract_hash]
    return self._call_endpoint(GET_CONTRACT_STATE, params=params, id=id, endpoint=endpoint)
4.305172
5.201852
0.827623
def get_raw_mempool(self, id=None, endpoint=None):
    """Return the transactions currently in the endpoint's memory pool.

    Args:
        id: (int, optional) id to use for response tracking
        endpoint: (RPCEndpoint, optional) endpoint to use
    Returns:
        json object of the result or the error encountered in the RPC call
    """
    response = self._call_endpoint(GET_RAW_MEMPOOL, id=id, endpoint=endpoint)
    return response
3.745962
5.121779
0.731379
def get_transaction(self, tx_hash, id=None, endpoint=None):
    """Look up a transaction by its hash.

    Args:
        tx_hash: (str) hash in the form '58c634f81fbd4ae2733d7e3930a9849021840fc19dc6af064d6f2812a333f91d'
        id: (int, optional) id to use for response tracking
        endpoint: (RPCEndpoint, optional) endpoint to use
    Returns:
        json object of the result or the error encountered in the RPC call
    """
    # the trailing 1 asks the node for the verbose (json) representation
    params = [tx_hash, 1]
    return self._call_endpoint(GET_RAW_TRANSACTION, params=params, id=id, endpoint=endpoint)
5.822497
8.224732
0.707925
result = self._call_endpoint(GET_STORAGE, params=[contract_hash, binascii.hexlify(storage_key.encode('utf-8')).decode('utf-8')], id=id, endpoint=endpoint) try: return bytearray(binascii.unhexlify(result.encode('utf-8'))) except Exception as e: raise NEORPCException("could not decode result %s " % e)
def get_storage(self, contract_hash, storage_key, id=None, endpoint=None)
Returns a storage item of a specified contract Args: contract_hash: (str) hash of the contract to lookup, for example 'd7678dd97c000be3f33e9362e673101bac4ca654' storage_key: (str) storage key to lookup, for example 'totalSupply' id: (int, optional) id to use for response tracking endpoint: (RPCEndpoint, optional) endpoint to specify to use Returns: bytearray: bytearray value of the storage item
3.939384
4.167994
0.945151
return self._call_endpoint(GET_TX_OUT, params=[tx_hash, vout_id], id=id, endpoint=endpoint)
def get_tx_out(self, tx_hash, vout_id, id=None, endpoint=None)
Gets a transaction output by specified transaction hash and output index Args: tx_hash: (str) hash in the form '58c634f81fbd4ae2733d7e3930a9849021840fc19dc6af064d6f2812a333f91d' vout_id: (int) index of the transaction output in the transaction id: (int, optional) id to use for response tracking endpoint: (RPCEndpoint, optional) endpoint to specify to use Returns: json object of the result or the error encountered in the RPC call
3.130656
3.795241
0.82489
return self._call_endpoint(INVOKE, params=[contract_hash, params], id=id, endpoint=endpoint)
def invoke_contract(self, contract_hash, params, id=None, endpoint=None)
Invokes a contract Args: contract_hash: (str) hash of the contract, for example 'd7678dd97c000be3f33e9362e673101bac4ca654' params: (list) a list of json ContractParameters to pass along with the invocation, example [{'type':7,'value':'symbol'},{'type':16, 'value':[]}] id: (int, optional) id to use for response tracking endpoint: (RPCEndpoint, optional) endpoint to specify to use Returns: json object of the result or the error encountered in the RPC call
6.164606
7.315067
0.842727
return self._call_endpoint(INVOKE_FUNCTION, params=[contract_hash, operation, params if params else []], id=id, endpoint=endpoint)
def invoke_contract_fn(self, contract_hash, operation, params=None, id=None, endpoint=None)
Invokes a contract Args: contract_hash: (str) hash of the contract, for example 'd7678dd97c000be3f33e9362e673101bac4ca654' operation: (str) the operation to call on the contract params: (list) a list of json ContractParameters to pass along with the invocation, example [{'type':7,'value':'symbol'},{'type':16, 'value':[]}] id: (int, optional) id to use for response tracking endpoint: (RPCEndpoint, optional) endpoint to specify to use Returns: json object of the result or the error encountered in the RPC call
5.41016
6.574773
0.822866