import json
import unittest
import os
#import sys
#import tempfile
from pycoin.serialize import h2b
from pycoin.tx import TxIn, TxOut, Tx
from pycoin.tx.script import ScriptError
from pycoin.tx.script import flags
from pycoin.tx.script.tools import compile
from pycoin.tx.script.vm import eval_script
from pycoin.tx.script.vm import verify_script
SCRIPT_VALID_JSON = os.path.dirname(__file__) + '/data/script_valid.json'
SCRIPT_INVALID_JSON = os.path.dirname(__file__) + '/data/script_invalid.json'
class TestTx(unittest.TestCase):
pass
def parse_flags(flag_string):
v = 0
if len(flag_string) > 0:
for f in flag_string.split(","):
v |= getattr(flags, "VERIFY_%s" % f)
return v
def build_credit_tx(script_out_bin):
txs_in = [TxIn(b'\0'*32, 4294967295, b'\0\0', sequence=4294967295)]
txs_out = [TxOut(0, script_out_bin)]
return Tx(1, txs_in, txs_out)
def build_spending_tx(script_in_bin, credit_tx):
txs_in = [TxIn(credit_tx.hash(), 0, script_in_bin, sequence=4294967295)]
txs_out = [TxOut(0, b'')]
spend_tx = Tx(1, txs_in, txs_out, unspents=credit_tx.tx_outs_as_spendable())
return spend_tx
def dump_failure_info(spend_tx, script_in, script_out, flags, comment):
return
print(script_in)
print(script_out)
from pycoin.serialize import b2h
def tbf(*args):
pc, opcode, data, stack, altstack, is_signature, is_condition = args
from pycoin.tx.script.tools import disassemble_for_opcode_data
opd = disassemble_for_opcode_data(opcode, data)
if len(altstack) == 0:
altstack = ''
print("%s %s\n %3x %s" % (stack, altstack, pc, opd))
import pdb
pdb.set_trace()
try:
r = spend_tx.is_signature_ok(tx_in_idx=0, traceback_f=tbf, flags=flags)
except Exception as ex:
print(ex)
print("test failed: '%s' '%s' : %s %s" % (script_in, script_out, comment, flags))
try:
r = spend_tx.is_signature_ok(tx_in_idx=0, traceback_f=tbf, flags=flags)
except Exception as ex:
print(ex)
import pdb; pdb.set_trace()
def make_test(script_in, script_out, flags_string, comment, expect_valid=True):
def f(self):
script_in_bin = compile(script_in)
script_out_bin = compile(script_out)
flags = parse_flags(flags_string)
try:
credit_tx = build_credit_tx(script_out_bin)
spend_tx = build_spending_tx(script_in_bin, credit_tx)
r = spend_tx.is_signature_ok(tx_in_idx=0, flags=flags)
except ScriptError:
r = False
except:
r = -1
if r != expect_valid:
dump_failure_info(spend_tx, script_in, script_out, flags, comment)
self.assertEqual(r, expect_valid)
return f
def items_from_json(path):
with open(path, "r") as f:
for i in json.load(f):
if len(i) >= 3:
if len(i) == 3:
i.append("no comment")
yield i
def inject():
for idx, (script_in, script_out, flags, comment) in enumerate(items_from_json(SCRIPT_VALID_JSON)):
name_of_f = "test_valid_%03d" % idx
setattr(TestTx, name_of_f, make_test(script_in, script_out, flags, comment))
print("adding %s" % name_of_f)
for idx, args in enumerate(items_from_json(SCRIPT_INVALID_JSON)):
(script_in, script_out, flags, comment) = args[:4]
name_of_f = "test_invalid_%03d" % idx
setattr(TestTx, name_of_f, make_test(script_in, script_out, flags, comment, expect_valid=False))
print("adding %s" % name_of_f)
inject()
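# For reference, the same injection pattern in isolation: a minimal sketch,
# independent of pycoin, showing how inject() attaches generated test methods
# to a unittest.TestCase before discovery. The doubling check is purely
# illustrative.
class DataDrivenTests(unittest.TestCase):
    pass

def make_case(value, expected):
    # The closure captures one row of data; unittest treats the returned
    # function like any hand-written test method.
    def test(self):
        self.assertEqual(value * 2, expected)
    return test

for _idx, (_value, _expected) in enumerate([(1, 2), (3, 6), (10, 20)]):
    setattr(DataDrivenTests, "test_double_%03d" % _idx, make_case(_value, _expected))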
|
{
"content_hash": "ee6954438076427fa7a035e5e768b1f7",
"timestamp": "",
"source": "github",
"line_count": 107,
"max_line_length": 104,
"avg_line_length": 33.850467289719624,
"alnum_prop": 0.6167863059083379,
"repo_name": "XertroV/pycoin",
"id": "3806110c517f754821f1ba71bd5e7cc29d17ab0c",
"size": "3645",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "tests/vm_script_test.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "115"
},
{
"name": "Python",
"bytes": "520441"
},
{
"name": "Shell",
"bytes": "198"
}
],
"symlink_target": ""
}
|
from twisted.trial import unittest
import sys
import functools
from unittest import skipUnless
import six
def fake_import(orig, name, *args, **kw):
if name in ['GeoIP']:
raise ImportError('testing!')
return orig(*((name,) + args), **kw)
class TestImports(unittest.TestCase):
@skipUnless(six.PY2 and 'pypy' not in sys.version.lower(), "Doesn't work in PYPY, Py3")
def test_no_GeoIP(self):
"""
Make sure we don't explode if there's no GeoIP module
"""
global __import__
orig = __import__
try:
            # attempt to ensure we've unimported txtorcon.util
try:
del sys.modules['txtorcon.util']
except KeyError:
pass
import gc
gc.collect()
# replace global import with our test import, which will
# throw on GeoIP import no matter what
global __builtins__
__builtins__['__import__'] = functools.partial(fake_import, orig)
# now ensure we set up all the databases as "None" when we
# import w/o the GeoIP thing available.
import txtorcon.util
loc = txtorcon.util.NetLocation('127.0.0.1')
self.assertEqual(loc.city, None)
self.assertEqual(loc.asn, None)
self.assertEqual(loc.countrycode, '')
finally:
__import__ = orig
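# A standalone sketch of the same import-blocking trick used above, assuming
# plain Python 3 (no twisted/txtorcon required): swap builtins.__import__ for a
# wrapper that raises ImportError for one module name, so the "library not
# installed" code path can be exercised deterministically.
import builtins

def _fake_import(orig, blocked, name, *args, **kwargs):
    if name == blocked:
        raise ImportError('testing!')
    return orig(name, *args, **kwargs)

_orig_import = builtins.__import__
builtins.__import__ = functools.partial(_fake_import, _orig_import, 'GeoIP')
try:
    sys.modules.pop('GeoIP', None)
    try:
        import GeoIP  # noqa: F401
    except ImportError:
        print('GeoIP import blocked as expected')
finally:
    builtins.__import__ = _orig_import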
|
{
"content_hash": "127db05c9533bac067fcc6279f2fccf3",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 91,
"avg_line_length": 29.770833333333332,
"alnum_prop": 0.5675297410776767,
"repo_name": "meejah/txtorcon",
"id": "b2285c964005c7038226596f97c5840819dcd215",
"size": "1429",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "test/test_util_imports.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "791"
},
{
"name": "Makefile",
"bytes": "4448"
},
{
"name": "Python",
"bytes": "878008"
},
{
"name": "Shell",
"bytes": "1474"
}
],
"symlink_target": ""
}
|
{"name": "disabled_addon", "installable": False}
|
{
"content_hash": "352a054032377251e5e61713c6c2a64e",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 48,
"avg_line_length": 49,
"alnum_prop": 0.673469387755102,
"repo_name": "Tecnativa/docker-odoo-base",
"id": "32114e6b6cb503069bf0b91447e1eceab228ff56",
"size": "73",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/scaffoldings/dotd/custom/src/private/disabled_addon/__openerp__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "33853"
},
{
"name": "Shell",
"bytes": "6434"
}
],
"symlink_target": ""
}
|
"""Handle legacy notification platforms."""
from __future__ import annotations
import asyncio
from functools import partial
from typing import Any, cast
from homeassistant.const import CONF_DESCRIPTION, CONF_NAME
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_per_platform, discovery, template
from homeassistant.helpers.service import async_set_service_schema
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from homeassistant.loader import async_get_integration, bind_hass
from homeassistant.setup import async_prepare_setup_platform, async_start_setup
from homeassistant.util import slugify
from homeassistant.util.yaml import load_yaml
from .const import (
ATTR_DATA,
ATTR_MESSAGE,
ATTR_TARGET,
ATTR_TITLE,
DOMAIN,
LOGGER,
NOTIFY_SERVICE_SCHEMA,
SERVICE_NOTIFY,
)
CONF_FIELDS = "fields"
NOTIFY_SERVICES = "notify_services"
async def async_setup_legacy(hass: HomeAssistant, config: ConfigType) -> None:
"""Set up legacy notify services."""
hass.data.setdefault(NOTIFY_SERVICES, {})
async def async_setup_platform(
integration_name: str,
p_config: ConfigType | None = None,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up a notify platform."""
if p_config is None:
p_config = {}
platform = await async_prepare_setup_platform(
hass, config, DOMAIN, integration_name
)
if platform is None:
LOGGER.error("Unknown notification service specified")
return
full_name = f"{DOMAIN}.{integration_name}"
LOGGER.info("Setting up %s", full_name)
with async_start_setup(hass, [full_name]):
notify_service = None
try:
if hasattr(platform, "async_get_service"):
notify_service = await platform.async_get_service(
hass, p_config, discovery_info
)
elif hasattr(platform, "get_service"):
notify_service = await hass.async_add_executor_job(
platform.get_service, hass, p_config, discovery_info
)
else:
raise HomeAssistantError("Invalid notify platform.")
if notify_service is None:
# Platforms can decide not to create a service based
# on discovery data.
if discovery_info is None:
LOGGER.error(
"Failed to initialize notification service %s",
integration_name,
)
return
except Exception: # pylint: disable=broad-except
LOGGER.exception("Error setting up platform %s", integration_name)
return
if discovery_info is None:
discovery_info = {}
conf_name = p_config.get(CONF_NAME) or discovery_info.get(CONF_NAME)
target_service_name_prefix = conf_name or integration_name
service_name = slugify(conf_name or SERVICE_NOTIFY)
await notify_service.async_setup(
hass, service_name, target_service_name_prefix
)
await notify_service.async_register_services()
hass.data[NOTIFY_SERVICES].setdefault(integration_name, []).append(
notify_service
)
hass.config.components.add(f"{DOMAIN}.{integration_name}")
setup_tasks = [
asyncio.create_task(async_setup_platform(integration_name, p_config))
for integration_name, p_config in config_per_platform(config, DOMAIN)
if integration_name is not None
]
if setup_tasks:
await asyncio.wait(setup_tasks)
async def async_platform_discovered(
platform: str, info: DiscoveryInfoType | None
) -> None:
"""Handle for discovered platform."""
await async_setup_platform(platform, discovery_info=info)
discovery.async_listen_platform(hass, DOMAIN, async_platform_discovered)
@callback
def check_templates_warn(hass: HomeAssistant, tpl: template.Template) -> None:
"""Warn user that passing templates to notify service is deprecated."""
if tpl.is_static or hass.data.get("notify_template_warned"):
return
hass.data["notify_template_warned"] = True
LOGGER.warning(
"Passing templates to notify service is deprecated and will be removed in 2021.12. "
"Automations and scripts handle templates automatically"
)
@bind_hass
async def async_reload(hass: HomeAssistant, integration_name: str) -> None:
"""Register notify services for an integration."""
if not _async_integration_has_notify_services(hass, integration_name):
return
tasks = [
notify_service.async_register_services()
for notify_service in hass.data[NOTIFY_SERVICES][integration_name]
]
await asyncio.gather(*tasks)
@bind_hass
async def async_reset_platform(hass: HomeAssistant, integration_name: str) -> None:
"""Unregister notify services for an integration."""
if not _async_integration_has_notify_services(hass, integration_name):
return
tasks = [
notify_service.async_unregister_services()
for notify_service in hass.data[NOTIFY_SERVICES][integration_name]
]
await asyncio.gather(*tasks)
del hass.data[NOTIFY_SERVICES][integration_name]
def _async_integration_has_notify_services(
hass: HomeAssistant, integration_name: str
) -> bool:
"""Determine if an integration has notify services registered."""
if (
NOTIFY_SERVICES not in hass.data
or integration_name not in hass.data[NOTIFY_SERVICES]
):
return False
return True
class BaseNotificationService:
"""An abstract class for notification services."""
# While not purely typed, it makes typehinting more useful for us
# and removes the need for constant None checks or asserts.
hass: HomeAssistant = None # type: ignore[assignment]
# Name => target
registered_targets: dict[str, str]
def send_message(self, message: str, **kwargs: Any) -> None:
"""Send a message.
kwargs can contain ATTR_TITLE to specify a title.
"""
raise NotImplementedError()
async def async_send_message(self, message: str, **kwargs: Any) -> None:
"""Send a message.
kwargs can contain ATTR_TITLE to specify a title.
"""
await self.hass.async_add_executor_job(
partial(self.send_message, message, **kwargs)
)
async def _async_notify_message_service(self, service: ServiceCall) -> None:
"""Handle sending notification message service calls."""
kwargs = {}
message = service.data[ATTR_MESSAGE]
if title := service.data.get(ATTR_TITLE):
check_templates_warn(self.hass, title)
title.hass = self.hass
kwargs[ATTR_TITLE] = title.async_render(parse_result=False)
if self.registered_targets.get(service.service) is not None:
kwargs[ATTR_TARGET] = [self.registered_targets[service.service]]
elif service.data.get(ATTR_TARGET) is not None:
kwargs[ATTR_TARGET] = service.data.get(ATTR_TARGET)
check_templates_warn(self.hass, message)
message.hass = self.hass
kwargs[ATTR_MESSAGE] = message.async_render(parse_result=False)
kwargs[ATTR_DATA] = service.data.get(ATTR_DATA)
await self.async_send_message(**kwargs)
async def async_setup(
self,
hass: HomeAssistant,
service_name: str,
target_service_name_prefix: str,
) -> None:
"""Store the data for the notify service."""
# pylint: disable=attribute-defined-outside-init
self.hass = hass
self._service_name = service_name
self._target_service_name_prefix = target_service_name_prefix
self.registered_targets = {}
# Load service descriptions from notify/services.yaml
integration = await async_get_integration(hass, DOMAIN)
services_yaml = integration.file_path / "services.yaml"
self.services_dict = cast(
dict, await hass.async_add_executor_job(load_yaml, str(services_yaml))
)
async def async_register_services(self) -> None:
"""Create or update the notify services."""
if hasattr(self, "targets"):
stale_targets = set(self.registered_targets)
for name, target in self.targets.items(): # type: ignore[attr-defined]
target_name = slugify(f"{self._target_service_name_prefix}_{name}")
if target_name in stale_targets:
stale_targets.remove(target_name)
if (
target_name in self.registered_targets
and target == self.registered_targets[target_name]
):
continue
self.registered_targets[target_name] = target
self.hass.services.async_register(
DOMAIN,
target_name,
self._async_notify_message_service,
schema=NOTIFY_SERVICE_SCHEMA,
)
# Register the service description
service_desc = {
CONF_NAME: f"Send a notification via {target_name}",
CONF_DESCRIPTION: f"Sends a notification message using the {target_name} integration.",
CONF_FIELDS: self.services_dict[SERVICE_NOTIFY][CONF_FIELDS],
}
async_set_service_schema(self.hass, DOMAIN, target_name, service_desc)
for stale_target_name in stale_targets:
del self.registered_targets[stale_target_name]
self.hass.services.async_remove(
DOMAIN,
stale_target_name,
)
if self.hass.services.has_service(DOMAIN, self._service_name):
return
self.hass.services.async_register(
DOMAIN,
self._service_name,
self._async_notify_message_service,
schema=NOTIFY_SERVICE_SCHEMA,
)
# Register the service description
service_desc = {
CONF_NAME: f"Send a notification with {self._service_name}",
CONF_DESCRIPTION: f"Sends a notification message using the {self._service_name} service.",
CONF_FIELDS: self.services_dict[SERVICE_NOTIFY][CONF_FIELDS],
}
async_set_service_schema(self.hass, DOMAIN, self._service_name, service_desc)
async def async_unregister_services(self) -> None:
"""Unregister the notify services."""
if self.registered_targets:
remove_targets = set(self.registered_targets)
for remove_target_name in remove_targets:
del self.registered_targets[remove_target_name]
self.hass.services.async_remove(
DOMAIN,
remove_target_name,
)
if not self.hass.services.has_service(DOMAIN, self._service_name):
return
self.hass.services.async_remove(
DOMAIN,
self._service_name,
)
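# For orientation, a minimal sketch of a legacy platform that the
# async_setup_platform() logic above could load. It would live in its own
# platform module (e.g. a hypothetical <integration>/notify.py), not in this
# file; it is shown here only to illustrate the hooks the loader checks for via
# hasattr(): a module-level get_service()/async_get_service() returning a
# BaseNotificationService subclass.
class ExampleNotificationService(BaseNotificationService):
    """Trivial service that only logs what would be sent."""

    def send_message(self, message: str, **kwargs: Any) -> None:
        # kwargs may carry ATTR_TITLE, ATTR_TARGET and ATTR_DATA, as populated
        # by _async_notify_message_service above.
        LOGGER.info("example notify: %s", message)

def get_service(hass, config, discovery_info=None):
    """Synchronous hook, run in the executor by the setup code above."""
    return ExampleNotificationService()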
|
{
"content_hash": "68c5141d99010f2e23248c6417887fd1",
"timestamp": "",
"source": "github",
"line_count": 315,
"max_line_length": 107,
"avg_line_length": 36.577777777777776,
"alnum_prop": 0.6136087484811664,
"repo_name": "GenericStudent/home-assistant",
"id": "af29a9fba99f960fdbaebf5e284752206099af58",
"size": "11522",
"binary": false,
"copies": "2",
"ref": "refs/heads/dev",
"path": "homeassistant/components/notify/legacy.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "3070"
},
{
"name": "Python",
"bytes": "44491729"
},
{
"name": "Shell",
"bytes": "5092"
}
],
"symlink_target": ""
}
|
import _plotly_utils.basevalidators
class MetasrcValidator(_plotly_utils.basevalidators.SrcValidator):
def __init__(self, plotly_name="metasrc", parent_name="scattergl", **kwargs):
super(MetasrcValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "none"),
role=kwargs.pop("role", "info"),
**kwargs
)
|
{
"content_hash": "61de5c9970602e0f6cfbf2c239025670",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 81,
"avg_line_length": 36.666666666666664,
"alnum_prop": 0.6068181818181818,
"repo_name": "plotly/python-api",
"id": "4ecf38a5ff9e27341d9ffce3c765239c6cdcbb11",
"size": "440",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "packages/python/plotly/plotly/validators/scattergl/_metasrc.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "6870"
},
{
"name": "Makefile",
"bytes": "1708"
},
{
"name": "Python",
"bytes": "823245"
},
{
"name": "Shell",
"bytes": "3238"
}
],
"symlink_target": ""
}
|
"""Entry points into the Beta API of gRPC Python."""
# threading is referenced from specification in this module.
import abc
import enum
import threading # pylint: disable=unused-import
# cardinality and face are referenced from specification in this module.
from grpc._adapter import _intermediary_low
from grpc._adapter import _low
from grpc._adapter import _types
from grpc.beta import _connectivity_channel
from grpc.beta import _server
from grpc.beta import _stub
from grpc.beta import interfaces
from grpc.framework.common import cardinality # pylint: disable=unused-import
from grpc.framework.interfaces.face import face # pylint: disable=unused-import
_CHANNEL_SUBSCRIPTION_CALLBACK_ERROR_LOG_MESSAGE = (
'Exception calling channel subscription callback!')
class ChannelCredentials(object):
"""A value encapsulating the data required to create a secure Channel.
This class and its instances have no supported interface - it exists to define
the type of its instances and its instances exist to be passed to other
functions.
"""
def __init__(self, low_credentials):
self._low_credentials = low_credentials
def ssl_channel_credentials(root_certificates, private_key, certificate_chain):
"""Creates a ChannelCredentials for use with an SSL-enabled Channel.
Args:
root_certificates: The PEM-encoded root certificates or None to ask for
them to be retrieved from a default location.
private_key: The PEM-encoded private key to use or None if no private key
should be used.
certificate_chain: The PEM-encoded certificate chain to use or None if no
certificate chain should be used.
Returns:
A ChannelCredentials for use with an SSL-enabled Channel.
"""
return ChannelCredentials(_low.channel_credentials_ssl(
root_certificates, private_key, certificate_chain))
class CallCredentials(object):
"""A value encapsulating data asserting an identity over an *established*
channel. May be composed with ChannelCredentials to always assert identity for
every call over that channel.
This class and its instances have no supported interface - it exists to define
the type of its instances and its instances exist to be passed to other
functions.
"""
def __init__(self, low_credentials):
self._low_credentials = low_credentials
def metadata_call_credentials(metadata_plugin, name=None):
"""Construct CallCredentials from an interfaces.GRPCAuthMetadataPlugin.
Args:
metadata_plugin: An interfaces.GRPCAuthMetadataPlugin to use in constructing
the CallCredentials object.
Returns:
A CallCredentials object for use in a GRPCCallOptions object.
"""
if name is None:
name = metadata_plugin.__name__
return CallCredentials(
_low.call_credentials_metadata_plugin(metadata_plugin, name))
def composite_call_credentials(call_credentials, additional_call_credentials):
"""Compose two CallCredentials to make a new one.
Args:
call_credentials: A CallCredentials object.
additional_call_credentials: Another CallCredentials object to compose on
top of call_credentials.
Returns:
A CallCredentials object for use in a GRPCCallOptions object.
"""
return CallCredentials(
_low.call_credentials_composite(
call_credentials._low_credentials,
additional_call_credentials._low_credentials))
def composite_channel_credentials(channel_credentials,
additional_call_credentials):
"""Compose ChannelCredentials on top of client credentials to make a new one.
Args:
channel_credentials: A ChannelCredentials object.
additional_call_credentials: A CallCredentials object to compose on
top of channel_credentials.
Returns:
A ChannelCredentials object for use in a GRPCCallOptions object.
"""
return ChannelCredentials(
_low.channel_credentials_composite(
channel_credentials._low_credentials,
additional_call_credentials._low_credentials))
class Channel(object):
"""A channel to a remote host through which RPCs may be conducted.
Only the "subscribe" and "unsubscribe" methods are supported for application
use. This class' instance constructor and all other attributes are
unsupported.
"""
def __init__(self, low_channel, intermediary_low_channel):
self._low_channel = low_channel
self._intermediary_low_channel = intermediary_low_channel
self._connectivity_channel = _connectivity_channel.ConnectivityChannel(
low_channel)
def subscribe(self, callback, try_to_connect=None):
"""Subscribes to this Channel's connectivity.
Args:
callback: A callable to be invoked and passed an
interfaces.ChannelConnectivity identifying this Channel's connectivity.
The callable will be invoked immediately upon subscription and again for
every change to this Channel's connectivity thereafter until it is
unsubscribed.
try_to_connect: A boolean indicating whether or not this Channel should
attempt to connect if it is not already connected and ready to conduct
RPCs.
"""
self._connectivity_channel.subscribe(callback, try_to_connect)
def unsubscribe(self, callback):
"""Unsubscribes a callback from this Channel's connectivity.
Args:
callback: A callable previously registered with this Channel from having
been passed to its "subscribe" method.
"""
self._connectivity_channel.unsubscribe(callback)
def insecure_channel(host, port):
"""Creates an insecure Channel to a remote host.
Args:
host: The name of the remote host to which to connect.
port: The port of the remote host to which to connect.
Returns:
A Channel to the remote host through which RPCs may be conducted.
"""
intermediary_low_channel = _intermediary_low.Channel(
'%s:%d' % (host, port), None)
return Channel(intermediary_low_channel._internal, intermediary_low_channel) # pylint: disable=protected-access
def secure_channel(host, port, channel_credentials):
"""Creates a secure Channel to a remote host.
Args:
host: The name of the remote host to which to connect.
port: The port of the remote host to which to connect.
channel_credentials: A ChannelCredentials.
Returns:
A secure Channel to the remote host through which RPCs may be conducted.
"""
intermediary_low_channel = _intermediary_low.Channel(
'%s:%d' % (host, port), channel_credentials._low_credentials)
return Channel(intermediary_low_channel._internal, intermediary_low_channel) # pylint: disable=protected-access
class StubOptions(object):
"""A value encapsulating the various options for creation of a Stub.
This class and its instances have no supported interface - it exists to define
the type of its instances and its instances exist to be passed to other
functions.
"""
def __init__(
self, host, request_serializers, response_deserializers,
metadata_transformer, thread_pool, thread_pool_size):
self.host = host
self.request_serializers = request_serializers
self.response_deserializers = response_deserializers
self.metadata_transformer = metadata_transformer
self.thread_pool = thread_pool
self.thread_pool_size = thread_pool_size
_EMPTY_STUB_OPTIONS = StubOptions(
None, None, None, None, None, None)
def stub_options(
host=None, request_serializers=None, response_deserializers=None,
metadata_transformer=None, thread_pool=None, thread_pool_size=None):
"""Creates a StubOptions value to be passed at stub creation.
All parameters are optional and should always be passed by keyword.
Args:
host: A host string to set on RPC calls.
request_serializers: A dictionary from service name-method name pair to
request serialization behavior.
response_deserializers: A dictionary from service name-method name pair to
response deserialization behavior.
metadata_transformer: A callable that given a metadata object produces
another metadata object to be used in the underlying communication on the
wire.
thread_pool: A thread pool to use in stubs.
thread_pool_size: The size of thread pool to create for use in stubs;
ignored if thread_pool has been passed.
Returns:
A StubOptions value created from the passed parameters.
"""
return StubOptions(
host, request_serializers, response_deserializers,
metadata_transformer, thread_pool, thread_pool_size)
def generic_stub(channel, options=None):
"""Creates a face.GenericStub on which RPCs can be made.
Args:
channel: A Channel for use by the created stub.
options: A StubOptions customizing the created stub.
Returns:
A face.GenericStub on which RPCs can be made.
"""
effective_options = _EMPTY_STUB_OPTIONS if options is None else options
return _stub.generic_stub(
channel._intermediary_low_channel, effective_options.host, # pylint: disable=protected-access
effective_options.metadata_transformer,
effective_options.request_serializers,
effective_options.response_deserializers, effective_options.thread_pool,
effective_options.thread_pool_size)
def dynamic_stub(channel, service, cardinalities, options=None):
"""Creates a face.DynamicStub with which RPCs can be invoked.
Args:
channel: A Channel for the returned face.DynamicStub to use.
service: The package-qualified full name of the service.
cardinalities: A dictionary from RPC method name to cardinality.Cardinality
value identifying the cardinality of the RPC method.
options: An optional StubOptions value further customizing the functionality
of the returned face.DynamicStub.
Returns:
A face.DynamicStub with which RPCs can be invoked.
"""
  effective_options = _EMPTY_STUB_OPTIONS if options is None else options
return _stub.dynamic_stub(
channel._intermediary_low_channel, effective_options.host, service, # pylint: disable=protected-access
cardinalities, effective_options.metadata_transformer,
effective_options.request_serializers,
effective_options.response_deserializers, effective_options.thread_pool,
effective_options.thread_pool_size)
class ServerCredentials(object):
"""A value encapsulating the data required to open a secure port on a Server.
This class and its instances have no supported interface - it exists to define
the type of its instances and its instances exist to be passed to other
functions.
"""
def __init__(self, low_credentials):
self._low_credentials = low_credentials
def ssl_server_credentials(
private_key_certificate_chain_pairs, root_certificates=None,
require_client_auth=False):
"""Creates a ServerCredentials for use with an SSL-enabled Server.
Args:
private_key_certificate_chain_pairs: A nonempty sequence each element of
which is a pair the first element of which is a PEM-encoded private key
and the second element of which is the corresponding PEM-encoded
certificate chain.
root_certificates: PEM-encoded client root certificates to be used for
verifying authenticated clients. If omitted, require_client_auth must also
be omitted or be False.
require_client_auth: A boolean indicating whether or not to require clients
to be authenticated. May only be True if root_certificates is not None.
Returns:
A ServerCredentials for use with an SSL-enabled Server.
"""
if len(private_key_certificate_chain_pairs) == 0:
raise ValueError(
        'At least one private key-certificate chain pair is required!')
elif require_client_auth and root_certificates is None:
raise ValueError(
'Illegal to require client auth without providing root certificates!')
else:
return ServerCredentials(_low.server_credentials_ssl(
root_certificates, private_key_certificate_chain_pairs,
require_client_auth))
class ServerOptions(object):
"""A value encapsulating the various options for creation of a Server.
This class and its instances have no supported interface - it exists to define
the type of its instances and its instances exist to be passed to other
functions.
"""
def __init__(
self, multi_method_implementation, request_deserializers,
response_serializers, thread_pool, thread_pool_size, default_timeout,
maximum_timeout):
self.multi_method_implementation = multi_method_implementation
self.request_deserializers = request_deserializers
self.response_serializers = response_serializers
self.thread_pool = thread_pool
self.thread_pool_size = thread_pool_size
self.default_timeout = default_timeout
self.maximum_timeout = maximum_timeout
_EMPTY_SERVER_OPTIONS = ServerOptions(
None, None, None, None, None, None, None)
def server_options(
multi_method_implementation=None, request_deserializers=None,
response_serializers=None, thread_pool=None, thread_pool_size=None,
default_timeout=None, maximum_timeout=None):
"""Creates a ServerOptions value to be passed at server creation.
All parameters are optional and should always be passed by keyword.
Args:
multi_method_implementation: A face.MultiMethodImplementation to be called
to service an RPC if the server has no specific method implementation for
the name of the RPC for which service was requested.
request_deserializers: A dictionary from service name-method name pair to
request deserialization behavior.
response_serializers: A dictionary from service name-method name pair to
response serialization behavior.
thread_pool: A thread pool to use in stubs.
thread_pool_size: The size of thread pool to create for use in stubs;
ignored if thread_pool has been passed.
default_timeout: A duration in seconds to allow for RPC service when
servicing RPCs that did not include a timeout value when invoked.
maximum_timeout: A duration in seconds to allow for RPC service when
servicing RPCs no matter what timeout value was passed when the RPC was
invoked.
Returns:
    A ServerOptions value created from the passed parameters.
"""
return ServerOptions(
multi_method_implementation, request_deserializers, response_serializers,
thread_pool, thread_pool_size, default_timeout, maximum_timeout)
def server(service_implementations, options=None):
"""Creates an interfaces.Server with which RPCs can be serviced.
Args:
service_implementations: A dictionary from service name-method name pair to
face.MethodImplementation.
options: An optional ServerOptions value further customizing the
functionality of the returned Server.
Returns:
An interfaces.Server with which RPCs can be serviced.
"""
effective_options = _EMPTY_SERVER_OPTIONS if options is None else options
return _server.server(
service_implementations, effective_options.multi_method_implementation,
effective_options.request_deserializers,
effective_options.response_serializers, effective_options.thread_pool,
effective_options.thread_pool_size, effective_options.default_timeout,
effective_options.maximum_timeout)
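# A hedged usage sketch of the module above (guarded so it only runs when this
# file is executed directly): open an insecure channel and build a generic
# stub. 'localhost' and 50051 are placeholders and a running gRPC server is
# assumed; only constructors defined in this module are exercised.
if __name__ == '__main__':
    example_channel = insecure_channel('localhost', 50051)
    example_stub = generic_stub(example_channel)
    # The secure variant composes credentials first; ssl_channel_credentials()
    # above accepts None for the private key and certificate chain:
    # example_creds = ssl_channel_credentials(root_certificates_pem, None, None)
    # example_channel = secure_channel('localhost', 50051, example_creds)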
|
{
"content_hash": "dbab315e400126cf41dcb607bef75405",
"timestamp": "",
"source": "github",
"line_count": 396,
"max_line_length": 114,
"avg_line_length": 38.47979797979798,
"alnum_prop": 0.7437327733298333,
"repo_name": "msiedlarek/grpc",
"id": "a0ca330d2c02bda4883a5643daaf08b1c0edf53d",
"size": "16767",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "src/python/grpcio/grpc/beta/implementations.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "9457"
},
{
"name": "C",
"bytes": "4479055"
},
{
"name": "C#",
"bytes": "1016752"
},
{
"name": "C++",
"bytes": "1213494"
},
{
"name": "DTrace",
"bytes": "147"
},
{
"name": "JavaScript",
"bytes": "255752"
},
{
"name": "Makefile",
"bytes": "539537"
},
{
"name": "Objective-C",
"bytes": "254538"
},
{
"name": "PHP",
"bytes": "93145"
},
{
"name": "Protocol Buffer",
"bytes": "95361"
},
{
"name": "Python",
"bytes": "1562726"
},
{
"name": "Ruby",
"bytes": "425758"
},
{
"name": "Shell",
"bytes": "36138"
},
{
"name": "Swift",
"bytes": "5279"
}
],
"symlink_target": ""
}
|
__author__ = 'Joe Linn'
import unittest
import pylastica
from tests.base import Base
class FilteredTest(unittest.TestCase, Base):
def test_filtered(self):
client = self._get_client()
index = client.get_index('test')
index.create(options=True)
doc_type = index.get_doc_type('helloworld')
doc_type.add_document(pylastica.Document(1, {'id': 1, 'email': 'test@test.com', 'username': 'bobloblaw', 'test': ['2', '3', '5']}))
doc_type.add_document(pylastica.Document(2, {'id': 2, 'email': 'test@test.com', 'username': 'frank', 'test': ['2', '3', '5']}))
index.refresh()
query_string = pylastica.query.QueryString('test*')
filter1 = pylastica.filter.Term().set_term('username', 'frank')
filter2 = pylastica.filter.Term().set_term('username', 'asdfasedf')
query1 = pylastica.query.Filtered(query_string, filter1)
query2 = pylastica.query.Filtered(query_string, filter2)
result_set = doc_type.search(query_string)
self.assertEqual(2, len(result_set))
result_set = doc_type.search(query1)
self.assertEqual(1, len(result_set))
result_set = doc_type.search(query2)
self.assertEqual(0, len(result_set))
index.delete()
if __name__ == '__main__':
unittest.main()
|
{
"content_hash": "cdb64b0b496b5387c87b3657af4a7dca",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 139,
"avg_line_length": 35.5945945945946,
"alnum_prop": 0.6203492786636294,
"repo_name": "jlinn/pylastica",
"id": "3b5b015aea6fb7103f9136aa14f83c5391479957",
"size": "1317",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/query/test_filtered.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Perl",
"bytes": "6774"
},
{
"name": "Python",
"bytes": "547260"
},
{
"name": "Shell",
"bytes": "1771"
}
],
"symlink_target": ""
}
|
from py4j.protocol import Py4JJavaError
from pyflink.table import expressions as expr
from pyflink.table.window import Session, Slide, Tumble, Over
from pyflink.testing.test_case_utils import PyFlinkStreamTableTestCase, \
PyFlinkBatchTableTestCase
class StreamTableWindowTests(PyFlinkStreamTableTestCase):
def test_over_window(self):
t_env = self.t_env
t = t_env.from_elements([(1, 1, "Hello")], ['a', 'b', 'c'])
result = t.over_window(
Over.partition_by(t.c)
.order_by(t.a)
.preceding(expr.row_interval(2))
.following(expr.CURRENT_ROW)
.alias("w"))
self.assertRaisesRegex(
Py4JJavaError, "Ordering must be defined on a time attribute",
result.select, expr.col("b").sum.over(expr.col("w")))
class BatchTableWindowTests(PyFlinkBatchTableTestCase):
def test_tumble_window(self):
t = self.t_env.from_elements([(1, 1, "Hello")], ["a", "b", "c"])
result = t.window(Tumble.over(expr.row_interval(2)).on(expr.col("a")).alias("w"))\
.group_by(expr.col('w'), expr.col('c')).select(t.b.sum)
query_operation = result._j_table.getQueryOperation().getChildren().get(0)
self.assertEqual('[c]', query_operation.getGroupingExpressions().toString())
self.assertEqual('TumbleWindow(field: [a], size: [2])',
query_operation.getGroupWindow().asSummaryString())
def test_slide_window(self):
t = self.t_env.from_elements([(1000, 1, "Hello")], ["a", "b", "c"])
result = t.window(Slide.over(expr.lit(2).seconds).every(expr.lit(1).seconds).on(t.a)
.alias("w")).group_by(expr.col('w'), expr.col('c')).select(t.b.sum)
query_operation = result._j_table.getQueryOperation().getChildren().get(0)
self.assertEqual('[c]', query_operation.getGroupingExpressions().toString())
self.assertEqual('SlideWindow(field: [a], slide: [1000], size: [2000])',
query_operation.getGroupWindow().asSummaryString())
def test_session_window(self):
t = self.t_env.from_elements([(1000, 1, "Hello")], ["a", "b", "c"])
result = t.window(Session.with_gap(expr.lit(1).seconds).on(t.a).alias("w"))\
.group_by(expr.col('w'), expr.col('c')).select(t.b.sum)
query_operation = result._j_table.getQueryOperation().getChildren().get(0)
self.assertEqual('[c]', query_operation.getGroupingExpressions().toString())
self.assertEqual('SessionWindow(field: [a], gap: [1000])',
query_operation.getGroupWindow().asSummaryString())
if __name__ == '__main__':
import unittest
try:
import xmlrunner
testRunner = xmlrunner.XMLTestRunner(output='target/test-reports')
except ImportError:
testRunner = None
unittest.main(testRunner=testRunner, verbosity=2)
|
{
"content_hash": "4e5c839a887cf46ae0c1a7a1e5a65d83",
"timestamp": "",
"source": "github",
"line_count": 68,
"max_line_length": 93,
"avg_line_length": 43.19117647058823,
"alnum_prop": 0.6142322097378277,
"repo_name": "gyfora/flink",
"id": "26bd9a4519313e5f20a2c40df37726a148ce9598",
"size": "3896",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "flink-python/pyflink/table/tests/test_window.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "20596"
},
{
"name": "Batchfile",
"bytes": "1863"
},
{
"name": "C",
"bytes": "847"
},
{
"name": "Cython",
"bytes": "130661"
},
{
"name": "Dockerfile",
"bytes": "5563"
},
{
"name": "FreeMarker",
"bytes": "92068"
},
{
"name": "GAP",
"bytes": "139514"
},
{
"name": "HTML",
"bytes": "154937"
},
{
"name": "HiveQL",
"bytes": "119516"
},
{
"name": "Java",
"bytes": "92548821"
},
{
"name": "JavaScript",
"bytes": "7038"
},
{
"name": "Less",
"bytes": "68979"
},
{
"name": "Makefile",
"bytes": "5134"
},
{
"name": "Python",
"bytes": "2574816"
},
{
"name": "Scala",
"bytes": "10552608"
},
{
"name": "Shell",
"bytes": "521488"
},
{
"name": "TypeScript",
"bytes": "311274"
},
{
"name": "q",
"bytes": "9630"
}
],
"symlink_target": ""
}
|
"""
Synchronisation, Controllers
@author: Dominic König <dominic[at]aidiq[dot]com>
"""
module = request.controller
prefix = "sync" # common table prefix
module_name = T("Synchronization")
# -----------------------------------------------------------------------------
def index():
""" Module's Home Page """
response.title = module_name
return dict(module_name=module_name)
# -----------------------------------------------------------------------------
def config():
""" Synchronization Settings Controller """
resourcename = "config"
# Get the record ID of the first and only record
s3mgr.load("sync_config")
table = db.sync_config
record = db().select(table.id, limitby=(0, 1)).first()
if not record:
record_id = table.insert()
else:
record_id = record.id
# Can't do anything else than update here
r = s3mgr.parse_request(args=[str(record_id), "update"],
extension="html")
output = r(list_btn=None)
return output
# -----------------------------------------------------------------------------
def repository():
""" Repository Management Controller """
resourcename = "repository"
tabs = [(T("Configuration"), None),
(T("Resources"), "task"),
(T("Schedule"), "job"),
(T("Log"), "log")
]
s3mgr.model.set_method("sync", "repository",
method="register",
action=s3mgr.sync)
def prep(r):
if r.interactive:
if r.component and r.id:
if r.component.alias == "job":
current.s3task.configure_tasktable_crud(
function="sync_synchronize",
args = [r.id],
vars = dict(user_id = auth.user.id))
response.s3.cancel = URL(c="sync", f="repository",
args=[str(r.id), r.component.alias])
return True
response.s3.prep = prep
def postp(r, output):
if r.interactive and r.id:
if r.component and r.component.alias == "job":
response.s3.actions = [
dict(label=str(T("Reset")),
_class="action-btn",
url=URL(c="sync", f="repository",
args=[str(r.id), "job", "[id]", "reset"]))
]
s3_action_buttons(r)
return output
response.s3.postp = postp
rheader = lambda r: s3db.sync_rheader(r, tabs=tabs)
output = s3_rest_controller(prefix, resourcename, rheader=rheader)
return output
# -----------------------------------------------------------------------------
def sync():
""" Synchronization """
if "resource" in request.get_vars:
tablename = request.get_vars["resource"]
if "_" in tablename:
# URL variables from peer
_vars = request.get_vars
get_vars=Storage(include_deleted=True)
if "repository" in _vars:
get_vars.update(repository=_vars["repository"])
if "msince" in _vars:
get_vars.update(msince=_vars["msince"])
# Request
prefix, name = tablename.split("_", 1)
r = s3mgr.parse_request(prefix=prefix,
name=name,
args=["sync"],
get_vars=get_vars)
# Response
output = r()
return output
raise HTTP(400, body=s3mgr.ERROR.BAD_REQUEST)
# -----------------------------------------------------------------------------
def log():
""" Log Reader """
resourcename = "log"
if "return" in request.get_vars:
there = request.get_vars["return"]
c, f = there.split(".", 1)
list_btn = URL(c=c, f=f,
args="sync_log")
else:
list_btn = URL(c="sync", f="log",
vars=request.get_vars)
list_btn = A(T("List all Entries"), _href=list_btn, _class="action-btn")
output = s3_rest_controller(prefix, resourcename,
subtitle=None,
rheader=s3base.S3SyncLog.rheader,
list_btn=list_btn)
return output
# END =========================================================================
|
{
"content_hash": "f692640fc1b37beed767fc46aa1d9490",
"timestamp": "",
"source": "github",
"line_count": 139,
"max_line_length": 79,
"avg_line_length": 33.194244604316545,
"alnum_prop": 0.43541395752058953,
"repo_name": "flavour/ssf",
"id": "4342bf3ece005f7cacc7ddb3a97371a515a8768a",
"size": "4640",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "controllers/sync.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "9763120"
},
{
"name": "PHP",
"bytes": "15220"
},
{
"name": "Python",
"bytes": "21558751"
},
{
"name": "Shell",
"bytes": "1171"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.db import migrations, models
def remove_duplicate_pathtype(apps, schema_editor):
PathType = apps.get_model('compose', 'PathType')
ComposeRelPath = apps.get_model('compose', 'ComposeRelPath')
qs = PathType.objects.values('name')
uniq = set([])
dup = set([])
for x in qs:
if x['name'] not in uniq:
uniq.add(x['name'])
else:
dup.add(x['name'])
for name in dup:
q = PathType.objects.filter(name=name)
pt = q.first()
crps = ComposeRelPath.objects.filter(type__name=name)
for crp in crps:
crp.type = pt
crp.save()
for p in q.exclude(id=pt.id):
p.delete()
class Migration(migrations.Migration):
dependencies = [
('compose', '0010_auto_20160407_1322'),
]
operations = [
migrations.RunPython(remove_duplicate_pathtype, reverse_code=migrations.RunPython.noop),
]
|
{
"content_hash": "e389eac9470e1fe00e363468f61d4f30",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 96,
"avg_line_length": 29.848484848484848,
"alnum_prop": 0.5949238578680203,
"repo_name": "product-definition-center/product-definition-center",
"id": "1142667bce7d1694f575a31fd0d65516095001e6",
"size": "1009",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "pdc/apps/compose/migrations/0011_remove_duplicate_pathtypes.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2911"
},
{
"name": "HTML",
"bytes": "57193"
},
{
"name": "JavaScript",
"bytes": "7479"
},
{
"name": "Makefile",
"bytes": "2774"
},
{
"name": "Python",
"bytes": "1692817"
},
{
"name": "Shell",
"bytes": "1013"
}
],
"symlink_target": ""
}
|
from os.path import abspath
from sys import path
path.insert(0, abspath('../..'))
import settings
|
{
"content_hash": "1928950323bcf16cd9a0b288a26c4280",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 32,
"avg_line_length": 19.8,
"alnum_prop": 0.7272727272727273,
"repo_name": "kug3lblitz/Heat-Replay",
"id": "20bb760ee7d82f232df9e4f16c643498ddf41d1c",
"size": "99",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/code/db/analytics/context.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "890554"
},
{
"name": "Python",
"bytes": "52104"
}
],
"symlink_target": ""
}
|
__author__ = 'stefan'
from ediplug.smartplug import SmartPlug
|
{
"content_hash": "d74814e8043737e6ec63bbe8cded4e01",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 39,
"avg_line_length": 21,
"alnum_prop": 0.746031746031746,
"repo_name": "wendlers/ediplug-py",
"id": "8f7125fe7d8b4846394308fc4a0a469d0dd0bf5f",
"size": "63",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/ediplug/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "54256"
}
],
"symlink_target": ""
}
|
from PyQt4.QtGui import *
from PyQt4.QtCore import *
import sys
import qrc_resource
QTextCodec.setCodecForTr(QTextCodec.codecForName("utf8"))
class MainWindow(QMainWindow):
def __init__(self,parent=None):
super(MainWindow,self).__init__(parent)
self.setWindowTitle("QMainWindow")
self.text=QTextEdit()
self.setCentralWidget(self.text)
self.createActions()
self.createMenus()
self.createToolBars()
def createActions(self):
self.fileOpenAction=QAction(QIcon(":/fileopen.png"),self.tr("打开"),self)
self.fileOpenAction.setShortcut("Ctrl+O")
self.fileOpenAction.setStatusTip(self.tr("打开一个文件"))
self.connect(self.fileOpenAction,SIGNAL("triggered()"),self.slotOpenFile)
self.fileNewAction=QAction(QIcon(":/filenew.png"),self.tr("新建"),self)
self.fileNewAction.setShortcut("Ctrl+N")
self.fileNewAction.setStatusTip(self.tr("新建一个文件"))
self.connect(self.fileNewAction,SIGNAL("triggered()"),self.slotNewFile)
self.fileSaveAction=QAction(QIcon(":/filesave.png"),self.tr("保存"),self)
self.fileSaveAction.setShortcut("Ctrl+S")
self.fileSaveAction.setStatusTip(self.tr("保存文件"))
self.connect(self.fileSaveAction,SIGNAL("triggered()"),self.slotSaveFile)
self.exitAction=QAction(QIcon(":/filequit.png"),self.tr("退出"),self)
self.exitAction.setShortcut("Ctrl+Q")
        self.exitAction.setStatusTip(self.tr("退出"))
self.connect(self.exitAction,SIGNAL("triggered()"),self.close)
self.cutAction=QAction(QIcon(":/editcut.png"),self.tr("剪切"),self)
self.cutAction.setShortcut("Ctrl+X")
self.cutAction.setStatusTip(self.tr("剪切到粘贴板"))
self.connect(self.cutAction,SIGNAL("triggered()"),self.text.cut)
self.copyAction=QAction(QIcon(":/editcopy.png"),self.tr("复制"),self)
self.copyAction.setShortcut("Ctrl+C")
self.copyAction.setStatusTip(self.tr("复制到粘贴板"))
self.connect(self.copyAction,SIGNAL("triggered()"),self.text.copy)
self.pasteAction=QAction(QIcon(":/editpaste.png"),self.tr("粘贴"),self)
self.pasteAction.setShortcut("Ctrl+V")
self.pasteAction.setStatusTip(self.tr("粘贴内容到当前处"))
self.connect(self.pasteAction,SIGNAL("triggered()"),self.text.paste)
self.aboutAction=QAction(self.tr("关于"),self)
self.connect(self.aboutAction,SIGNAL("triggered()"),self.slotAbout)
def createMenus(self):
fileMenu=self.menuBar().addMenu(self.tr("文件"))
fileMenu.addAction(self.fileNewAction)
fileMenu.addAction(self.fileOpenAction)
fileMenu.addAction(self.fileSaveAction)
fileMenu.addAction(self.exitAction)
editMenu=self.menuBar().addMenu(self.tr("编辑"))
editMenu.addAction(self.copyAction)
editMenu.addAction(self.cutAction)
editMenu.addAction(self.pasteAction)
aboutMenu=self.menuBar().addMenu(self.tr("帮助"))
aboutMenu.addAction(self.aboutAction)
def createToolBars(self):
fileToolBar=self.addToolBar("File")
fileToolBar.addAction(self.fileNewAction)
fileToolBar.addAction(self.fileOpenAction)
fileToolBar.addAction(self.fileSaveAction)
editTool=self.addToolBar("Edit")
editTool.addAction(self.copyAction)
editTool.addAction(self.cutAction)
editTool.addAction(self.pasteAction)
def slotNewFile(self):
newWin=MainWindow()
newWin.show()
def slotOpenFile(self):
fileName=QFileDialog.getOpenFileName(self)
if fileName.isEmpty()==False:
if self.text.document().isEmpty():
self.loadFile(fileName)
else:
newWin=MainWindow()
newWin.show()
newWin.loadFile(fileName)
def loadFile(self,fileName):
file=QFile(fileName)
if file.open(QIODevice.ReadOnly|QIODevice.Text):
textStream=QTextStream(file)
while textStream.atEnd()==False:
self.text.append(textStream.readLine())
def slotSaveFile(self):
pass
def slotAbout(self):
QMessageBox.about("about me",self.tr("这是我们的第一个例子"))
app=QApplication(sys.argv)
main=MainWindow()
main.show()
app.exec_()
|
{
"content_hash": "3cc977328ca7a3917aa8cbc4a02b00c0",
"timestamp": "",
"source": "github",
"line_count": 114,
"max_line_length": 81,
"avg_line_length": 37.64035087719298,
"alnum_prop": 0.6569564204148217,
"repo_name": "ptphp/PyLib",
"id": "3115b7eea3cd4b8928de0223b7576e18b83f26f5",
"size": "4455",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/fangte/test1.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "1523"
},
{
"name": "C++",
"bytes": "7541"
},
{
"name": "CSS",
"bytes": "625731"
},
{
"name": "JavaScript",
"bytes": "4811257"
},
{
"name": "PHP",
"bytes": "34868"
},
{
"name": "Python",
"bytes": "3824172"
},
{
"name": "Ruby",
"bytes": "322"
},
{
"name": "SQL",
"bytes": "685656"
},
{
"name": "Shell",
"bytes": "4143"
}
],
"symlink_target": ""
}
|
from gpiozero import MotionSensor
import sys
def write_log(file_name, value):
out_file = open(file_name, 'w')
out_file.write(value)
out_file.close()
if len(sys.argv) != 2:
print("[!] please enter a output filename!")
exit(1)
pir = MotionSensor(4)
noticed = False
filename = sys.argv[1]
while True:
if pir.motion_detected:
if not noticed:
write_log(filename, '1')
noticed = True
else:
write_log(filename, '')
noticed = False
|
{
"content_hash": "5ae7633aa380e6ed70c47d5a4c5f080f",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 48,
"avg_line_length": 20.24,
"alnum_prop": 0.6027667984189723,
"repo_name": "syxanash/joshua_bot",
"id": "57f27b43cd9a383325316d8b4af13314e4b3a14b",
"size": "506",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "other/util_scripts/sensor_script.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "655"
},
{
"name": "Ruby",
"bytes": "52507"
}
],
"symlink_target": ""
}
|
import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
''' MAIN FUNCTION '''
def main():
# Grab 'data' from Demisto Arguments
data = demisto.args()['data']
# Encode the data, ignoring characters
try:
encoded_data = data.encode('ascii', 'ignore').decode("utf-8")
except Exception as e:
return_error(f'There was an error encoding the data.\nError:\n{str(e)}')
# Output the data and add results to war room
return_results(CommandResults(
readable_output=f'Success: {encoded_data}',
outputs_prefix='asciiencode.encoded',
outputs=encoded_data))
''' ENTRY POINT '''
if __name__ in ('__main__', '__builtin__', 'builtins'):
main()
|
{
"content_hash": "79b68886ca3a6eb7d246c68b1ac68d9e",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 80,
"avg_line_length": 28.384615384615383,
"alnum_prop": 0.6341463414634146,
"repo_name": "demisto/content",
"id": "391f0c36e523834d5b31fdd34ddbbb1a0ac6e95e",
"size": "738",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Packs/CommonScripts/Scripts/EncodeToAscii/EncodeToAscii.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "2146"
},
{
"name": "HTML",
"bytes": "205901"
},
{
"name": "JavaScript",
"bytes": "1584075"
},
{
"name": "PowerShell",
"bytes": "442288"
},
{
"name": "Python",
"bytes": "47881712"
},
{
"name": "Rich Text Format",
"bytes": "480911"
},
{
"name": "Shell",
"bytes": "108066"
},
{
"name": "YARA",
"bytes": "1185"
}
],
"symlink_target": ""
}
|
import io
import os
import sys
from typing import Generator
from typing import TextIO
import pytest
from _pytest.config import Config
from _pytest.config.argparsing import Parser
from _pytest.nodes import Item
from _pytest.stash import StashKey
fault_handler_stderr_key = StashKey[TextIO]()
fault_handler_originally_enabled_key = StashKey[bool]()
def pytest_addoption(parser: Parser) -> None:
help = (
"Dump the traceback of all threads if a test takes "
"more than TIMEOUT seconds to finish"
)
parser.addini("faulthandler_timeout", help, default=0.0)
def pytest_configure(config: Config) -> None:
import faulthandler
stderr_fd_copy = os.dup(get_stderr_fileno())
config.stash[fault_handler_stderr_key] = open(stderr_fd_copy, "w")
config.stash[fault_handler_originally_enabled_key] = faulthandler.is_enabled()
faulthandler.enable(file=config.stash[fault_handler_stderr_key])
def pytest_unconfigure(config: Config) -> None:
import faulthandler
faulthandler.disable()
# Close the dup file installed during pytest_configure.
if fault_handler_stderr_key in config.stash:
config.stash[fault_handler_stderr_key].close()
del config.stash[fault_handler_stderr_key]
if config.stash.get(fault_handler_originally_enabled_key, False):
# Re-enable the faulthandler if it was originally enabled.
faulthandler.enable(file=get_stderr_fileno())
def get_stderr_fileno() -> int:
try:
fileno = sys.stderr.fileno()
# The Twisted Logger will return an invalid file descriptor since it is not backed
# by an FD. So, let's also forward this to the same code path as with pytest-xdist.
if fileno == -1:
raise AttributeError()
return fileno
except (AttributeError, io.UnsupportedOperation):
# pytest-xdist monkeypatches sys.stderr with an object that is not an actual file.
# https://docs.python.org/3/library/faulthandler.html#issue-with-file-descriptors
# This is potentially dangerous, but the best we can do.
return sys.__stderr__.fileno()
def get_timeout_config_value(config: Config) -> float:
return float(config.getini("faulthandler_timeout") or 0.0)
@pytest.hookimpl(hookwrapper=True, trylast=True)
def pytest_runtest_protocol(item: Item) -> Generator[None, None, None]:
timeout = get_timeout_config_value(item.config)
stderr = item.config.stash[fault_handler_stderr_key]
if timeout > 0 and stderr is not None:
import faulthandler
faulthandler.dump_traceback_later(timeout, file=stderr)
try:
yield
finally:
faulthandler.cancel_dump_traceback_later()
else:
yield
@pytest.hookimpl(tryfirst=True)
def pytest_enter_pdb() -> None:
"""Cancel any traceback dumping due to timeout before entering pdb."""
import faulthandler
faulthandler.cancel_dump_traceback_later()
@pytest.hookimpl(tryfirst=True)
def pytest_exception_interact() -> None:
"""Cancel any traceback dumping due to an interactive exception being
raised."""
import faulthandler
faulthandler.cancel_dump_traceback_later()
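# The plugin above is a thin wrapper over the stdlib faulthandler module; users
# arm it per test via the faulthandler_timeout ini option registered in
# pytest_addoption(). A minimal standalone sketch of the underlying mechanism
# (the 2-second timeout and the sleep are illustrative values only):
if __name__ == "__main__":
    import faulthandler
    import time

    faulthandler.dump_traceback_later(2.0, file=sys.stderr)  # arm the watchdog
    try:
        time.sleep(0.1)  # the "test" finishes in time, so nothing is dumped
    finally:
        faulthandler.cancel_dump_traceback_later()  # disarm, as the hookwrapper does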
|
{
"content_hash": "245826fef8f0d25bed7d0408ed9a5796",
"timestamp": "",
"source": "github",
"line_count": 97,
"max_line_length": 91,
"avg_line_length": 32.845360824742265,
"alnum_prop": 0.7033898305084746,
"repo_name": "RonnyPfannschmidt/pytest",
"id": "b9c925582ca0e2a5275a258740ab9f2e856fc7d5",
"size": "3186",
"binary": false,
"copies": "3",
"ref": "refs/heads/main",
"path": "src/_pytest/faulthandler.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Gherkin",
"bytes": "192"
},
{
"name": "Python",
"bytes": "2604880"
}
],
"symlink_target": ""
}
|
from swgpy.object import *
def create(kernel):
result = Intangible()
result.template = "object/draft_schematic/space/weapon/shared_shield_effectiveness_intensifier_mk3.iff"
result.attribute_template_id = -1
result.stfName("string_id_table","")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
|
{
"content_hash": "d9638cf7cd90f22494a70e5aa9fc8026",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 104,
"avg_line_length": 25.615384615384617,
"alnum_prop": 0.7117117117117117,
"repo_name": "obi-two/Rebelion",
"id": "6082a4780a782e3b9e0c69552bf6399aa14cf959",
"size": "478",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "data/scripts/templates/object/draft_schematic/space/weapon/shared_shield_effectiveness_intensifier_mk3.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "11818"
},
{
"name": "C",
"bytes": "7699"
},
{
"name": "C++",
"bytes": "2293610"
},
{
"name": "CMake",
"bytes": "39727"
},
{
"name": "PLSQL",
"bytes": "42065"
},
{
"name": "Python",
"bytes": "7499185"
},
{
"name": "SQLPL",
"bytes": "41864"
}
],
"symlink_target": ""
}
|
import logging
def setup_logging():
logging.basicConfig(level=logging.DEBUG)
formatter = logging.Formatter('#ts:%(asctime)s#level:%(levelname)s#name:%(name)s%(message)s')
err_handler = logging.FileHandler('errorlog-server.log', mode='w', delay=True)
err_handler.setLevel(logging.ERROR)
err_handler.setFormatter(formatter)
logging.getLogger('').addHandler(err_handler)
fh = logging.FileHandler('server.log')
fh.setLevel(logging.DEBUG)
fh.setFormatter(formatter)
logging.getLogger('').addHandler(fh)
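# Usage sketch, assuming the module is importable as util.logger_factory (the
# path recorded in the metadata below) or executed directly; call once at
# process start-up, then log through the standard logging API.
if __name__ == '__main__':
    setup_logging()
    log = logging.getLogger(__name__)
    log.debug('recorded in server.log by the DEBUG file handler')
    log.error('also appended to errorlog-server.log by the ERROR handler')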
|
{
"content_hash": "cb89cd91a26cc5b611993067ab4fa043",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 97,
"avg_line_length": 31.88235294117647,
"alnum_prop": 0.7158671586715867,
"repo_name": "chrido/i13monserver",
"id": "bfee1cb2ec1b7ef8c3198856e9b1f1dd4da96a0a",
"size": "542",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "util/logger_factory.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "24054"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals, division, absolute_import
import logging
from flexget import plugin
from flexget.event import event
from flexget.entry import Entry
from flexget.utils.imdb import make_url as make_imdb_url
try:
from flexget.plugins.filter.movie_queue import queue_get
except ImportError:
raise plugin.DependencyError(issued_by='emit_movie_queue', missing='movie_queue')
log = logging.getLogger('emit_movie_queue')
class EmitMovieQueue(object):
"""Use your movie queue as an input by emitting the content of it"""
schema = {
'oneOf': [
{'type': 'boolean'},
{
'type': 'object',
'properties': {
'year': {'type': 'boolean'},
'quality': {'type': 'boolean'},
},
'additionalProperties': False
}
]
}
def prepare_config(self, config):
if isinstance(config, bool):
config = {}
config.setdefault('year', True)
config.setdefault('quality', False)
return config
def on_task_input(self, task, config):
if not config:
return
config = self.prepare_config(config)
entries = []
for queue_item in queue_get(session=task.session):
entry = Entry()
# make sure the entry has IMDB fields filled
entry['url'] = ''
if queue_item.imdb_id:
entry['imdb_id'] = queue_item.imdb_id
entry['imdb_url'] = make_imdb_url(queue_item.imdb_id)
if queue_item.tmdb_id:
entry['tmdb_id'] = queue_item.tmdb_id
plugin.get_plugin_by_name('tmdb_lookup').instance.lookup(entry)
# check if title is a imdb url (leftovers from old database?)
# TODO: maybe this should be fixed at the queue_get ...
if 'http://' in queue_item.title:
log.debug('queue contains url instead of title')
if entry.get('movie_name'):
entry['title'] = entry['movie_name']
else:
log.error('Found imdb url in imdb queue, but lookup failed: %s' % entry['title'])
continue
else:
# normal title
entry['title'] = queue_item.title
# Add the year and quality if configured to
if config.get('year') and entry.get('movie_year'):
entry['title'] += ' %s' % entry['movie_year']
# TODO: qualities can now be ranges.. how should we handle this?
if config.get('quality') and queue_item.quality != 'ANY':
log.info('quality option of emit_movie_queue is disabled while we figure out how to handle ranges')
#entry['title'] += ' %s' % queue_item.quality
entries.append(entry)
            log.debug('Added title and IMDB id to new entry: %s - %s' %
                      (entry['title'], entry.get('imdb_id')))
return entries
@event('plugin.register')
def register_plugin():
plugin.register(EmitMovieQueue, 'emit_movie_queue', api_ver=2)
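# Illustrative sketch, not part of the original plugin: how prepare_config normalises
# the two config shapes the schema above allows (a bare boolean or an options object).
# The dict values simply restate the defaults set in prepare_config.
def _example_prepare_config():
    plugin_instance = EmitMovieQueue()
    assert plugin_instance.prepare_config(True) == {'year': True, 'quality': False}
    assert plugin_instance.prepare_config({'quality': True}) == {'year': True, 'quality': True}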
|
{
"content_hash": "c5ebe19e30923924318022ef5728b6e7",
"timestamp": "",
"source": "github",
"line_count": 86,
"max_line_length": 115,
"avg_line_length": 36.895348837209305,
"alnum_prop": 0.5534194768358021,
"repo_name": "tvcsantos/Flexget",
"id": "fb5d4f618ac5f18e0dd9c6637dbe1f2a1115bfac",
"size": "3173",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "flexget/plugins/input/emit_movie_queue.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "56725"
},
{
"name": "JavaScript",
"bytes": "455222"
},
{
"name": "Python",
"bytes": "1957167"
}
],
"symlink_target": ""
}
|
"""
Current WsgiDAV version number.
http://peak.telecommunity.com/DevCenter/setuptools#specifying-your-project-s-version
http://peak.telecommunity.com/DevCenter/setuptools#tagging-and-daily-build-or-snapshot-releases
Example "1.2.0b1", "1.2.0pre1" or "1.2.0"
"""
__version__ = "1.2.1pre1"
|
{
"content_hash": "8f154615aa4449f2cf5f5308c731296c",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 95,
"avg_line_length": 37.25,
"alnum_prop": 0.7315436241610739,
"repo_name": "StraNNiKK/wsgidav",
"id": "277ea8d33a50272749e9a89ca60762a82ae03090",
"size": "298",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "wsgidav/version.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "741528"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.contrib import auth
from django.contrib.auth.base_user import AbstractBaseUser, BaseUserManager
from django.contrib.auth.signals import user_logged_in
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import PermissionDenied
from django.core.mail import send_mail
from django.db import models
from django.db.models.manager import EmptyManager
from django.utils import six, timezone
from django.utils.deprecation import CallableFalse, CallableTrue
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from .validators import ASCIIUsernameValidator, UnicodeUsernameValidator
def update_last_login(sender, user, **kwargs):
"""
A signal receiver which updates the last_login date for
the user logging in.
"""
user.last_login = timezone.now()
user.save(update_fields=['last_login'])
user_logged_in.connect(update_last_login)
class PermissionManager(models.Manager):
use_in_migrations = True
def get_by_natural_key(self, codename, app_label, model):
return self.get(
codename=codename,
content_type=ContentType.objects.db_manager(self.db).get_by_natural_key(app_label, model),
)
@python_2_unicode_compatible
class Permission(models.Model):
"""
The permissions system provides a way to assign permissions to specific
users and groups of users.
The permission system is used by the Django admin site, but may also be
useful in your own code. The Django admin site uses permissions as follows:
- The "add" permission limits the user's ability to view the "add" form
and add an object.
- The "change" permission limits a user's ability to view the change
list, view the "change" form and change an object.
- The "delete" permission limits the ability to delete an object.
Permissions are set globally per type of object, not per specific object
instance. It is possible to say "Mary may change news stories," but it's
not currently possible to say "Mary may change news stories, but only the
ones she created herself" or "Mary may only change news stories that have a
certain status or publication date."
Three basic permissions -- add, change and delete -- are automatically
created for each Django model.
"""
name = models.CharField(_('name'), max_length=255)
content_type = models.ForeignKey(
ContentType,
models.CASCADE,
verbose_name=_('content type'),
)
codename = models.CharField(_('codename'), max_length=100)
objects = PermissionManager()
class Meta:
verbose_name = _('permission')
verbose_name_plural = _('permissions')
unique_together = (('content_type', 'codename'),)
ordering = ('content_type__app_label', 'content_type__model',
'codename')
def __str__(self):
return "%s | %s | %s" % (
six.text_type(self.content_type.app_label),
six.text_type(self.content_type),
six.text_type(self.name))
def natural_key(self):
return (self.codename,) + self.content_type.natural_key()
natural_key.dependencies = ['contenttypes.contenttype']
class GroupManager(models.Manager):
"""
The manager for the auth's Group model.
"""
use_in_migrations = True
def get_by_natural_key(self, name):
return self.get(name=name)
@python_2_unicode_compatible
class Group(models.Model):
"""
Groups are a generic way of categorizing users to apply permissions, or
some other label, to those users. A user can belong to any number of
groups.
A user in a group automatically has all the permissions granted to that
group. For example, if the group Site editors has the permission
can_edit_home_page, any user in that group will have that permission.
Beyond permissions, groups are a convenient way to categorize users to
apply some label, or extended functionality, to them. For example, you
could create a group 'Special users', and you could write code that would
do special things to those users -- such as giving them access to a
members-only portion of your site, or sending them members-only email
messages.
"""
name = models.CharField(_('name'), max_length=80, unique=True)
permissions = models.ManyToManyField(
Permission,
verbose_name=_('permissions'),
blank=True,
)
objects = GroupManager()
class Meta:
verbose_name = _('group')
verbose_name_plural = _('groups')
def __str__(self):
return self.name
def natural_key(self):
return (self.name,)
class UserManager(BaseUserManager):
use_in_migrations = True
def _create_user(self, username, email, password, **extra_fields):
"""
Creates and saves a User with the given username, email and password.
"""
if not username:
raise ValueError('The given username must be set')
email = self.normalize_email(email)
username = self.model.normalize_username(username)
user = self.model(username=username, email=email, **extra_fields)
user.set_password(password)
user.save(using=self._db)
return user
def create_user(self, username, email=None, password=None, **extra_fields):
extra_fields.setdefault('is_staff', False)
extra_fields.setdefault('is_superuser', False)
return self._create_user(username, email, password, **extra_fields)
def create_superuser(self, username, email, password, **extra_fields):
extra_fields.setdefault('is_staff', True)
extra_fields.setdefault('is_superuser', True)
if extra_fields.get('is_staff') is not True:
raise ValueError('Superuser must have is_staff=True.')
if extra_fields.get('is_superuser') is not True:
raise ValueError('Superuser must have is_superuser=True.')
return self._create_user(username, email, password, **extra_fields)
# A few helper functions for common logic between User and AnonymousUser.
def _user_get_all_permissions(user, obj):
permissions = set()
for backend in auth.get_backends():
if hasattr(backend, "get_all_permissions"):
permissions.update(backend.get_all_permissions(user, obj))
return permissions
def _user_has_perm(user, perm, obj):
"""
A backend can raise `PermissionDenied` to short-circuit permission checking.
"""
for backend in auth.get_backends():
if not hasattr(backend, 'has_perm'):
continue
try:
if backend.has_perm(user, perm, obj):
return True
except PermissionDenied:
return False
return False
def _user_has_module_perms(user, app_label):
"""
A backend can raise `PermissionDenied` to short-circuit permission checking.
"""
for backend in auth.get_backends():
if not hasattr(backend, 'has_module_perms'):
continue
try:
if backend.has_module_perms(user, app_label):
return True
except PermissionDenied:
return False
return False
class PermissionsMixin(models.Model):
"""
A mixin class that adds the fields and methods necessary to support
Django's Group and Permission model using the ModelBackend.
"""
is_superuser = models.BooleanField(
_('superuser status'),
default=False,
help_text=_(
'Designates that this user has all permissions without '
'explicitly assigning them.'
),
)
groups = models.ManyToManyField(
Group,
verbose_name=_('groups'),
blank=True,
help_text=_(
'The groups this user belongs to. A user will get all permissions '
'granted to each of their groups.'
),
related_name="user_set",
related_query_name="user",
)
user_permissions = models.ManyToManyField(
Permission,
verbose_name=_('user permissions'),
blank=True,
help_text=_('Specific permissions for this user.'),
related_name="user_set",
related_query_name="user",
)
class Meta:
abstract = True
def get_group_permissions(self, obj=None):
"""
Returns a list of permission strings that this user has through their
groups. This method queries all available auth backends. If an object
is passed in, only permissions matching this object are returned.
"""
permissions = set()
for backend in auth.get_backends():
if hasattr(backend, "get_group_permissions"):
permissions.update(backend.get_group_permissions(self, obj))
return permissions
def get_all_permissions(self, obj=None):
return _user_get_all_permissions(self, obj)
def has_perm(self, perm, obj=None):
"""
Returns True if the user has the specified permission. This method
queries all available auth backends, but returns immediately if any
backend returns True. Thus, a user who has permission from a single
auth backend is assumed to have permission in general. If an object is
provided, permissions for this specific object are checked.
"""
# Active superusers have all permissions.
if self.is_active and self.is_superuser:
return True
# Otherwise we need to check the backends.
return _user_has_perm(self, perm, obj)
def has_perms(self, perm_list, obj=None):
"""
Returns True if the user has each of the specified permissions. If
object is passed, it checks if the user has all required perms for this
object.
"""
for perm in perm_list:
if not self.has_perm(perm, obj):
return False
return True
def has_module_perms(self, app_label):
"""
Returns True if the user has any permissions in the given app label.
Uses pretty much the same logic as has_perm, above.
"""
# Active superusers have all permissions.
if self.is_active and self.is_superuser:
return True
return _user_has_module_perms(self, app_label)
class AbstractUser(AbstractBaseUser, PermissionsMixin):
"""
An abstract base class implementing a fully featured User model with
admin-compliant permissions.
Username and password are required. Other fields are optional.
"""
username_validator = UnicodeUsernameValidator() if six.PY3 else ASCIIUsernameValidator()
username = models.CharField(
_('username'),
max_length=150,
unique=True,
help_text=_('Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.'),
validators=[username_validator],
error_messages={
'unique': _("A user with that username already exists."),
},
)
first_name = models.CharField(_('first name'), max_length=30, blank=True)
last_name = models.CharField(_('last name'), max_length=30, blank=True)
email = models.EmailField(_('email address'), blank=True)
is_staff = models.BooleanField(
_('staff status'),
default=False,
help_text=_('Designates whether the user can log into this admin site.'),
)
is_active = models.BooleanField(
_('active'),
default=True,
help_text=_(
'Designates whether this user should be treated as active. '
'Unselect this instead of deleting accounts.'
),
)
date_joined = models.DateTimeField(_('date joined'), default=timezone.now)
objects = UserManager()
USERNAME_FIELD = 'username'
REQUIRED_FIELDS = ['email']
class Meta:
verbose_name = _('user')
verbose_name_plural = _('users')
abstract = True
def clean(self):
super(AbstractUser, self).clean()
self.email = self.__class__.objects.normalize_email(self.email)
def get_full_name(self):
"""
Returns the first_name plus the last_name, with a space in between.
"""
full_name = '%s %s' % (self.first_name, self.last_name)
return full_name.strip()
def get_short_name(self):
"Returns the short name for the user."
return self.first_name
def email_user(self, subject, message, from_email=None, **kwargs):
"""
Sends an email to this User.
"""
send_mail(subject, message, from_email, [self.email], **kwargs)
class User(AbstractUser):
"""
Users within the Django authentication system are represented by this
model.
Username, password and email are required. Other fields are optional.
"""
class Meta(AbstractUser.Meta):
swappable = 'AUTH_USER_MODEL'
@python_2_unicode_compatible
class AnonymousUser(object):
id = None
pk = None
username = ''
is_staff = False
is_active = False
is_superuser = False
_groups = EmptyManager(Group)
_user_permissions = EmptyManager(Permission)
def __init__(self):
pass
def __str__(self):
return 'AnonymousUser'
def __eq__(self, other):
return isinstance(other, self.__class__)
def __ne__(self, other):
return not self.__eq__(other)
def __hash__(self):
return 1 # instances always return the same hash value
def save(self):
raise NotImplementedError("Django doesn't provide a DB representation for AnonymousUser.")
def delete(self):
raise NotImplementedError("Django doesn't provide a DB representation for AnonymousUser.")
def set_password(self, raw_password):
raise NotImplementedError("Django doesn't provide a DB representation for AnonymousUser.")
def check_password(self, raw_password):
raise NotImplementedError("Django doesn't provide a DB representation for AnonymousUser.")
def _get_groups(self):
return self._groups
groups = property(_get_groups)
def _get_user_permissions(self):
return self._user_permissions
user_permissions = property(_get_user_permissions)
def get_group_permissions(self, obj=None):
return set()
def get_all_permissions(self, obj=None):
return _user_get_all_permissions(self, obj=obj)
def has_perm(self, perm, obj=None):
return _user_has_perm(self, perm, obj=obj)
def has_perms(self, perm_list, obj=None):
for perm in perm_list:
if not self.has_perm(perm, obj):
return False
return True
def has_module_perms(self, module):
return _user_has_module_perms(self, module)
@property
def is_anonymous(self):
return CallableTrue
@property
def is_authenticated(self):
return CallableFalse
def get_username(self):
return self.username
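# Illustrative sketch, not part of the original module: the permission-checking API
# described in the PermissionsMixin docstrings above. The username and the
# 'polls.add_choice' permission string are placeholders; call _example_permissions()
# from code that runs with Django configured and the auth tables migrated.
def _example_permissions():
    user = User.objects.get(username='alice')
    if user.has_perm('polls.add_choice'):
        # has_perm asks every configured auth backend and stops at the first True
        print('alice may add choices')
    # get_all_permissions returns a set of '<app_label>.<codename>' strings
    print(user.get_all_permissions())
    # has_perms is all-or-nothing across the listed permissions
    print(user.has_perms(['polls.add_choice', 'polls.change_choice']))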
|
{
"content_hash": "4a125896184bf8f90407f06294391311",
"timestamp": "",
"source": "github",
"line_count": 453,
"max_line_length": 102,
"avg_line_length": 33.3355408388521,
"alnum_prop": 0.6463148135885041,
"repo_name": "rhertzog/django",
"id": "725563424a0cfad04d59566d4109f5dd489fcb9f",
"size": "15101",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "django/contrib/auth/models.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "52530"
},
{
"name": "HTML",
"bytes": "173554"
},
{
"name": "JavaScript",
"bytes": "451012"
},
{
"name": "Makefile",
"bytes": "125"
},
{
"name": "Python",
"bytes": "11981119"
},
{
"name": "Shell",
"bytes": "809"
},
{
"name": "Smarty",
"bytes": "130"
}
],
"symlink_target": ""
}
|
import clr
clr.AddReference('RevitAPI')
from Autodesk.Revit.DB import *
faminsts = IN[0]
elementlist = list()
for item in faminsts:
try:
n = UnwrapElement(item).Name
except:
try:
# for parameters...
n = UnwrapElement(item).Definition.Name
except:
n = None
# Use a built-in property of (wrapped) Dynamo elements for family symbols instead
	if n is None:
try:
n = item.Name
except:
n = []
elementlist.append(n)
OUT = elementlist
|
{
"content_hash": "d73076b9066166b204aac08fb9f17d8d",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 82,
"avg_line_length": 19.82608695652174,
"alnum_prop": 0.6864035087719298,
"repo_name": "dimven/ClockworkForDynamo",
"id": "35f872e321501703e7fd2c2807e5a635f2c73a72",
"size": "456",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "nodes/0.8.x/python/Element.Name (Universal).py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "316146"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Story',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=160, verbose_name='Başlık')),
('text', models.TextField(verbose_name='Hikaye')),
('category', models.CharField(choices=[('Funny', 'Funny'), ('Mysterious', 'Mysterious')], max_length=15, verbose_name='Kategori')),
('language', models.CharField(choices=[('En', 'English'), ('Tr', 'Turkish')], max_length=10, verbose_name='İçerik Dili')),
('upvotes', models.IntegerField(default=0, verbose_name='Beğenme')),
('downvotes', models.IntegerField(default=0, verbose_name='Beğenmeme')),
('urlcode', models.CharField(blank=True, max_length=7, unique=True, verbose_name='Link Kodu')),
('active', models.BooleanField(default=True, verbose_name='Yayında')),
('created', models.DateTimeField(auto_now_add=True)),
('modified', models.DateTimeField(auto_now=True)),
('reports', models.IntegerField(default=0, verbose_name='Raporlanma')),
('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name_plural': 'Stories',
'ordering': ['created'],
},
),
]
|
{
"content_hash": "86e189c7c2d2936527c9255324e1229d",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 147,
"avg_line_length": 47.05128205128205,
"alnum_prop": 0.5940054495912807,
"repo_name": "OrhanOdabasi/weirdbutreal",
"id": "9dc274b0fe11932b8f53d1849d4b17d6fa7b0e06",
"size": "1915",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "story/migrations/0001_initial.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "11023"
},
{
"name": "HTML",
"bytes": "60259"
},
{
"name": "Python",
"bytes": "104338"
}
],
"symlink_target": ""
}
|
import subprocess
from setuptools import setup, find_packages
LONG_DESCRIPTION = """
Django REST app to help track page/screen views (and other events) in
offline-capable websites/web-apps/hybrid apps.
"""
def parse_markdown_readme():
"""
Convert README.md to RST via pandoc, and load into memory
(fallback to LONG_DESCRIPTION on failure)
"""
try:
subprocess.call(
['pandoc', '-t', 'rst', '-o', 'README.rst', 'README.md']
)
except OSError:
return LONG_DESCRIPTION
# Attempt to load output
try:
readme = open('README.rst')
except IOError:
return LONG_DESCRIPTION
else:
return readme.read()
setup(
name='owl',
version='0.1.0-dev',
author='S. Andrew Sheppard',
author_email='andrew@wq.io',
url='https://github.com/wq/offline-web-log',
license='MIT',
packages=['owl', 'owl.migrations'],
description=LONG_DESCRIPTION.strip(),
long_description=parse_markdown_readme(),
install_requires=[
        'djangorestframework',  # >=2.4
'djangorestframework-bulk',
'swapper',
],
classifiers=[
'Framework :: Django',
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: Log Analysis',
'Topic :: System :: Logging',
'Topic :: Scientific/Engineering :: Visualization',
],
test_suite='tests',
tests_require=['ua_parser'],
)
|
{
"content_hash": "17e6790a116cd99a6a5b9888bc987196",
"timestamp": "",
"source": "github",
"line_count": 61,
"max_line_length": 69,
"avg_line_length": 28.721311475409838,
"alnum_prop": 0.5976027397260274,
"repo_name": "wq/offline-website-logger",
"id": "0fb91a4ea2b9b40ae28e8ee50442ff5249e4bec7",
"size": "1752",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "18527"
},
{
"name": "Shell",
"bytes": "143"
}
],
"symlink_target": ""
}
|
"""Tests for the tags of the ``image_gallery`` app."""
from django.contrib.auth.models import AnonymousUser
from django.contrib.sessions.middleware import SessionMiddleware
from django.template.context import RequestContext
from django.test import TestCase
from django.test.client import RequestFactory
from model_bakery import baker
from ..templatetags.image_gallery_tags import render_pictures
class RenderPicturesTestCase(TestCase):
"""Tests for the ``render_pictures`` tag."""
longMessage = True
def setUp(self):
# create context mock
request = RequestFactory().get('/')
request.user = AnonymousUser()
SessionMiddleware().process_request(request)
request.session.save()
self.context = RequestContext(request)
self.gallery = baker.make('image_gallery.Gallery', is_published=True,
folder=baker.make('filer.Folder'))
def test_tag(self):
# Returns None, because of an invalid selection name
self.assertFalse(render_pictures(self.context, selection='fail'))
# Returns empty queryset
self.assertFalse(render_pictures(self.context).get('pictures'))
# Returns two pictures
baker.make('filer.Image', folder=self.gallery.folder)
baker.make('filer.Image', folder=self.gallery.folder)
self.assertEqual(
render_pictures(self.context).get('pictures').count(), 2)
# Returns one picture, because amount was set to `1`
baker.make('filer.Image', folder=self.gallery.folder)
baker.make('filer.Image', folder=self.gallery.folder)
self.assertEqual(render_pictures(self.context, 'recent', 1).get(
'pictures').count(), 1)
# Returns three random pictures
self.assertEqual(
render_pictures(self.context, 'random').get('pictures').count(), 3)
|
{
"content_hash": "ca9cf1e27a512133a7e43166cd067cad",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 79,
"avg_line_length": 39.270833333333336,
"alnum_prop": 0.6726790450928382,
"repo_name": "bitmazk/cmsplugin-image-gallery",
"id": "f5906c3d459bb3049869f5511b3b2305b578fe90",
"size": "1885",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "image_gallery/tests/tags_tests.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "2579"
},
{
"name": "Python",
"bytes": "86952"
}
],
"symlink_target": ""
}
|
import os
from unittest import TestCase,mock
from tiingo.wsclient import TiingoWebsocketClient
from tiingo.exceptions import MissingRequiredArgumentError
class TestRestClientWithSession(TestCase):
def setUp(self):
def msg_cb(msg):
print(msg)
self.cb=msg_cb
self.config = {
'eventName':'subscribe',
'authorization':os.getenv("TIINGO_API_KEY"),
#see https://api.tiingo.com/documentation/websockets/iex > Request for more info
'eventData': {
'thresholdLevel':5
}
}
# test for missing or incorrectly supplied endpoints
def test_missing_or_wrong_endpoint(self):
with self.assertRaises(AttributeError) as ex:
TiingoWebsocketClient(config=self.config,on_msg_cb=self.cb)
self.assertTrue(type(ex.exception)==AttributeError)
with self.assertRaises(AttributeError) as ex:
TiingoWebsocketClient(config=self.config,endpoint='wq',on_msg_cb=self.cb)
self.assertTrue(type(ex.exception)==AttributeError)
# test for missing callback argument
def test_missing_msg_cb(self):
with self.assertRaises(MissingRequiredArgumentError) as ex:
TiingoWebsocketClient(config=self.config,endpoint='iex')
self.assertTrue(type(ex.exception)==MissingRequiredArgumentError)
# test for missing API keys in config dict and in os env
def test_missing_api_key(self):
with mock.patch.dict(os.environ, {}, clear=True): #clear env vars including the TIINGO_API_KEY
with self.assertRaises(RuntimeError) as ex:
TiingoWebsocketClient(config={},endpoint='iex',on_msg_cb=self.cb)
self.assertTrue(type(ex.exception)==RuntimeError)
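# Illustrative sketch, not part of the original tests: the client construction these
# tests exercise, using the same config shape built in setUp. A real TIINGO_API_KEY is
# required for an actual connection; call _example_client() manually to try it.
def _example_client():
    def on_msg(msg):
        print(msg)
    ws_config = {
        'eventName': 'subscribe',
        'authorization': os.getenv('TIINGO_API_KEY'),
        'eventData': {'thresholdLevel': 5},
    }
    return TiingoWebsocketClient(config=ws_config, endpoint='iex', on_msg_cb=on_msg)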
|
{
"content_hash": "1c4181261506ed125af0110966d293cd",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 102,
"avg_line_length": 40.38636363636363,
"alnum_prop": 0.6702307259425999,
"repo_name": "hydrosquall/tiingo-python",
"id": "af3b7b8b2860aef2ee32692d328eaa9572f883ac",
"size": "1777",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_wsclient.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "2375"
},
{
"name": "Python",
"bytes": "56798"
},
{
"name": "Shell",
"bytes": "87"
}
],
"symlink_target": ""
}
|
import glob
import json
import os
import yaml
_vendors_path = os.path.dirname(os.path.realpath(__file__))
_vendor_defaults = None
def get_profile(profile_name):
global _vendor_defaults
if _vendor_defaults is None:
_vendor_defaults = {}
for vendor in glob.glob(os.path.join(_vendors_path, '*.yaml')):
with open(vendor, 'r') as f:
vendor_data = yaml.safe_load(f)
_vendor_defaults[vendor_data['name']] = vendor_data['profile']
for vendor in glob.glob(os.path.join(_vendors_path, '*.json')):
with open(vendor, 'r') as f:
vendor_data = json.load(f)
_vendor_defaults[vendor_data['name']] = vendor_data['profile']
return _vendor_defaults.get(profile_name)
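# Illustrative sketch, not part of the original module: get_profile() lazily loads every
# *.yaml and *.json file next to this package and indexes the 'profile' mapping by the
# 'name' key. 'rackspace' is only an example; an unknown name returns None.
def _example_lookup():
    profile = get_profile('rackspace')
    if profile is None:
        print('no bundled defaults for that cloud')
    else:
        print(sorted(profile))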
|
{
"content_hash": "47f417b6ae2c70984ba9cbe59a54d51c",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 78,
"avg_line_length": 33.78260869565217,
"alnum_prop": 0.6023166023166023,
"repo_name": "dtroyer/os-client-config",
"id": "3e1d20a5a848da78d12ce62de7ad5745f7f7fc2c",
"size": "1388",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "os_client_config/vendors/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "182550"
}
],
"symlink_target": ""
}
|
"""A base class session manager.
Authors:
* Zach Sailer
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2013 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
import uuid
import sqlite3
from tornado import web
from IPython.config.configurable import LoggingConfigurable
from IPython.utils.py3compat import unicode_type
#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
class SessionManager(LoggingConfigurable):
# Session database initialized below
_cursor = None
_connection = None
_columns = {'session_id', 'name', 'path', 'kernel_id'}
@property
def cursor(self):
"""Start a cursor and create a database called 'session'"""
if self._cursor is None:
self._cursor = self.connection.cursor()
self._cursor.execute("""CREATE TABLE session
(session_id, name, path, kernel_id)""")
return self._cursor
@property
def connection(self):
"""Start a database connection"""
if self._connection is None:
self._connection = sqlite3.connect(':memory:')
self._connection.row_factory = self.row_factory
return self._connection
def __del__(self):
"""Close connection once SessionManager closes"""
self.cursor.close()
def session_exists(self, name, path):
"""Check to see if the session for a given notebook exists"""
self.cursor.execute("SELECT * FROM session WHERE name=? AND path=?", (name, path))
reply = self.cursor.fetchone()
if reply is None:
return False
else:
return True
def new_session_id(self):
"Create a uuid for a new session"
return unicode_type(uuid.uuid4())
def create_session(self, name=None, path=None, kernel_id=None):
"""Creates a session and returns its model"""
session_id = self.new_session_id()
return self.save_session(session_id, name=name, path=path, kernel_id=kernel_id)
def save_session(self, session_id, name=None, path=None, kernel_id=None):
"""Saves the items for the session with the given session_id
Given a session_id (and any other of the arguments), this method
creates a row in the sqlite session database that holds the information
for a session.
Parameters
----------
session_id : str
uuid for the session; this method must be given a session_id
name : str
the .ipynb notebook name that started the session
path : str
the path to the named notebook
kernel_id : str
a uuid for the kernel associated with this session
Returns
-------
model : dict
a dictionary of the session model
"""
self.cursor.execute("INSERT INTO session VALUES (?,?,?,?)",
(session_id, name, path, kernel_id)
)
return self.get_session(session_id=session_id)
def get_session(self, **kwargs):
"""Returns the model for a particular session.
Takes a keyword argument and searches for the value in the session
database, then returns the rest of the session's info.
Parameters
----------
**kwargs : keyword argument
must be given one of the keywords and values from the session database
(i.e. session_id, name, path, kernel_id)
Returns
-------
model : dict
returns a dictionary that includes all the information from the
session described by the kwarg.
"""
if not kwargs:
raise TypeError("must specify a column to query")
conditions = []
for column in kwargs.keys():
if column not in self._columns:
                raise TypeError("No such column: %r" % column)
conditions.append("%s=?" % column)
query = "SELECT * FROM session WHERE %s" % (' AND '.join(conditions))
self.cursor.execute(query, list(kwargs.values()))
model = self.cursor.fetchone()
if model is None:
q = []
for key, value in kwargs.items():
q.append("%s=%r" % (key, value))
raise web.HTTPError(404, u'Session not found: %s' % (', '.join(q)))
return model
def update_session(self, session_id, **kwargs):
"""Updates the values in the session database.
Changes the values of the session with the given session_id
with the values from the keyword arguments.
Parameters
----------
session_id : str
a uuid that identifies a session in the sqlite3 database
**kwargs : str
the key must correspond to a column title in session database,
and the value replaces the current value in the session
with session_id.
"""
self.get_session(session_id=session_id)
if not kwargs:
# no changes
return
sets = []
for column in kwargs.keys():
if column not in self._columns:
raise TypeError("No such column: %r" % column)
sets.append("%s=?" % column)
query = "UPDATE session SET %s WHERE session_id=?" % (', '.join(sets))
self.cursor.execute(query, list(kwargs.values()) + [session_id])
@staticmethod
def row_factory(cursor, row):
"""Takes sqlite database session row and turns it into a dictionary"""
row = sqlite3.Row(cursor, row)
model = {
'id': row['session_id'],
'notebook': {
'name': row['name'],
'path': row['path']
},
'kernel': {
'id': row['kernel_id'],
}
}
return model
def list_sessions(self):
"""Returns a list of dictionaries containing all the information from
the session database"""
c = self.cursor.execute("SELECT * FROM session")
return list(c.fetchall())
def delete_session(self, session_id):
"""Deletes the row in the session database with given session_id"""
# Check that session exists before deleting
self.get_session(session_id=session_id)
self.cursor.execute("DELETE FROM session WHERE session_id=?", (session_id,))
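# Illustrative sketch, not part of the original module: the session API documented above,
# backed by the in-memory SQLite store. The notebook name, path and kernel id are
# placeholders; call _example_sessions() manually to try it.
def _example_sessions():
    sm = SessionManager()
    model = sm.create_session(name='Untitled0.ipynb', path='/', kernel_id='abc-123')
    # row_factory shapes each row as {'id': ..., 'notebook': {...}, 'kernel': {...}}
    print(model['id'], model['notebook']['name'], model['kernel']['id'])
    sm.update_session(model['id'], kernel_id='def-456')
    print(sm.session_exists('Untitled0.ipynb', '/'))  # True
    print(len(sm.list_sessions()))                    # 1
    sm.delete_session(model['id'])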
|
{
"content_hash": "de7de9acc886d21353a4adb291e7c521",
"timestamp": "",
"source": "github",
"line_count": 199,
"max_line_length": 90,
"avg_line_length": 34.85427135678392,
"alnum_prop": 0.5415224913494809,
"repo_name": "omni5cience/django-inlineformfield",
"id": "ec96778d0258023f1763dfde6686c979192b7605",
"size": "6936",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": ".tox/py27/lib/python2.7/site-packages/IPython/html/services/sessions/sessionmanager.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "43622"
},
{
"name": "Groff",
"bytes": "3667"
},
{
"name": "HTML",
"bytes": "108126"
},
{
"name": "JavaScript",
"bytes": "853457"
},
{
"name": "Python",
"bytes": "10506732"
},
{
"name": "Shell",
"bytes": "3801"
},
{
"name": "Smarty",
"bytes": "21023"
}
],
"symlink_target": ""
}
|
pass
<caret><selection>n = 0
while n:
print("spam")</selection>
pass
|
{
"content_hash": "408adf83fcc9e96c257bd209dcfabc15",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 29,
"avg_line_length": 10.571428571428571,
"alnum_prop": 0.6486486486486487,
"repo_name": "mdaniel/intellij-community",
"id": "635871304aff727052764d6cb7f1ea7bd927e6a8",
"size": "74",
"binary": false,
"copies": "12",
"ref": "refs/heads/master",
"path": "python/testData/mover/multiLineSelectionDifferentIndentLevelsMoveToEmptyLine.py",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
}
|
"""Glance exception subclasses"""
import six
import six.moves.urllib.parse as urlparse
from glance import i18n
_ = i18n._
_FATAL_EXCEPTION_FORMAT_ERRORS = False
class RedirectException(Exception):
def __init__(self, url):
self.url = urlparse.urlparse(url)
class GlanceException(Exception):
"""
Base Glance Exception
To correctly use this class, inherit from it and define
a 'message' property. That message will get printf'd
with the keyword arguments provided to the constructor.
"""
message = _("An unknown exception occurred")
def __init__(self, message=None, *args, **kwargs):
if not message:
message = self.message
try:
if kwargs:
message = message % kwargs
except Exception:
if _FATAL_EXCEPTION_FORMAT_ERRORS:
raise
else:
# at least get the core message out if something happened
pass
self.msg = message
super(GlanceException, self).__init__(message)
def __unicode__(self):
# NOTE(flwang): By default, self.msg is an instance of Message, which
# can't be converted by str(). Based on the definition of
# __unicode__, it should return unicode always.
return six.text_type(self.msg)
class MissingCredentialError(GlanceException):
message = _("Missing required credential: %(required)s")
class BadAuthStrategy(GlanceException):
message = _("Incorrect auth strategy, expected \"%(expected)s\" but "
"received \"%(received)s\"")
class NotFound(GlanceException):
message = _("An object with the specified identifier was not found.")
class BadStoreUri(GlanceException):
message = _("The Store URI was malformed.")
class Duplicate(GlanceException):
message = _("An object with the same identifier already exists.")
class Conflict(GlanceException):
message = _("An object with the same identifier is currently being "
"operated on.")
class StorageQuotaFull(GlanceException):
message = _("The size of the data %(image_size)s will exceed the limit. "
"%(remaining)s bytes remaining.")
class AuthBadRequest(GlanceException):
message = _("Connect error/bad request to Auth service at URL %(url)s.")
class AuthUrlNotFound(GlanceException):
message = _("Auth service at URL %(url)s not found.")
class AuthorizationFailure(GlanceException):
message = _("Authorization failed.")
class NotAuthenticated(GlanceException):
message = _("You are not authenticated.")
class UploadException(GlanceException):
message = _('Image upload problem: %s')
class Forbidden(GlanceException):
message = _("You are not authorized to complete this action.")
class ForbiddenPublicImage(Forbidden):
message = _("You are not authorized to complete this action.")
class ProtectedImageDelete(Forbidden):
message = _("Image %(image_id)s is protected and cannot be deleted.")
class ProtectedMetadefNamespaceDelete(Forbidden):
message = _("Metadata definition namespace %(namespace)s is protected"
" and cannot be deleted.")
class ProtectedMetadefNamespacePropDelete(Forbidden):
message = _("Metadata definition property %(property_name)s is protected"
" and cannot be deleted.")
class ProtectedMetadefObjectDelete(Forbidden):
message = _("Metadata definition object %(object_name)s is protected"
" and cannot be deleted.")
class ProtectedMetadefResourceTypeAssociationDelete(Forbidden):
message = _("Metadata definition resource-type-association"
" %(resource_type)s is protected and cannot be deleted.")
class ProtectedMetadefResourceTypeSystemDelete(Forbidden):
message = _("Metadata definition resource-type %(resource_type_name)s is"
" a seeded-system type and cannot be deleted.")
class ProtectedMetadefTagDelete(Forbidden):
message = _("Metadata definition tag %(tag_name)s is protected"
" and cannot be deleted.")
class Invalid(GlanceException):
message = _("Data supplied was not valid.")
class InvalidSortKey(Invalid):
message = _("Sort key supplied was not valid.")
class InvalidSortDir(Invalid):
message = _("Sort direction supplied was not valid.")
class InvalidPropertyProtectionConfiguration(Invalid):
message = _("Invalid configuration in property protection file.")
class InvalidSwiftStoreConfiguration(Invalid):
message = _("Invalid configuration in glance-swift conf file.")
class InvalidFilterRangeValue(Invalid):
message = _("Unable to filter using the specified range.")
class InvalidOptionValue(Invalid):
message = _("Invalid value for option %(option)s: %(value)s")
class ReadonlyProperty(Forbidden):
message = _("Attribute '%(property)s' is read-only.")
class ReservedProperty(Forbidden):
message = _("Attribute '%(property)s' is reserved.")
class AuthorizationRedirect(GlanceException):
message = _("Redirecting to %(uri)s for authorization.")
class ClientConnectionError(GlanceException):
message = _("There was an error connecting to a server")
class ClientConfigurationError(GlanceException):
message = _("There was an error configuring the client.")
class MultipleChoices(GlanceException):
message = _("The request returned a 302 Multiple Choices. This generally "
"means that you have not included a version indicator in a "
"request URI.\n\nThe body of response returned:\n%(body)s")
class LimitExceeded(GlanceException):
message = _("The request returned a 413 Request Entity Too Large. This "
"generally means that rate limiting or a quota threshold was "
"breached.\n\nThe response body:\n%(body)s")
def __init__(self, *args, **kwargs):
self.retry_after = (int(kwargs['retry']) if kwargs.get('retry')
else None)
super(LimitExceeded, self).__init__(*args, **kwargs)
class ServiceUnavailable(GlanceException):
message = _("The request returned 503 Service Unavailable. This "
"generally occurs on service overload or other transient "
"outage.")
def __init__(self, *args, **kwargs):
self.retry_after = (int(kwargs['retry']) if kwargs.get('retry')
else None)
super(ServiceUnavailable, self).__init__(*args, **kwargs)
class ServerError(GlanceException):
message = _("The request returned 500 Internal Server Error.")
class UnexpectedStatus(GlanceException):
message = _("The request returned an unexpected status: %(status)s."
"\n\nThe response body:\n%(body)s")
class InvalidContentType(GlanceException):
message = _("Invalid content type %(content_type)s")
class BadRegistryConnectionConfiguration(GlanceException):
message = _("Registry was not configured correctly on API server. "
"Reason: %(reason)s")
class BadDriverConfiguration(GlanceException):
message = _("Driver %(driver_name)s could not be configured correctly. "
"Reason: %(reason)s")
class MaxRedirectsExceeded(GlanceException):
message = _("Maximum redirects (%(redirects)s) was exceeded.")
class InvalidRedirect(GlanceException):
message = _("Received invalid HTTP redirect.")
class NoServiceEndpoint(GlanceException):
message = _("Response from Keystone does not contain a Glance endpoint.")
class RegionAmbiguity(GlanceException):
message = _("Multiple 'image' service matches for region %(region)s. This "
"generally means that a region is required and you have not "
"supplied one.")
class WorkerCreationFailure(GlanceException):
message = _("Server worker creation failed: %(reason)s.")
class SchemaLoadError(GlanceException):
message = _("Unable to load schema: %(reason)s")
class InvalidObject(GlanceException):
message = _("Provided object does not match schema "
"'%(schema)s': %(reason)s")
class UnsupportedHeaderFeature(GlanceException):
message = _("Provided header feature is unsupported: %(feature)s")
class ImageSizeLimitExceeded(GlanceException):
message = _("The provided image is too large.")
class ImageMemberLimitExceeded(LimitExceeded):
message = _("The limit has been exceeded on the number of allowed image "
"members for this image. Attempted: %(attempted)s, "
"Maximum: %(maximum)s")
class ImagePropertyLimitExceeded(LimitExceeded):
message = _("The limit has been exceeded on the number of allowed image "
"properties. Attempted: %(attempted)s, Maximum: %(maximum)s")
class ImageTagLimitExceeded(LimitExceeded):
message = _("The limit has been exceeded on the number of allowed image "
"tags. Attempted: %(attempted)s, Maximum: %(maximum)s")
class ImageLocationLimitExceeded(LimitExceeded):
message = _("The limit has been exceeded on the number of allowed image "
"locations. Attempted: %(attempted)s, Maximum: %(maximum)s")
class SIGHUPInterrupt(GlanceException):
message = _("System SIGHUP signal received.")
class RPCError(GlanceException):
message = _("%(cls)s exception was raised in the last rpc call: %(val)s")
class TaskException(GlanceException):
message = _("An unknown task exception occurred")
class BadTaskConfiguration(GlanceException):
message = _("Task was not configured properly")
class ImageNotFound(NotFound):
message = _("Image with the given id %(image_id)s was not found")
class TaskNotFound(TaskException, NotFound):
message = _("Task with the given id %(task_id)s was not found")
class InvalidTaskStatus(TaskException, Invalid):
message = _("Provided status of task is unsupported: %(status)s")
class InvalidTaskType(TaskException, Invalid):
message = _("Provided type of task is unsupported: %(type)s")
class InvalidTaskStatusTransition(TaskException, Invalid):
message = _("Status transition from %(cur_status)s to"
" %(new_status)s is not allowed")
class DuplicateLocation(Duplicate):
message = _("The location %(location)s already exists")
class ImageDataNotFound(NotFound):
message = _("No image data could be found")
class InvalidParameterValue(Invalid):
message = _("Invalid value '%(value)s' for parameter '%(param)s': "
"%(extra_msg)s")
class InvalidImageStatusTransition(Invalid):
message = _("Image status transition from %(cur_status)s to"
" %(new_status)s is not allowed")
class MetadefDuplicateNamespace(Duplicate):
message = _("The metadata definition namespace=%(namespace_name)s"
" already exists.")
class MetadefDuplicateObject(Duplicate):
message = _("A metadata definition object with name=%(object_name)s"
" already exists in namespace=%(namespace_name)s.")
class MetadefDuplicateProperty(Duplicate):
message = _("A metadata definition property with name=%(property_name)s"
" already exists in namespace=%(namespace_name)s.")
class MetadefDuplicateResourceType(Duplicate):
message = _("A metadata definition resource-type with"
" name=%(resource_type_name)s already exists.")
class MetadefDuplicateResourceTypeAssociation(Duplicate):
message = _("The metadata definition resource-type association of"
" resource-type=%(resource_type_name)s to"
" namespace=%(namespace_name)s"
" already exists.")
class MetadefDuplicateTag(Duplicate):
message = _("A metadata tag with name=%(name)s"
" already exists in namespace=%(namespace_name)s.")
class MetadefForbidden(Forbidden):
message = _("You are not authorized to complete this action.")
class MetadefIntegrityError(Forbidden):
message = _("The metadata definition %(record_type)s with"
" name=%(record_name)s not deleted."
" Other records still refer to it.")
class MetadefNamespaceNotFound(NotFound):
message = _("Metadata definition namespace=%(namespace_name)s"
"was not found.")
class MetadefObjectNotFound(NotFound):
message = _("The metadata definition object with"
" name=%(object_name)s was not found in"
" namespace=%(namespace_name)s.")
class MetadefPropertyNotFound(NotFound):
message = _("The metadata definition property with"
" name=%(property_name)s was not found in"
" namespace=%(namespace_name)s.")
class MetadefResourceTypeNotFound(NotFound):
message = _("The metadata definition resource-type with"
" name=%(resource_type_name)s, was not found.")
class MetadefResourceTypeAssociationNotFound(NotFound):
message = _("The metadata definition resource-type association of"
" resource-type=%(resource_type_name)s to"
" namespace=%(namespace_name)s,"
" was not found.")
class MetadefTagNotFound(NotFound):
message = _("The metadata definition tag with"
" name=%(name)s was not found in"
" namespace=%(namespace_name)s.")
class InvalidVersion(Invalid):
message = _("Version is invalid: %(reason)s")
class InvalidArtifactTypePropertyDefinition(Invalid):
message = _("Invalid property definition")
class InvalidArtifactTypeDefinition(Invalid):
message = _("Invalid type definition")
class InvalidArtifactPropertyValue(Invalid):
message = _("Property '%(name)s' may not have value '%(val)s': %(msg)s")
def __init__(self, message=None, *args, **kwargs):
super(InvalidArtifactPropertyValue, self).__init__(message, *args,
**kwargs)
self.name = kwargs.get('name')
self.value = kwargs.get('val')
class ArtifactNotFound(NotFound):
message = _("Artifact with id=%(id)s was not found")
class ArtifactForbidden(Forbidden):
message = _("Artifact with id=%(id)s is not accessible")
class ArtifactDuplicateNameTypeVersion(Duplicate):
message = _("Artifact with the specified type, name and version"
" already exists")
class InvalidArtifactStateTransition(Invalid):
message = _("Artifact cannot change state from %(source)s to %(target)s")
class ArtifactDuplicateDirectDependency(Duplicate):
message = _("Artifact with the specified type, name and version"
" already has the direct dependency=%(dep)s")
class ArtifactDuplicateTransitiveDependency(Duplicate):
message = _("Artifact with the specified type, name and version"
" already has the transitive dependency=%(dep)s")
class ArtifactCircularDependency(Invalid):
message = _("Artifact with a circular dependency can not be created")
class ArtifactUnsupportedPropertyOperator(Invalid):
message = _("Operator %(op)s is not supported")
class ArtifactUnsupportedShowLevel(Invalid):
message = _("Show level %(shl)s is not supported in this operation")
class ArtifactPropertyValueNotFound(NotFound):
message = _("Property's %(prop)s value has not been found")
class ArtifactInvalidProperty(Invalid):
message = _("Artifact has no property %(prop)s")
class ArtifactInvalidPropertyParameter(Invalid):
message = _("Cannot use this parameter with the operator %(op)s")
class ArtifactLoadError(GlanceException):
message = _("Cannot load artifact '%(name)s'")
class ArtifactNonMatchingTypeName(ArtifactLoadError):
message = _(
"Plugin name '%(plugin)s' should match artifact typename '%(name)s'")
class ArtifactPluginNotFound(NotFound):
message = _("No plugin for '%(name)s' has been loaded")
class UnknownArtifactType(NotFound):
message = _("Artifact type with name '%(name)s' and version '%(version)s' "
"is not known")
class ArtifactInvalidStateTransition(Invalid):
message = _("Artifact state cannot be changed from %(curr)s to %(to)s")
class JsonPatchException(GlanceException):
message = _("Invalid jsonpatch request")
class InvalidJsonPatchBody(JsonPatchException):
message = _("The provided body %(body)s is invalid "
"under given schema: %(schema)s")
class InvalidJsonPatchPath(JsonPatchException):
message = _("The provided path '%(path)s' is invalid: %(explanation)s")
def __init__(self, message=None, *args, **kwargs):
self.explanation = kwargs.get("explanation")
super(InvalidJsonPatchPath, self).__init__(message, *args, **kwargs)
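# Illustrative sketch, not part of the original module: the convention described in the
# GlanceException docstring near the top of this file: subclasses define a message
# template and keyword arguments are interpolated into it. The image id is a placeholder.
def _example_message_interpolation():
    try:
        raise ImageNotFound(image_id='deadbeef')
    except NotFound as e:
        print(e.msg)  # "Image with the given id deadbeef was not found"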
|
{
"content_hash": "fe41ef6eb7c4e5c9724d267567b64eb8",
"timestamp": "",
"source": "github",
"line_count": 543,
"max_line_length": 79,
"avg_line_length": 30.813996316758747,
"alnum_prop": 0.6756514463303849,
"repo_name": "ozamiatin/glance",
"id": "b7796fae084ca4bd163aac5ccc5b804b35a7be4d",
"size": "17464",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "glance/common/exception.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "3841301"
},
{
"name": "Shell",
"bytes": "7860"
}
],
"symlink_target": ""
}
|
"""
foorep is a forensic repository
"""
from __future__ import print_function
import foorep
from bson import json_util
import argparse
import json
def main():
"""
The main entry point for the foorep cli tool
"""
def add(args, repo):
try:
doc = repo.insert(args.path)
except IOError:
print('File not found')
return
if args.verbose:
print('%s added to repository with id %s' %
(doc['meta']['filename'], doc['uuid']))
return
def remove(args, repo):
result = repo.remove(args.uuid)
if not result:
print('No such file in the repository')
return
def search(args, repo):
for sample in repo.search(args.hash):
            print(sample['created'].strftime('%Y-%m-%d %H:%M:%S'), sample['uuid'], sample['meta']['filename'])
return
def dump(args, repo):
doc = repo.get(args.uuid)
if not doc:
print('No such file in the repository')
else:
print(json.dumps(doc, indent=1, default=json_util.default))
return
def annotate(args, repo):
annotation = {"type": args.type, "data":args.message}
repo.annotate(args.uuid, annotation)
return
def list(args, repo):
if args.limit:
limit = int(args.limit)
else:
limit = 10
for sample in repo.list(limit=limit):
            print(sample['created'].strftime('%Y-%m-%d %H:%M:%S'), sample['uuid'], sample['meta']['filename'])
repo = foorep.Repository()
parser = argparse.ArgumentParser()
parser.add_argument('-v', '--verbose', action='store_true', help='verbose')
subparsers = parser.add_subparsers(dest='command', title='Commands',
description='These are the commands that I know of..')
parser_add = subparsers.add_parser('add', help='Add sample to repository')
parser_add.add_argument('path', help='Path to file')
parser_add.set_defaults(func=add)
parser_remove = subparsers.add_parser('remove', help='Remove sample from repository')
parser_remove.add_argument('uuid', help='File to remove from the repository')
parser_remove.set_defaults(func=remove)
parser_search = subparsers.add_parser('search', help='Search for sample in repository')
parser_search.add_argument('hash', help='Hash to search for')
parser_search.set_defaults(func=search)
parser_dump = subparsers.add_parser('dump', help='Dump raw JSON document for sample')
parser_dump.add_argument('uuid', help='File to dump')
parser_dump.set_defaults(func=dump)
parser_annotate = subparsers.add_parser('annotate', help='Add annotation to sample')
parser_annotate.add_argument('uuid', help='File to annotate')
parser_annotate.add_argument('-t', '--type', metavar='TYPE', help='Type of annotation')
parser_annotate.add_argument('-m', '--message', metavar='VALUE', help='The content of the annotation')
parser_annotate.set_defaults(func=annotate)
parser_list = subparsers.add_parser('list', help='List sample in repository')
parser_list.add_argument('-l', '--limit', help='Limit amount of records returned, default is 10')
parser_list.set_defaults(func=list)
args = parser.parse_args()
args.func(args, repo)
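# Illustrative sketch, not part of the original module: driving the subcommands wired up
# above through main(). The file path and limit below are placeholders, and a reachable
# foorep.Repository backend is assumed; call _example_cli() manually to try it.
def _example_cli():
    import sys
    sys.argv = ['foorep', 'add', '/tmp/sample.bin']
    main()
    sys.argv = ['foorep', 'list', '--limit', '5']
    main()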
if __name__ == '__main__':
main()
|
{
"content_hash": "93d3a03d1003786dcf6ecd36918c3713",
"timestamp": "",
"source": "github",
"line_count": 87,
"max_line_length": 110,
"avg_line_length": 38.229885057471265,
"alnum_prop": 0.6256764882742033,
"repo_name": "berggren/foorep",
"id": "b87835a1235bce5eeb76fe2f38ed1706ac44ddde",
"size": "3447",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "foorep/cli.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "JavaScript",
"bytes": "58516"
},
{
"name": "Python",
"bytes": "21744"
}
],
"symlink_target": ""
}
|
from django.contrib.auth import views as auth_views
from django.urls import path, re_path, reverse_lazy
from django.views.generic.base import TemplateView
from registration.backends.default.views import ActivationView, RegistrationView, ResendActivationView
from registration.forms import RegistrationFormUniqueEmail
from two_factor.views import BackupTokensView, DisableView, LoginView, PhoneDeleteView, PhoneSetupView, QRGeneratorView, SetupCompleteView, SetupView
from .views import SettingsView
urlpatterns = [
path(
'settings/',
SettingsView.as_view(),
name='account_settings'
),
## USER AUTHENTICATION ##
path(
'logout/',
auth_views.LogoutView.as_view(
template_name='auth/logout.html'
),
name='auth_logout',
),
## MFA ##
path(
'login/',
LoginView.as_view(
template_name='auth/login.html'
),
name='auth_login',
),
path(
'two_factor/setup/',
SetupView.as_view(
template_name='mfa/setup.html',
success_url='setup_complete',
qrcode_url='qr'
),
name='setup'
),
path(
'two_factor/qrcode/',
QRGeneratorView.as_view(),
name='qr',
),
path(
'two_factor/setup/complete/',
SetupCompleteView.as_view(
template_name='mfa/setup_complete.html'
),
name='setup_complete',
),
path(
'two_factor/backup/tokens/',
BackupTokensView.as_view(
template_name='mfa/backup_tokens.html',
success_url='backup_tokens'
),
name='backup_tokens',
),
path(
'two_factor/backup/phone/register/',
PhoneSetupView.as_view(
template_name='mfa/phone_register.html'
),
name='phone_create',
),
path(
'account/two_factor/backup/phone/unregister/<int:pk>/',
PhoneDeleteView.as_view(),
name='phone_delete',
),
path(
'account/two_factor/disable/',
view=DisableView.as_view(
template_name='mfa/disable.html'
),
name='disable',
),
## USER REGISTRATION ##
path(
'register/',
RegistrationView.as_view(
form_class=RegistrationFormUniqueEmail,
template_name='registration/form.html',
),
name='registration_register'
),
path(
'register/complete',
TemplateView.as_view(
template_name='registration/complete.html'
),
name='registration_complete'
),
path(
'activate/complete',
TemplateView.as_view(
template_name='registration/activated.html'
),
name='registration_activation_complete'
),
path(
'activate/resend',
ResendActivationView.as_view(
template_name='registration/resend_activation.html'
),
name='registration_resend_activation'
),
path(
'activate/<activation_key>',
ActivationView.as_view(),
name='registration_activate'
),
## PASSWORD MANAGEMENT ##
path(
'password/change/',
auth_views.PasswordChangeView.as_view(
template_name='password_management/password_change.html',
success_url=reverse_lazy('auth_password_change_done')
),
name='auth_password_change'
),
path(
'password/change/done/',
auth_views.PasswordChangeDoneView.as_view(
template_name='password_management/password_change_done.html'
),
name='auth_password_change_done'
),
path(
'password/reset',
auth_views.PasswordResetView.as_view(
html_email_template_name='email/password_reset.html',
template_name='password_management/password_reset.html'
),
name='auth_password_reset'
),
path(
'password/reset/complete',
auth_views.PasswordResetCompleteView.as_view(
template_name='password_management/password_reset_complete.html'
),
name='auth_password_reset_complete'
),
re_path(
r'password/reset/confirm/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>.+)/$',
auth_views.PasswordResetConfirmView.as_view(
template_name='password_management/password_reset_confirm.html',
success_url=reverse_lazy('auth_password_reset_complete')
),
name='auth_password_reset_confirm'),
path(
'password/reset/done/',
auth_views.PasswordResetDoneView.as_view(
template_name='password_management/password_reset_done.html'
),
name='password_reset_done'
),
]
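# A minimal sketch (not part of this app) of how these URL patterns might be
# mounted from a project-level URLConf; the project layout and the 'account/'
# prefix are assumptions for illustration only:
#
#     from django.urls import include, path
#
#     urlpatterns = [
#         path('account/', include('account.urls')),
#     ]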
|
{
"content_hash": "3a6e1286bfd5901d3089cf2dc3aa6d04",
"timestamp": "",
"source": "github",
"line_count": 186,
"max_line_length": 149,
"avg_line_length": 25.666666666666668,
"alnum_prop": 0.5829493087557603,
"repo_name": "devenney/reverie",
"id": "72bfdc2e1bb4f3e52a2fa1f5593c734ba24c76be",
"size": "4774",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "account/urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "671"
},
{
"name": "HTML",
"bytes": "28380"
},
{
"name": "JavaScript",
"bytes": "655"
},
{
"name": "Python",
"bytes": "40182"
}
],
"symlink_target": ""
}
|
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import logging
import os
import re
from abc import abstractmethod
from collections import defaultdict
from glob import glob1
from twitter.common.collections import OrderedSet
from pants.util.dirutil import safe_walk
from pants.util.meta import AbstractClass
logger = logging.getLogger(__name__)
# Note: Significant effort has been made to keep the types BuildFile, BuildGraph, Address, and
# Target separated appropriately. Don't add references to those other types to this module.
class BuildFile(AbstractClass):
class BuildFileError(Exception):
"""Base class for all exceptions raised in BuildFile to make exception handling easier"""
pass
class MissingBuildFileError(BuildFileError):
"""Raised when a BUILD file cannot be found at the path in the spec."""
pass
class InvalidRootDirError(BuildFileError):
"""Raised when the root_dir specified to a BUILD file is not valid."""
pass
class BadPathError(BuildFileError):
"""Raised when scan_buildfiles is called on a nonexistent directory."""
pass
_BUILD_FILE_PREFIX = 'BUILD'
  _PATTERN = re.compile(r'^{prefix}(\.[a-zA-Z0-9_-]+)?$'.format(prefix=_BUILD_FILE_PREFIX))
# Subclasses must have an _cache field.
@classmethod
def clear_cache(cls):
cls._cache = {}
@classmethod
def from_cache(cls, root_dir, relpath, must_exist=True):
key = (root_dir, relpath, must_exist)
if key not in cls._cache:
cls._cache[key] = cls(*key)
return cls._cache[key]
@abstractmethod
def _glob1(self, path, glob):
"""Returns a list of paths in path that match glob"""
def _get_all_build_files(self, path):
"""Returns all the BUILD files on a path"""
results = []
for build in self._glob1(path, '{prefix}*'.format(prefix=self._BUILD_FILE_PREFIX)):
if self._is_buildfile_name(build) and self._isfile(os.path.join(path, build)):
results.append(build)
return sorted(results)
@classmethod
def _is_buildfile_name(cls, name):
return cls._PATTERN.match(name)
@classmethod
def scan_buildfiles(cls, root_dir, base_path=None, spec_excludes=None):
"""Looks for all BUILD files
:param root_dir: the root of the repo containing sources
:param base_path: directory under root_dir to scan
:param spec_excludes: list of paths to exclude from the scan. These can be absolute paths
or paths that are relative to the root_dir.
"""
def calc_exclude_roots(root_dir, excludes):
"""Return a map of root directories to subdirectory names suitable for a quick evaluation
inside safe_walk()
"""
result = defaultdict(set)
for exclude in excludes:
if exclude:
if os.path.isabs(exclude):
exclude = os.path.realpath(exclude)
else:
exclude = os.path.join(root_dir, exclude)
if exclude.startswith(root_dir):
result[os.path.dirname(exclude)].add(os.path.basename(exclude))
return result
def find_excluded(root, dirs, exclude_roots):
"""Removes any of the directories specified in exclude_roots from dirs.
"""
to_remove = []
for exclude_root in exclude_roots:
# root ends with a /, trim it off
if root.rstrip('/') == exclude_root:
for subdir in exclude_roots[exclude_root]:
if subdir in dirs:
to_remove.append(subdir)
return to_remove
root_dir = os.path.realpath(root_dir)
if base_path and not cls._isdir(os.path.join(root_dir, base_path)):
raise cls.BadPathError('Can only scan directories and {0} is not a valid dir'
.format(base_path))
buildfiles = []
if not spec_excludes:
exclude_roots = {}
else:
exclude_roots = calc_exclude_roots(root_dir, spec_excludes)
for root, dirs, files in cls._walk(root_dir, base_path or '', topdown=True):
to_remove = find_excluded(root, dirs, exclude_roots)
for subdir in to_remove:
dirs.remove(subdir)
for filename in files:
if cls._is_buildfile_name(filename):
buildfile_relpath = os.path.relpath(os.path.join(root, filename), root_dir)
buildfiles.append(cls.from_cache(root_dir, buildfile_relpath))
return OrderedSet(sorted(buildfiles, key=lambda buildfile: buildfile.full_path))
@abstractmethod
def _walk(self, root_dir, relpath, topdown=False):
"""Walk the file tree rooted at `path`. Works like os.walk"""
@classmethod
def _isdir(cls, path):
"""Returns True if path is a directory"""
raise NotImplementedError()
@classmethod
def _isfile(cls, path):
"""Returns True if path is a file"""
raise NotImplementedError()
@classmethod
def _exists(cls, path):
"""Returns True if path exists"""
raise NotImplementedError()
def __init__(self, root_dir, relpath=None, must_exist=True):
"""Creates a BuildFile object representing the BUILD file family at the specified path.
:param string root_dir: The base directory of the project.
:param string relpath: The path relative to root_dir where the BUILD file is found - this can
either point directly at the BUILD file or else to a directory which contains BUILD files.
:param bool must_exist: If True, at least one BUILD file must exist at the given location or
      else a `MissingBuildFileError` is thrown
:raises IOError: if the root_dir path is not absolute.
:raises MissingBuildFileError: if the path does not house a BUILD file and must_exist is `True`.
"""
if not os.path.isabs(root_dir):
raise self.InvalidRootDirError('BuildFile root_dir {root_dir} must be an absolute path.'
.format(root_dir=root_dir))
self.root_dir = os.path.realpath(root_dir)
path = os.path.join(self.root_dir, relpath) if relpath else self.root_dir
self._build_basename = self._BUILD_FILE_PREFIX
buildfile = os.path.join(path, self._build_basename) if self._isdir(path) else path
    # The canonical BUILD file is missing or is a directory, so fall back to any viable
    # suffixed sibling (e.g. BUILD.tools)
if not self._exists(buildfile) or self._isdir(buildfile):
for build in self._get_all_build_files(os.path.dirname(buildfile)):
self._build_basename = build
buildfile = os.path.join(path, self._build_basename)
break
if must_exist:
if not self._exists(buildfile):
raise self.MissingBuildFileError('BUILD file does not exist at: {path}'
.format(path=buildfile))
# If a build file must exist then we want to make sure it's not a dir.
# In other cases we are ok with it being a dir, for example someone might have
# repo/scripts/build/doit.sh.
if self._isdir(buildfile):
raise self.MissingBuildFileError('Path to buildfile ({buildfile}) is a directory, '
'but it must be a file.'.format(buildfile=buildfile))
if not self._is_buildfile_name(os.path.basename(buildfile)):
raise self.MissingBuildFileError('{path} is not a BUILD file'
.format(path=buildfile))
self.full_path = os.path.realpath(buildfile)
self.name = os.path.basename(self.full_path)
self.parent_path = os.path.dirname(self.full_path)
self.relpath = os.path.relpath(self.full_path, self.root_dir)
self.spec_path = os.path.dirname(self.relpath)
def file_exists(self):
"""Returns True if this BuildFile corresponds to a real BUILD file on disk."""
return self._isfile(self.full_path)
def descendants(self, spec_excludes=None):
"""Returns all BUILD files in descendant directories of this BUILD file's parent directory."""
descendants = self.scan_buildfiles(self.root_dir, self.parent_path, spec_excludes=spec_excludes)
for sibling in self.family():
descendants.discard(sibling)
return descendants
def ancestors(self):
"""Returns all BUILD files in ancestor directories of this BUILD file's parent directory."""
def find_parent(dir):
parent = os.path.dirname(dir)
for parent_buildfile in self._get_all_build_files(parent):
buildfile = os.path.join(parent, parent_buildfile)
return parent, self.from_cache(self.root_dir, os.path.relpath(buildfile, self.root_dir))
return parent, None
parent_buildfiles = OrderedSet()
def is_root(path):
return os.path.abspath(self.root_dir) == os.path.abspath(path)
parentdir = os.path.dirname(self.full_path)
visited = set()
while parentdir not in visited and not is_root(parentdir):
visited.add(parentdir)
parentdir, buildfile = find_parent(parentdir)
if buildfile:
parent_buildfiles.update(buildfile.family())
return parent_buildfiles
def siblings(self):
"""Returns an iterator over all the BUILD files co-located with this BUILD file not including
this BUILD file itself"""
for build in self._get_all_build_files(self.parent_path):
if self.name != build:
siblingpath = os.path.join(os.path.dirname(self.relpath), build)
yield self.from_cache(self.root_dir, siblingpath)
def family(self):
"""Returns an iterator over all the BUILD files co-located with this BUILD file including this
BUILD file itself. The family forms a single logical BUILD file composed of the canonical BUILD
file if it exists and sibling build files each with their own extension, eg: BUILD.extras."""
yield self
for sibling in self.siblings():
yield sibling
@abstractmethod
def source(self):
"""Returns the source code for this BUILD file."""
def code(self):
"""Returns the code object for this BUILD file."""
return compile(self.source(), self.full_path, 'exec', flags=0, dont_inherit=True)
def __eq__(self, other):
result = other and (
type(other) == type(self)) and (
self.full_path == other.full_path)
return result
def __hash__(self):
return hash(self.full_path)
def __ne__(self, other):
return not self.__eq__(other)
def __repr__(self):
return '{}({})'.format(self.__class__.__name__, self.full_path)
class FilesystemBuildFile(BuildFile):
# TODO(dturner): this cache should really be in BuildFileAddressMapper, but unfortunately this
# class needs to access it, so it can't be moved yet.
_cache = {}
def _glob1(self, path, glob):
return glob1(path, glob)
def source(self):
"""Returns the source code for this BUILD file."""
with open(self.full_path, 'rb') as source:
return source.read()
@classmethod
def _isdir(cls, path):
return os.path.isdir(path)
@classmethod
def _isfile(cls, path):
return os.path.isfile(path)
@classmethod
def _exists(cls, path):
return os.path.exists(path)
@classmethod
  def _walk(cls, root_dir, relpath, topdown=False):
    path = os.path.join(root_dir, relpath)
    return safe_walk(path, topdown=topdown)
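# A minimal usage sketch (illustrative only; the repository path is an
# assumption). scan_buildfiles() walks the tree below an absolute root and
# returns the BuildFile objects ordered by full_path:
#
#   build_files = FilesystemBuildFile.scan_buildfiles('/absolute/path/to/repo')
#   for build_file in build_files:
#     print(build_file.relpath, [sibling.name for sibling in build_file.family()])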
|
{
"content_hash": "a172379cb59fca32cc072e4deb1a8a37",
"timestamp": "",
"source": "github",
"line_count": 309,
"max_line_length": 100,
"avg_line_length": 35.94822006472492,
"alnum_prop": 0.6702376665466331,
"repo_name": "scode/pants",
"id": "d393862742934181bf136ba8fb983f4d578f2673",
"size": "11255",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "src/python/pants/base/build_file.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "767"
},
{
"name": "CSS",
"bytes": "11139"
},
{
"name": "GAP",
"bytes": "2459"
},
{
"name": "Go",
"bytes": "1437"
},
{
"name": "HTML",
"bytes": "69479"
},
{
"name": "Java",
"bytes": "302900"
},
{
"name": "JavaScript",
"bytes": "10157"
},
{
"name": "Protocol Buffer",
"bytes": "3783"
},
{
"name": "Python",
"bytes": "3788845"
},
{
"name": "Scala",
"bytes": "76623"
},
{
"name": "Shell",
"bytes": "49094"
},
{
"name": "Thrift",
"bytes": "2583"
}
],
"symlink_target": ""
}
|
from office365.onedrive.lists.list import List
from office365.runtime.client_object import ClientObject
from office365.runtime.queries.service_operation import ServiceOperationQuery
from office365.runtime.paths.resource_path import ResourcePath
from office365.sharepoint.webs.web import Web
class RemoteWeb(ClientObject):
"""Specifies a remote web that might be on a different domain."""
def get_list_by_server_relative_url(self, server_relative_url):
"""
Returns the list that is associated with the specified server-relative URL.
        :param str server_relative_url: A string that contains the server-relative URL for a list.
"""
target_list = List(self.context)
qry = ServiceOperationQuery(self, "GetListByServerRelativeUrl", [server_relative_url], None, None, target_list)
self.context.add_query(qry)
return target_list
@staticmethod
def create(context, request_url):
"""
:type context: ClientContext
:type request_url: str
"""
remote_web = RemoteWeb(context)
qry = ServiceOperationQuery(context, None, [request_url], None, None, remote_web)
qry.static = True
context.add_query(qry)
return remote_web
@property
def web(self):
"""Gets the SPWeb."""
return self.properties.get('Web', Web(self.context, ResourcePath("Web", self.resource_path)))
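# A minimal usage sketch; the site URLs are placeholders, authentication is
# omitted, and ClientContext / execute_query() are assumed to come from the
# surrounding office365 package:
#
#     from office365.sharepoint.client_context import ClientContext
#
#     ctx = ClientContext("https://contoso.sharepoint.com/sites/teamA")
#     remote_web = RemoteWeb.create(ctx, "https://contoso.sharepoint.com/sites/teamB")
#     tasks = remote_web.get_list_by_server_relative_url("/sites/teamB/Lists/Tasks")
#     ctx.execute_query()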
|
{
"content_hash": "bb9f79df28e291b5100426f69c9f57a4",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 119,
"avg_line_length": 38.4054054054054,
"alnum_prop": 0.6868402533427164,
"repo_name": "vgrem/Office365-REST-Python-Client",
"id": "cce1a9d4e5f1776ba300fe5d5f0916c43ab9a0ff",
"size": "1421",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "office365/sharepoint/webs/remote_web.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1659292"
}
],
"symlink_target": ""
}
|
"""
Check the estimated waiting time for an identification
This estimate is done by IDNow using the average of a 10 minute sliding
window, rounded to about 15 second intervals and adding a small buffer.
The estimates seem to take your booked SLA times into account when calculating
a default (in case there were no customers in the last 10 minutes).
In their API documentation, IDNow makes clear that this is not to be taken
as a performance indicator or a measurement of their SLA times.
"""
from argparse import ArgumentParser
import logging as log
import requests
__author__ = 'Gregor Tudan'
__version__ = 0.1
def main():
""" Main plugin logic goes here """
# Parse command-line arguments
args = parse_args()
get_waiting_time(args)
gtfo(0)
def parse_args():
""" Parse command-line arguments """
parser = ArgumentParser(usage='usage: check_idnow.py [-v|vv|vvv] [options]',
epilog='check_idnow v.%s by %s' % (__version__, __author__))
# Verbosity (want this first, so it's right after --help and --version)
parser.add_argument('-v', help='Set verbosity level',
action='count', default=0, dest='v')
# CLI arguments specific to this script
group = parser.add_argument_group('Plugin Options')
group.add_argument('-i', '--customer-id', help='IDNow Customer id',
default=None, dest='customer_id')
group.add_argument('-k', '--api-key', help='Your IDNow API key',
default=None, dest='api_key')
group.add_argument('-g', '--gateway-host', help='The hostname of the idnow gateway server',
default='gateway.idnow.de', dest='hostname')
# Common CLI arguments
parser.add_argument('-c', '--critical',
help='The critical waiting time in seconds. Default: %(default)s',
default=600, type=float, dest='crit', metavar='###')
parser.add_argument('-w', '--warning',
help='The threshold waiting time for a warning in seconds. Default: %(default)s',
default=300, type=float, dest='warn', metavar='###')
args = parser.parse_args()
# Set the logging level based on the -v arg
    log.getLogger().setLevel([log.ERROR, log.WARN, log.INFO, log.DEBUG][min(args.v, 3)])
log.debug('Parsed arguments: %s', args)
return vars(args)
def get_waiting_time(args):
""" Get the current estimated waiting time"""
try:
url = get_base_url(args['hostname'], args['customer_id'])
request = requests.get(url)
request.raise_for_status()
except requests.exceptions.RequestException as err:
gtfo(3, "UNKNOWN - ERROR: failed to get status: %s" % err)
json = request.json()
log.debug(json)
# Estimated waiting time in seconds
estimated_waiting_time = json['estimatedWaitingTime']
waiting_customers = json['numberOfWaitingChatRequests']
msg = "Estimated waiting time is {0} seconds. There are {1} people waiting.".format(
estimated_waiting_time, waiting_customers)
perf_data = {
'estimated_waiting_time': estimated_waiting_time,
'waiting_customers': waiting_customers
}
if estimated_waiting_time < args['warn']:
gtfo(0, "OK - " + msg, perf_data)
elif estimated_waiting_time >= args['crit']:
gtfo(2, "CRITICAL - " + msg, perf_data)
else:
gtfo(1, "WARN - " + msg, perf_data)
def get_api_token(args):
""" Get an API token from IDNow - the current api for the estimated waiting time
does not seem to require authentication. """
url = get_base_url(args['hostname'], args['customer_id']) + '/login'
payload = {'apiKey': args['api_key']}
request = requests.post(url, json=payload)
request.raise_for_status()
json = request.json()
return json['authToken']
def get_base_url(host_name, customer_id):
"""
:arg host_name: the host name of the IDNow gateway server
:arg customer_id: your customer id
:returns the base url of the IDNow API and the selected customer
"""
return 'https://{0}/api/v1/{1}'.format(host_name, customer_id)
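# Worked example (hypothetical customer id):
#     get_base_url('gateway.idnow.de', 'mycustomer')
#     -> 'https://gateway.idnow.de/api/v1/mycustomer'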
def gtfo(exitcode, message='', perf_data=None):
""" Exit gracefully with exitcode and (optional) message """
if perf_data is None:
perf_data = {}
log.debug('Exiting with status %s. Message: %s', exitcode, message)
perf_string = ''.join('{0}={1} '.format(key, val) for key, val in perf_data.items())
if message:
print("%s | %s" % (message, perf_string))
exit(exitcode)
if __name__ == '__main__':
# Initialize logging before hitting main, in case we need extra debug info
FORMAT = '%(asctime)s - %(funcName)s - %(levelname)s - %(message)s'
log.basicConfig(level=log.DEBUG, format=FORMAT)
main()
|
{
"content_hash": "0824c2324e50f6cbac30e105ec1ece02",
"timestamp": "",
"source": "github",
"line_count": 141,
"max_line_length": 105,
"avg_line_length": 34.333333333333336,
"alnum_prop": 0.6308613922743235,
"repo_name": "gtudan/check_idnow",
"id": "84b419800397998f5de9e1bcb8e8f0cabf6f22fb",
"size": "4864",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "check_idnow.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "5405"
}
],
"symlink_target": ""
}
|
import netaddr
import re
from sqlalchemy.orm import exc
from sqlalchemy import sql
from neutron.api.v2 import attributes
from neutron.common import constants
from neutron.common import exceptions as n_exc
import neutron.db.api as db
from neutron.db import models_v2
from neutron.openstack.common import log as logging
from neutron.plugins.cisco.common import cisco_constants as c_const
from neutron.plugins.cisco.common import cisco_exceptions as c_exc
from neutron.plugins.cisco.db import n1kv_models_v2
LOG = logging.getLogger(__name__)
def del_trunk_segment_binding(db_session, trunk_segment_id, segment_pairs):
"""
Delete a trunk network binding.
:param db_session: database session
:param trunk_segment_id: UUID representing the trunk network
:param segment_pairs: List of segment UUIDs in pair
representing the segments that are trunked
"""
with db_session.begin(subtransactions=True):
for (segment_id, dot1qtag) in segment_pairs:
(db_session.query(n1kv_models_v2.N1kvTrunkSegmentBinding).
filter_by(trunk_segment_id=trunk_segment_id,
segment_id=segment_id,
dot1qtag=dot1qtag).delete())
alloc = (db_session.query(n1kv_models_v2.
N1kvTrunkSegmentBinding).
filter_by(trunk_segment_id=trunk_segment_id).first())
if not alloc:
binding = get_network_binding(db_session, trunk_segment_id)
binding.physical_network = None
def del_multi_segment_binding(db_session, multi_segment_id, segment_pairs):
"""
Delete a multi-segment network binding.
:param db_session: database session
:param multi_segment_id: UUID representing the multi-segment network
:param segment_pairs: List of segment UUIDs in pair
representing the segments that are bridged
"""
with db_session.begin(subtransactions=True):
for (segment1_id, segment2_id) in segment_pairs:
(db_session.query(n1kv_models_v2.
N1kvMultiSegmentNetworkBinding).filter_by(
multi_segment_id=multi_segment_id,
segment1_id=segment1_id,
segment2_id=segment2_id).delete())
def add_trunk_segment_binding(db_session, trunk_segment_id, segment_pairs):
"""
Create a trunk network binding.
:param db_session: database session
    :param trunk_segment_id: UUID representing the trunk network
:param segment_pairs: List of segment UUIDs in pair
representing the segments to be trunked
"""
with db_session.begin(subtransactions=True):
binding = get_network_binding(db_session, trunk_segment_id)
for (segment_id, tag) in segment_pairs:
if not binding.physical_network:
member_seg_binding = get_network_binding(db_session,
segment_id)
binding.physical_network = member_seg_binding.physical_network
trunk_segment_binding = (
n1kv_models_v2.N1kvTrunkSegmentBinding(
trunk_segment_id=trunk_segment_id,
segment_id=segment_id, dot1qtag=tag))
db_session.add(trunk_segment_binding)
def add_multi_segment_binding(db_session, multi_segment_id, segment_pairs):
"""
Create a multi-segment network binding.
:param db_session: database session
:param multi_segment_id: UUID representing the multi-segment network
:param segment_pairs: List of segment UUIDs in pair
representing the segments to be bridged
"""
with db_session.begin(subtransactions=True):
for (segment1_id, segment2_id) in segment_pairs:
multi_segment_binding = (
n1kv_models_v2.N1kvMultiSegmentNetworkBinding(
multi_segment_id=multi_segment_id,
segment1_id=segment1_id,
segment2_id=segment2_id))
db_session.add(multi_segment_binding)
def add_multi_segment_encap_profile_name(db_session, multi_segment_id,
segment_pair, profile_name):
"""
Add the encapsulation profile name to the multi-segment network binding.
:param db_session: database session
:param multi_segment_id: UUID representing the multi-segment network
:param segment_pair: set containing the segment UUIDs that are bridged
"""
with db_session.begin(subtransactions=True):
binding = get_multi_segment_network_binding(db_session,
multi_segment_id,
segment_pair)
binding.encap_profile_name = profile_name
def get_multi_segment_network_binding(db_session,
multi_segment_id, segment_pair):
"""
Retrieve multi-segment network binding.
:param db_session: database session
    :param multi_segment_id: UUID representing the multi-segment network whose
                             binding is to be fetched
:param segment_pair: set containing the segment UUIDs that are bridged
:returns: binding object
"""
try:
(segment1_id, segment2_id) = segment_pair
return (db_session.query(
n1kv_models_v2.N1kvMultiSegmentNetworkBinding).
filter_by(multi_segment_id=multi_segment_id,
segment1_id=segment1_id,
segment2_id=segment2_id)).one()
except exc.NoResultFound:
raise c_exc.NetworkBindingNotFound(network_id=multi_segment_id)
def get_multi_segment_members(db_session, multi_segment_id):
"""
Retrieve all the member segments of a multi-segment network.
:param db_session: database session
:param multi_segment_id: UUID representing the multi-segment network
:returns: a list of tuples representing the mapped segments
"""
with db_session.begin(subtransactions=True):
allocs = (db_session.query(
n1kv_models_v2.N1kvMultiSegmentNetworkBinding).
filter_by(multi_segment_id=multi_segment_id))
return [(a.segment1_id, a.segment2_id) for a in allocs]
def get_multi_segment_encap_dict(db_session, multi_segment_id):
"""
Retrieve the encapsulation profiles for every segment pairs bridged.
:param db_session: database session
:param multi_segment_id: UUID representing the multi-segment network
:returns: a dictionary of lists containing the segment pairs in sets
"""
with db_session.begin(subtransactions=True):
encap_dict = {}
allocs = (db_session.query(
n1kv_models_v2.N1kvMultiSegmentNetworkBinding).
filter_by(multi_segment_id=multi_segment_id))
for alloc in allocs:
if alloc.encap_profile_name not in encap_dict:
encap_dict[alloc.encap_profile_name] = []
seg_pair = (alloc.segment1_id, alloc.segment2_id)
encap_dict[alloc.encap_profile_name].append(seg_pair)
return encap_dict
def get_trunk_network_binding(db_session, trunk_segment_id, segment_pair):
"""
Retrieve trunk network binding.
:param db_session: database session
:param trunk_segment_id: UUID representing the trunk network whose binding
                             is to be fetched
:param segment_pair: set containing the segment_id and dot1qtag
:returns: binding object
"""
try:
(segment_id, dot1qtag) = segment_pair
return (db_session.query(n1kv_models_v2.N1kvTrunkSegmentBinding).
filter_by(trunk_segment_id=trunk_segment_id,
segment_id=segment_id,
dot1qtag=dot1qtag)).one()
except exc.NoResultFound:
raise c_exc.NetworkBindingNotFound(network_id=trunk_segment_id)
def get_trunk_members(db_session, trunk_segment_id):
"""
Retrieve all the member segments of a trunk network.
:param db_session: database session
:param trunk_segment_id: UUID representing the trunk network
:returns: a list of tuples representing the segment and their
corresponding dot1qtag
"""
with db_session.begin(subtransactions=True):
allocs = (db_session.query(n1kv_models_v2.N1kvTrunkSegmentBinding).
filter_by(trunk_segment_id=trunk_segment_id))
return [(a.segment_id, a.dot1qtag) for a in allocs]
def is_trunk_member(db_session, segment_id):
"""
Checks if a segment is a member of a trunk segment.
:param db_session: database session
:param segment_id: UUID of the segment to be checked
:returns: boolean
"""
with db_session.begin(subtransactions=True):
ret = (db_session.query(n1kv_models_v2.N1kvTrunkSegmentBinding).
filter_by(segment_id=segment_id).first())
return bool(ret)
def is_multi_segment_member(db_session, segment_id):
"""
Checks if a segment is a member of a multi-segment network.
:param db_session: database session
:param segment_id: UUID of the segment to be checked
:returns: boolean
"""
with db_session.begin(subtransactions=True):
ret1 = (db_session.query(
n1kv_models_v2.N1kvMultiSegmentNetworkBinding).
filter_by(segment1_id=segment_id).first())
ret2 = (db_session.query(
n1kv_models_v2.N1kvMultiSegmentNetworkBinding).
filter_by(segment2_id=segment_id).first())
return bool(ret1 or ret2)
def get_network_binding(db_session, network_id):
"""
Retrieve network binding.
:param db_session: database session
:param network_id: UUID representing the network whose binding is
                       to be fetched
:returns: binding object
"""
try:
return (db_session.query(n1kv_models_v2.N1kvNetworkBinding).
filter_by(network_id=network_id).
one())
except exc.NoResultFound:
raise c_exc.NetworkBindingNotFound(network_id=network_id)
def add_network_binding(db_session, network_id, network_type,
physical_network, segmentation_id,
multicast_ip, network_profile_id, add_segments):
"""
Create network binding.
:param db_session: database session
:param network_id: UUID representing the network
:param network_type: string representing type of network (VLAN, OVERLAY,
MULTI_SEGMENT or TRUNK)
:param physical_network: Only applicable for VLAN networks. It
represents a L2 Domain
:param segmentation_id: integer representing VLAN or VXLAN ID
:param multicast_ip: Native VXLAN technology needs a multicast IP to be
associated with every VXLAN ID to deal with broadcast
packets. A single multicast IP can be shared by
multiple VXLAN IDs.
:param network_profile_id: network profile ID based on which this network
is created
:param add_segments: List of segment UUIDs in pairs to be added to either a
multi-segment or trunk network
"""
with db_session.begin(subtransactions=True):
binding = n1kv_models_v2.N1kvNetworkBinding(
network_id=network_id,
network_type=network_type,
physical_network=physical_network,
segmentation_id=segmentation_id,
multicast_ip=multicast_ip,
profile_id=network_profile_id)
db_session.add(binding)
if add_segments is None:
pass
elif network_type == c_const.NETWORK_TYPE_MULTI_SEGMENT:
add_multi_segment_binding(db_session, network_id, add_segments)
elif network_type == c_const.NETWORK_TYPE_TRUNK:
add_trunk_segment_binding(db_session, network_id, add_segments)
def get_segment_range(network_profile):
"""
Get the segment range min and max for a network profile.
:params network_profile: object of type network profile
:returns: integer values representing minimum and maximum segment
range value
"""
# Sort the range to ensure min, max is in order
seg_min, seg_max = sorted(
int(i) for i in network_profile.segment_range.split('-'))
LOG.debug(_("seg_min %(seg_min)s, seg_max %(seg_max)s"),
{'seg_min': seg_min, 'seg_max': seg_max})
return seg_min, seg_max
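# Worked example (illustrative values): a network profile whose segment_range
# is "500-550" yields seg_min=500 and seg_max=550; the sync/reserve helpers
# below then operate on IDs 500..550 inclusive.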
def get_multicast_ip(network_profile):
"""
Retrieve a multicast ip from the defined pool.
:params network_profile: object of type network profile
:returns: string representing multicast IP
"""
# Round robin multicast ip allocation
min_ip, max_ip = _get_multicast_ip_range(network_profile)
addr_list = list((netaddr.iter_iprange(min_ip, max_ip)))
mul_ip_str = str(addr_list[network_profile.multicast_ip_index])
network_profile.multicast_ip_index += 1
if network_profile.multicast_ip_index == len(addr_list):
network_profile.multicast_ip_index = 0
return mul_ip_str
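# Worked example (illustrative range): with multicast_ip_range set to
# "224.1.1.1-224.1.1.3", successive calls return 224.1.1.1, 224.1.1.2,
# 224.1.1.3 and then wrap around to 224.1.1.1, because multicast_ip_index is
# advanced on every call and reset to 0 once it reaches the end of the list.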
def _get_multicast_ip_range(network_profile):
"""
Helper method to retrieve minimum and maximum multicast ip.
:params network_profile: object of type network profile
:returns: two strings representing minimum multicast ip and
maximum multicast ip
"""
# Assumption: ip range belongs to the same subnet
# Assumption: ip range is already sorted
return network_profile.multicast_ip_range.split('-')
def get_port_binding(db_session, port_id):
"""
Retrieve port binding.
:param db_session: database session
:param port_id: UUID representing the port whose binding is to fetch
:returns: port binding object
"""
try:
return (db_session.query(n1kv_models_v2.N1kvPortBinding).
filter_by(port_id=port_id).
one())
except exc.NoResultFound:
raise c_exc.PortBindingNotFound(port_id=port_id)
def add_port_binding(db_session, port_id, policy_profile_id):
"""
Create port binding.
Bind the port with policy profile.
:param db_session: database session
:param port_id: UUID of the port
:param policy_profile_id: UUID of the policy profile
"""
with db_session.begin(subtransactions=True):
binding = n1kv_models_v2.N1kvPortBinding(port_id=port_id,
profile_id=policy_profile_id)
db_session.add(binding)
def delete_segment_allocations(db_session, net_p):
"""
Delete the segment allocation entry from the table.
:params db_session: database session
:params net_p: network profile object
"""
with db_session.begin(subtransactions=True):
seg_min, seg_max = get_segment_range(net_p)
if net_p['segment_type'] == c_const.NETWORK_TYPE_VLAN:
db_session.query(n1kv_models_v2.N1kvVlanAllocation).filter(
(n1kv_models_v2.N1kvVlanAllocation.physical_network ==
net_p['physical_network']),
(n1kv_models_v2.N1kvVlanAllocation.vlan_id >= seg_min),
(n1kv_models_v2.N1kvVlanAllocation.vlan_id <=
seg_max)).delete()
elif net_p['segment_type'] == c_const.NETWORK_TYPE_OVERLAY:
db_session.query(n1kv_models_v2.N1kvVxlanAllocation).filter(
(n1kv_models_v2.N1kvVxlanAllocation.vxlan_id >= seg_min),
(n1kv_models_v2.N1kvVxlanAllocation.vxlan_id <=
seg_max)).delete()
def sync_vlan_allocations(db_session, net_p):
"""
Synchronize vlan_allocations table with configured VLAN ranges.
Sync the network profile range with the vlan_allocations table for each
physical network.
:param db_session: database session
:param net_p: network profile dictionary
"""
with db_session.begin(subtransactions=True):
seg_min, seg_max = get_segment_range(net_p)
for vlan_id in range(seg_min, seg_max + 1):
try:
get_vlan_allocation(db_session,
net_p['physical_network'],
vlan_id)
except c_exc.VlanIDNotFound:
alloc = n1kv_models_v2.N1kvVlanAllocation(
physical_network=net_p['physical_network'],
vlan_id=vlan_id,
network_profile_id=net_p['id'])
db_session.add(alloc)
def get_vlan_allocation(db_session, physical_network, vlan_id):
"""
Retrieve vlan allocation.
:param db_session: database session
    :param physical_network: string name for the physical network
:param vlan_id: integer representing the VLAN ID.
:returns: allocation object for given physical network and VLAN ID
"""
try:
return (db_session.query(n1kv_models_v2.N1kvVlanAllocation).
filter_by(physical_network=physical_network,
vlan_id=vlan_id).one())
except exc.NoResultFound:
raise c_exc.VlanIDNotFound(vlan_id=vlan_id)
def reserve_vlan(db_session, network_profile):
"""
Reserve a VLAN ID within the range of the network profile.
:param db_session: database session
:param network_profile: network profile object
"""
seg_min, seg_max = get_segment_range(network_profile)
segment_type = c_const.NETWORK_TYPE_VLAN
with db_session.begin(subtransactions=True):
alloc = (db_session.query(n1kv_models_v2.N1kvVlanAllocation).
filter(sql.and_(
n1kv_models_v2.N1kvVlanAllocation.vlan_id >= seg_min,
n1kv_models_v2.N1kvVlanAllocation.vlan_id <= seg_max,
n1kv_models_v2.N1kvVlanAllocation.physical_network ==
network_profile['physical_network'],
n1kv_models_v2.N1kvVlanAllocation.allocated ==
sql.false())
)).first()
if alloc:
segment_id = alloc.vlan_id
physical_network = alloc.physical_network
alloc.allocated = True
return (physical_network, segment_type, segment_id, "0.0.0.0")
raise c_exc.NoMoreNetworkSegments(
network_profile_name=network_profile.name)
def reserve_vxlan(db_session, network_profile):
"""
Reserve a VXLAN ID within the range of the network profile.
:param db_session: database session
:param network_profile: network profile object
"""
seg_min, seg_max = get_segment_range(network_profile)
segment_type = c_const.NETWORK_TYPE_OVERLAY
physical_network = ""
with db_session.begin(subtransactions=True):
alloc = (db_session.query(n1kv_models_v2.N1kvVxlanAllocation).
filter(sql.and_(
n1kv_models_v2.N1kvVxlanAllocation.vxlan_id >=
seg_min,
n1kv_models_v2.N1kvVxlanAllocation.vxlan_id <=
seg_max,
n1kv_models_v2.N1kvVxlanAllocation.allocated ==
sql.false())
).first())
if alloc:
segment_id = alloc.vxlan_id
alloc.allocated = True
if network_profile.sub_type == (c_const.
NETWORK_SUBTYPE_NATIVE_VXLAN):
return (physical_network, segment_type,
segment_id, get_multicast_ip(network_profile))
else:
return (physical_network, segment_type, segment_id, "0.0.0.0")
raise n_exc.NoNetworkAvailable()
def alloc_network(db_session, network_profile_id):
"""
Allocate network using first available free segment ID in segment range.
:param db_session: database session
:param network_profile_id: UUID representing the network profile
"""
with db_session.begin(subtransactions=True):
network_profile = get_network_profile(db_session,
network_profile_id)
if network_profile.segment_type == c_const.NETWORK_TYPE_VLAN:
return reserve_vlan(db_session, network_profile)
if network_profile.segment_type == c_const.NETWORK_TYPE_OVERLAY:
return reserve_vxlan(db_session, network_profile)
return (None, network_profile.segment_type, 0, "0.0.0.0")
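# Illustrative shapes of the tuple returned above: a VLAN profile yields
# (physical_network, NETWORK_TYPE_VLAN, <vlan_id>, "0.0.0.0"); a native VXLAN
# profile yields ("", NETWORK_TYPE_OVERLAY, <vxlan_id>, <multicast_ip>); any
# other segment type falls through to (None, <segment_type>, 0, "0.0.0.0").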
def reserve_specific_vlan(db_session, physical_network, vlan_id):
"""
Reserve a specific VLAN ID for the network.
:param db_session: database session
:param physical_network: string representing the name of physical network
:param vlan_id: integer value of the segmentation ID to be reserved
"""
with db_session.begin(subtransactions=True):
try:
alloc = (db_session.query(n1kv_models_v2.N1kvVlanAllocation).
filter_by(physical_network=physical_network,
vlan_id=vlan_id).
one())
if alloc.allocated:
if vlan_id == c_const.FLAT_VLAN_ID:
raise n_exc.FlatNetworkInUse(
physical_network=physical_network)
else:
raise n_exc.VlanIdInUse(vlan_id=vlan_id,
physical_network=physical_network)
LOG.debug(_("Reserving specific vlan %(vlan)s on physical "
"network %(network)s from pool"),
{"vlan": vlan_id, "network": physical_network})
alloc.allocated = True
db_session.add(alloc)
except exc.NoResultFound:
raise c_exc.VlanIDOutsidePool
def release_vlan(db_session, physical_network, vlan_id):
"""
Release a given VLAN ID.
:param db_session: database session
:param physical_network: string representing the name of physical network
:param vlan_id: integer value of the segmentation ID to be released
"""
with db_session.begin(subtransactions=True):
try:
alloc = (db_session.query(n1kv_models_v2.N1kvVlanAllocation).
filter_by(physical_network=physical_network,
vlan_id=vlan_id).
one())
alloc.allocated = False
except exc.NoResultFound:
LOG.warning(_("vlan_id %(vlan)s on physical network %(network)s "
"not found"),
{"vlan": vlan_id, "network": physical_network})
def sync_vxlan_allocations(db_session, net_p):
"""
Synchronize vxlan_allocations table with configured vxlan ranges.
:param db_session: database session
:param net_p: network profile dictionary
"""
seg_min, seg_max = get_segment_range(net_p)
if seg_max + 1 - seg_min > c_const.MAX_VXLAN_RANGE:
msg = (_("Unreasonable vxlan ID range %(vxlan_min)s - %(vxlan_max)s"),
{"vxlan_min": seg_min, "vxlan_max": seg_max})
raise n_exc.InvalidInput(error_message=msg)
with db_session.begin(subtransactions=True):
for vxlan_id in range(seg_min, seg_max + 1):
try:
get_vxlan_allocation(db_session, vxlan_id)
except c_exc.VxlanIDNotFound:
alloc = n1kv_models_v2.N1kvVxlanAllocation(
network_profile_id=net_p['id'], vxlan_id=vxlan_id)
db_session.add(alloc)
def get_vxlan_allocation(db_session, vxlan_id):
"""
Retrieve VXLAN allocation for the given VXLAN ID.
:param db_session: database session
:param vxlan_id: integer value representing the segmentation ID
:returns: allocation object
"""
try:
return (db_session.query(n1kv_models_v2.N1kvVxlanAllocation).
filter_by(vxlan_id=vxlan_id).one())
except exc.NoResultFound:
raise c_exc.VxlanIDNotFound(vxlan_id=vxlan_id)
def reserve_specific_vxlan(db_session, vxlan_id):
"""
Reserve a specific VXLAN ID.
:param db_session: database session
:param vxlan_id: integer value representing the segmentation ID
"""
with db_session.begin(subtransactions=True):
try:
alloc = (db_session.query(n1kv_models_v2.N1kvVxlanAllocation).
filter_by(vxlan_id=vxlan_id).
one())
if alloc.allocated:
raise c_exc.VxlanIDInUse(vxlan_id=vxlan_id)
LOG.debug(_("Reserving specific vxlan %s from pool"), vxlan_id)
alloc.allocated = True
db_session.add(alloc)
except exc.NoResultFound:
raise c_exc.VxlanIDOutsidePool
def release_vxlan(db_session, vxlan_id):
"""
Release a given VXLAN ID.
:param db_session: database session
:param vxlan_id: integer value representing the segmentation ID
"""
with db_session.begin(subtransactions=True):
try:
alloc = (db_session.query(n1kv_models_v2.N1kvVxlanAllocation).
filter_by(vxlan_id=vxlan_id).
one())
alloc.allocated = False
except exc.NoResultFound:
LOG.warning(_("vxlan_id %s not found"), vxlan_id)
def set_port_status(port_id, status):
"""
Set the status of the port.
:param port_id: UUID representing the port
:param status: string representing the new status
"""
db_session = db.get_session()
try:
port = db_session.query(models_v2.Port).filter_by(id=port_id).one()
port.status = status
except exc.NoResultFound:
raise n_exc.PortNotFound(port_id=port_id)
def get_vm_network(db_session, policy_profile_id, network_id):
"""
Retrieve a vm_network based on policy profile and network id.
:param db_session: database session
:param policy_profile_id: UUID representing policy profile
:param network_id: UUID representing network
:returns: VM network object
"""
try:
return (db_session.query(n1kv_models_v2.N1kVmNetwork).
filter_by(profile_id=policy_profile_id,
network_id=network_id).one())
except exc.NoResultFound:
name = (c_const.VM_NETWORK_NAME_PREFIX + policy_profile_id
+ "_" + network_id)
raise c_exc.VMNetworkNotFound(name=name)
def add_vm_network(db_session,
name,
policy_profile_id,
network_id,
port_count):
"""
Create a VM network.
Add a VM network for a unique combination of network and
policy profile. All ports having the same policy profile
on one network will be associated with one VM network.
:param db_session: database session
:param name: string representing the name of the VM network
:param policy_profile_id: UUID representing policy profile
:param network_id: UUID representing a network
:param port_count: integer representing the number of ports on vm network
"""
with db_session.begin(subtransactions=True):
vm_network = n1kv_models_v2.N1kVmNetwork(
name=name,
profile_id=policy_profile_id,
network_id=network_id,
port_count=port_count)
db_session.add(vm_network)
def update_vm_network_port_count(db_session, name, port_count):
"""
Update a VM network with new port count.
:param db_session: database session
:param name: string representing the name of the VM network
:param port_count: integer representing the number of ports on VM network
"""
try:
with db_session.begin(subtransactions=True):
vm_network = (db_session.query(n1kv_models_v2.N1kVmNetwork).
filter_by(name=name).one())
if port_count is not None:
vm_network.port_count = port_count
return vm_network
except exc.NoResultFound:
raise c_exc.VMNetworkNotFound(name=name)
def delete_vm_network(db_session, policy_profile_id, network_id):
"""
Delete a VM network.
:param db_session: database session
:param policy_profile_id: UUID representing a policy profile
:param network_id: UUID representing a network
:returns: deleted VM network object
"""
with db_session.begin(subtransactions=True):
try:
vm_network = get_vm_network(db_session,
policy_profile_id,
network_id)
db_session.delete(vm_network)
db_session.query(n1kv_models_v2.N1kVmNetwork).filter_by(
name=vm_network["name"]).delete()
return vm_network
except exc.NoResultFound:
name = (c_const.VM_NETWORK_NAME_PREFIX + policy_profile_id +
"_" + network_id)
raise c_exc.VMNetworkNotFound(name=name)
def create_network_profile(db_session, network_profile):
"""Create a network profile."""
LOG.debug(_("create_network_profile()"))
with db_session.begin(subtransactions=True):
kwargs = {"name": network_profile["name"],
"segment_type": network_profile["segment_type"]}
if network_profile["segment_type"] == c_const.NETWORK_TYPE_VLAN:
kwargs["physical_network"] = network_profile["physical_network"]
kwargs["segment_range"] = network_profile["segment_range"]
elif network_profile["segment_type"] == c_const.NETWORK_TYPE_OVERLAY:
kwargs["multicast_ip_index"] = 0
kwargs["multicast_ip_range"] = network_profile[
"multicast_ip_range"]
kwargs["segment_range"] = network_profile["segment_range"]
kwargs["sub_type"] = network_profile["sub_type"]
elif network_profile["segment_type"] == c_const.NETWORK_TYPE_TRUNK:
kwargs["sub_type"] = network_profile["sub_type"]
net_profile = n1kv_models_v2.NetworkProfile(**kwargs)
db_session.add(net_profile)
return net_profile
def delete_network_profile(db_session, id):
"""Delete Network Profile."""
LOG.debug(_("delete_network_profile()"))
with db_session.begin(subtransactions=True):
try:
network_profile = get_network_profile(db_session, id)
db_session.delete(network_profile)
(db_session.query(n1kv_models_v2.ProfileBinding).
filter_by(profile_id=id).delete())
return network_profile
except exc.NoResultFound:
raise c_exc.ProfileTenantBindingNotFound(profile_id=id)
def update_network_profile(db_session, id, network_profile):
"""Update Network Profile."""
LOG.debug(_("update_network_profile()"))
with db_session.begin(subtransactions=True):
profile = get_network_profile(db_session, id)
profile.update(network_profile)
return profile
def get_network_profile(db_session, id):
"""Get Network Profile."""
LOG.debug(_("get_network_profile()"))
try:
return db_session.query(
n1kv_models_v2.NetworkProfile).filter_by(id=id).one()
except exc.NoResultFound:
raise c_exc.NetworkProfileNotFound(profile=id)
def _get_network_profiles(db_session=None, physical_network=None):
"""
Retrieve all network profiles.
Get Network Profiles on a particular physical network, if physical
network is specified. If no physical network is specified, return
all network profiles.
"""
db_session = db_session or db.get_session()
if physical_network:
return (db_session.query(n1kv_models_v2.NetworkProfile).
filter_by(physical_network=physical_network))
return db_session.query(n1kv_models_v2.NetworkProfile)
def create_policy_profile(policy_profile):
"""Create Policy Profile."""
LOG.debug(_("create_policy_profile()"))
db_session = db.get_session()
with db_session.begin(subtransactions=True):
p_profile = n1kv_models_v2.PolicyProfile(id=policy_profile["id"],
name=policy_profile["name"])
db_session.add(p_profile)
return p_profile
def delete_policy_profile(id):
"""Delete Policy Profile."""
LOG.debug(_("delete_policy_profile()"))
db_session = db.get_session()
with db_session.begin(subtransactions=True):
policy_profile = get_policy_profile(db_session, id)
db_session.delete(policy_profile)
def update_policy_profile(db_session, id, policy_profile):
"""Update a policy profile."""
LOG.debug(_("update_policy_profile()"))
with db_session.begin(subtransactions=True):
_profile = get_policy_profile(db_session, id)
_profile.update(policy_profile)
return _profile
def get_policy_profile(db_session, id):
"""Get Policy Profile."""
LOG.debug(_("get_policy_profile()"))
try:
return db_session.query(
n1kv_models_v2.PolicyProfile).filter_by(id=id).one()
except exc.NoResultFound:
raise c_exc.PolicyProfileIdNotFound(profile_id=id)
def get_policy_profiles():
"""Retrieve all policy profiles."""
db_session = db.get_session()
with db_session.begin(subtransactions=True):
return db_session.query(n1kv_models_v2.PolicyProfile)
def create_profile_binding(db_session, tenant_id, profile_id, profile_type):
"""Create Network/Policy Profile association with a tenant."""
db_session = db_session or db.get_session()
if profile_type not in ["network", "policy"]:
raise n_exc.NeutronException(_("Invalid profile type"))
if _profile_binding_exists(db_session,
tenant_id,
profile_id,
profile_type):
return get_profile_binding(db_session, tenant_id, profile_id)
with db_session.begin(subtransactions=True):
binding = n1kv_models_v2.ProfileBinding(profile_type=profile_type,
profile_id=profile_id,
tenant_id=tenant_id)
db_session.add(binding)
return binding
def _profile_binding_exists(db_session, tenant_id, profile_id, profile_type):
LOG.debug(_("_profile_binding_exists()"))
return (db_session.query(n1kv_models_v2.ProfileBinding).
filter_by(tenant_id=tenant_id, profile_id=profile_id,
profile_type=profile_type).first())
def get_profile_binding(db_session, tenant_id, profile_id):
"""Get Network/Policy Profile - Tenant binding."""
LOG.debug(_("get_profile_binding()"))
try:
return (db_session.query(n1kv_models_v2.ProfileBinding).filter_by(
tenant_id=tenant_id, profile_id=profile_id).one())
except exc.NoResultFound:
raise c_exc.ProfileTenantBindingNotFound(profile_id=profile_id)
def delete_profile_binding(db_session, tenant_id, profile_id):
"""Delete Policy Binding."""
LOG.debug(_("delete_profile_binding()"))
db_session = db_session or db.get_session()
try:
binding = get_profile_binding(db_session, tenant_id, profile_id)
with db_session.begin(subtransactions=True):
db_session.delete(binding)
except c_exc.ProfileTenantBindingNotFound:
LOG.debug(_("Profile-Tenant binding missing for profile ID "
"%(profile_id)s and tenant ID %(tenant_id)s"),
{"profile_id": profile_id, "tenant_id": tenant_id})
return
def _get_profile_bindings(db_session, profile_type=None):
"""
Retrieve a list of profile bindings.
Get all profile-tenant bindings based on profile type.
If profile type is None, return profile-tenant binding for all
profile types.
"""
LOG.debug(_("_get_profile_bindings()"))
if profile_type:
profile_bindings = (db_session.query(n1kv_models_v2.ProfileBinding).
filter_by(profile_type=profile_type))
return profile_bindings
return db_session.query(n1kv_models_v2.ProfileBinding)
class NetworkProfile_db_mixin(object):
"""Network Profile Mixin."""
def _replace_fake_tenant_id_with_real(self, context):
"""
        Replace default tenant-ids with the admin tenant-id.
Default tenant-ids are populated in profile bindings when plugin is
initialized. Replace these tenant-ids with admin's tenant-id.
:param context: neutron api request context
"""
if context.is_admin and context.tenant_id:
tenant_id = context.tenant_id
db_session = context.session
with db_session.begin(subtransactions=True):
(db_session.query(n1kv_models_v2.ProfileBinding).
filter_by(tenant_id=c_const.TENANT_ID_NOT_SET).
update({'tenant_id': tenant_id}))
def _get_network_collection_for_tenant(self, db_session, model, tenant_id):
net_profile_ids = (db_session.query(n1kv_models_v2.ProfileBinding.
profile_id).
filter_by(tenant_id=tenant_id).
filter_by(profile_type=c_const.NETWORK))
network_profiles = (db_session.query(model).filter(model.id.in_(
pid[0] for pid in net_profile_ids)))
return [self._make_network_profile_dict(p) for p in network_profiles]
def _make_profile_bindings_dict(self, profile_binding, fields=None):
res = {"profile_id": profile_binding["profile_id"],
"tenant_id": profile_binding["tenant_id"]}
return self._fields(res, fields)
def _make_network_profile_dict(self, network_profile, fields=None):
res = {"id": network_profile["id"],
"name": network_profile["name"],
"segment_type": network_profile["segment_type"],
"sub_type": network_profile["sub_type"],
"segment_range": network_profile["segment_range"],
"multicast_ip_index": network_profile["multicast_ip_index"],
"multicast_ip_range": network_profile["multicast_ip_range"],
"physical_network": network_profile["physical_network"]}
return self._fields(res, fields)
def _segment_in_use(self, db_session, network_profile):
"""Verify whether a segment is allocated for given network profile."""
with db_session.begin(subtransactions=True):
return (db_session.query(n1kv_models_v2.N1kvNetworkBinding).
filter_by(profile_id=network_profile['id'])).first()
def get_network_profile_bindings(self, context, filters=None, fields=None):
"""
Retrieve a list of profile bindings for network profiles.
:param context: neutron api request context
:param filters: a dictionary with keys that are valid keys for a
                        profile bindings object. Values in this dictionary are
an iterable containing values that will be used for an
exact match comparison for that value. Each result
returned by this function will have matched one of the
values for each key in filters
:params fields: a list of strings that are valid keys in a profile
bindings dictionary. Only these fields will be returned
:returns: list of profile bindings
"""
if context.is_admin:
profile_bindings = _get_profile_bindings(
context.session,
profile_type=c_const.NETWORK)
return [self._make_profile_bindings_dict(pb)
for pb in profile_bindings]
def create_network_profile(self, context, network_profile):
"""
Create a network profile.
:param context: neutron api request context
:param network_profile: network profile dictionary
:returns: network profile dictionary
"""
self._replace_fake_tenant_id_with_real(context)
p = network_profile["network_profile"]
self._validate_network_profile_args(context, p)
with context.session.begin(subtransactions=True):
net_profile = create_network_profile(context.session, p)
if net_profile.segment_type == c_const.NETWORK_TYPE_VLAN:
sync_vlan_allocations(context.session, net_profile)
elif net_profile.segment_type == c_const.NETWORK_TYPE_OVERLAY:
sync_vxlan_allocations(context.session, net_profile)
create_profile_binding(context.session,
context.tenant_id,
net_profile.id,
c_const.NETWORK)
if p.get("add_tenant"):
self.add_network_profile_tenant(context.session,
net_profile.id,
p["add_tenant"])
return self._make_network_profile_dict(net_profile)
def delete_network_profile(self, context, id):
"""
Delete a network profile.
:param context: neutron api request context
:param id: UUID representing network profile to delete
:returns: deleted network profile dictionary
"""
# Check whether the network profile is in use.
if self._segment_in_use(context.session,
get_network_profile(context.session, id)):
raise c_exc.NetworkProfileInUse(profile=id)
# Delete and return the network profile if it is not in use.
_profile = delete_network_profile(context.session, id)
return self._make_network_profile_dict(_profile)
def update_network_profile(self, context, id, network_profile):
"""
Update a network profile.
Add/remove network profile to tenant-id binding for the corresponding
options and if user is admin.
:param context: neutron api request context
:param id: UUID representing network profile to update
:param network_profile: network profile dictionary
:returns: updated network profile dictionary
"""
# Flag to check whether network profile is updated or not.
is_updated = False
p = network_profile["network_profile"]
original_net_p = get_network_profile(context.session, id)
# Update network profile to tenant id binding.
if context.is_admin and "add_tenant" in p:
self.add_network_profile_tenant(context.session, id,
p["add_tenant"])
is_updated = True
if context.is_admin and "remove_tenant" in p:
delete_profile_binding(context.session, p["remove_tenant"], id)
is_updated = True
if original_net_p.segment_type == c_const.NETWORK_TYPE_TRUNK:
#TODO(abhraut): Remove check when Trunk supports segment range.
if p.get('segment_range'):
msg = _("segment_range not required for TRUNK")
LOG.error(msg)
raise n_exc.InvalidInput(error_message=msg)
if original_net_p.segment_type in [c_const.NETWORK_TYPE_VLAN,
c_const.NETWORK_TYPE_TRUNK]:
if p.get("multicast_ip_range"):
msg = _("multicast_ip_range not required")
LOG.error(msg)
raise n_exc.InvalidInput(error_message=msg)
# Update segment range if network profile is not in use.
if (p.get("segment_range") and
p.get("segment_range") != original_net_p.segment_range):
if not self._segment_in_use(context.session, original_net_p):
delete_segment_allocations(context.session, original_net_p)
updated_net_p = update_network_profile(context.session, id, p)
self._validate_segment_range_uniqueness(context,
updated_net_p, id)
if original_net_p.segment_type == c_const.NETWORK_TYPE_VLAN:
sync_vlan_allocations(context.session, updated_net_p)
if original_net_p.segment_type == c_const.NETWORK_TYPE_OVERLAY:
sync_vxlan_allocations(context.session, updated_net_p)
is_updated = True
else:
raise c_exc.NetworkProfileInUse(profile=id)
if (p.get('multicast_ip_range') and
(p.get("multicast_ip_range") !=
original_net_p.get("multicast_ip_range"))):
self._validate_multicast_ip_range(p)
if not self._segment_in_use(context.session, original_net_p):
is_updated = True
else:
raise c_exc.NetworkProfileInUse(profile=id)
# Update network profile if name is updated and the network profile
# is not yet updated.
if "name" in p and not is_updated:
is_updated = True
# Return network profile if it is successfully updated.
if is_updated:
return self._make_network_profile_dict(
update_network_profile(context.session, id, p))
def get_network_profile(self, context, id, fields=None):
"""
Retrieve a network profile.
:param context: neutron api request context
:param id: UUID representing the network profile to retrieve
:params fields: a list of strings that are valid keys in a network
profile dictionary. Only these fields will be returned
:returns: network profile dictionary
"""
profile = get_network_profile(context.session, id)
return self._make_network_profile_dict(profile, fields)
def get_network_profiles(self, context, filters=None, fields=None):
"""
Retrieve a list of all network profiles.
Retrieve all network profiles if tenant is admin. For a non-admin
tenant, retrieve all network profiles belonging to this tenant only.
:param context: neutron api request context
:param filters: a dictionary with keys that are valid keys for a
                        network profile object. Values in this dictionary are
an iterable containing values that will be used for an
exact match comparison for that value. Each result
returned by this function will have matched one of the
values for each key in filters
:params fields: a list of strings that are valid keys in a network
profile dictionary. Only these fields will be returned
:returns: list of all network profiles
"""
if context.is_admin:
return self._get_collection(context, n1kv_models_v2.NetworkProfile,
self._make_network_profile_dict,
filters=filters, fields=fields)
return self._get_network_collection_for_tenant(context.session,
n1kv_models_v2.
NetworkProfile,
context.tenant_id)
def add_network_profile_tenant(self,
db_session,
network_profile_id,
tenant_id):
"""
Add a tenant to a network profile.
:param db_session: database session
:param network_profile_id: UUID representing network profile
:param tenant_id: UUID representing the tenant
:returns: profile binding object
"""
return create_profile_binding(db_session,
tenant_id,
network_profile_id,
c_const.NETWORK)
def network_profile_exists(self, context, id):
"""
Verify whether a network profile for given id exists.
:param context: neutron api request context
:param id: UUID representing network profile
        :returns: True if the network profile exists, else False
"""
try:
get_network_profile(context.session, id)
return True
        except c_exc.NetworkProfileNotFound:
return False
def _get_segment_range(self, data):
return (int(seg) for seg in data.split("-")[:2])
def _validate_network_profile_args(self, context, p):
"""
Validate completeness of Nexus1000V network profile arguments.
:param context: neutron api request context
:param p: network profile object
"""
self._validate_network_profile(p)
segment_type = p['segment_type'].lower()
if segment_type != c_const.NETWORK_TYPE_TRUNK:
self._validate_segment_range_uniqueness(context, p)
def _validate_segment_range(self, network_profile):
"""
Validate segment range values.
:param network_profile: network profile object
"""
if not re.match(r"(\d+)\-(\d+)", network_profile["segment_range"]):
msg = _("Invalid segment range. example range: 500-550")
raise n_exc.InvalidInput(error_message=msg)
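# Illustrative sketch, not part of the original plugin: how the segment-range
# helpers above behave. _get_segment_range unpacks "low-high" into two ints and
# _validate_segment_range only checks the "<digits>-<digits>" shape, not ordering.
#   >>> tuple(int(seg) for seg in "500-550".split("-")[:2])
#   (500, 550)
#   >>> bool(re.match(r"(\d+)\-(\d+)", "500-550"))
#   True
#   >>> bool(re.match(r"(\d+)\-(\d+)", "500"))   # no upper bound -> rejected
#   False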
def _validate_multicast_ip_range(self, network_profile):
"""
Validate multicast ip range values.
:param network_profile: network profile object
"""
try:
min_ip, max_ip = (network_profile
['multicast_ip_range'].split('-', 1))
except ValueError:
msg = _("Invalid multicast ip address range. "
"example range: 224.1.1.1-224.1.1.10")
LOG.error(msg)
raise n_exc.InvalidInput(error_message=msg)
for ip in [min_ip, max_ip]:
try:
if not netaddr.IPAddress(ip).is_multicast():
msg = _("%s is not a valid multicast ip address") % ip
LOG.error(msg)
raise n_exc.InvalidInput(error_message=msg)
if netaddr.IPAddress(ip) <= netaddr.IPAddress('224.0.0.255'):
msg = _("%s is reserved multicast ip address") % ip
LOG.error(msg)
raise n_exc.InvalidInput(error_message=msg)
except netaddr.AddrFormatError:
msg = _("%s is not a valid ip address") % ip
LOG.error(msg)
raise n_exc.InvalidInput(error_message=msg)
if netaddr.IPAddress(min_ip) > netaddr.IPAddress(max_ip):
msg = (_("Invalid multicast IP range '%(min_ip)s-%(max_ip)s':"
" Range should be from low address to high address") %
{'min_ip': min_ip, 'max_ip': max_ip})
LOG.error(msg)
raise n_exc.InvalidInput(error_message=msg)
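# Illustrative examples, not part of the original plugin, of ranges the
# validator above accepts or rejects:
#   "224.1.1.1-224.1.1.10"  -> accepted (both multicast, above 224.0.0.255, low <= high)
#   "224.0.0.1-224.0.0.5"   -> rejected (falls in the reserved 224.0.0.0/24 block)
#   "224.1.1.10-224.1.1.1"  -> rejected (range runs from high address to low address)
#   "10.0.0.1-10.0.0.2"     -> rejected (not multicast addresses)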
def _validate_network_profile(self, net_p):
"""
Validate completeness of a network profile arguments.
:param net_p: network profile object
"""
if any(net_p[arg] == "" for arg in ["segment_type"]):
msg = _("Arguments segment_type missing"
" for network profile")
LOG.error(msg)
raise n_exc.InvalidInput(error_message=msg)
segment_type = net_p["segment_type"].lower()
if segment_type not in [c_const.NETWORK_TYPE_VLAN,
c_const.NETWORK_TYPE_OVERLAY,
c_const.NETWORK_TYPE_TRUNK,
c_const.NETWORK_TYPE_MULTI_SEGMENT]:
msg = _("segment_type should either be vlan, overlay, "
"multi-segment or trunk")
LOG.error(msg)
raise n_exc.InvalidInput(error_message=msg)
if segment_type == c_const.NETWORK_TYPE_VLAN:
if "physical_network" not in net_p:
msg = _("Argument physical_network missing "
"for network profile")
LOG.error(msg)
raise n_exc.InvalidInput(error_message=msg)
if segment_type == c_const.NETWORK_TYPE_TRUNK:
if net_p["segment_range"]:
msg = _("segment_range not required for trunk")
LOG.error(msg)
raise n_exc.InvalidInput(error_message=msg)
if segment_type in [c_const.NETWORK_TYPE_TRUNK,
c_const.NETWORK_TYPE_OVERLAY]:
if not attributes.is_attr_set(net_p.get("sub_type")):
msg = _("Argument sub_type missing "
"for network profile")
LOG.error(msg)
raise n_exc.InvalidInput(error_message=msg)
if segment_type in [c_const.NETWORK_TYPE_VLAN,
c_const.NETWORK_TYPE_OVERLAY]:
if "segment_range" not in net_p:
msg = _("Argument segment_range missing "
"for network profile")
LOG.error(msg)
raise n_exc.InvalidInput(error_message=msg)
self._validate_segment_range(net_p)
if segment_type == c_const.NETWORK_TYPE_OVERLAY:
if net_p['sub_type'] != c_const.NETWORK_SUBTYPE_NATIVE_VXLAN:
net_p['multicast_ip_range'] = '0.0.0.0'
else:
multicast_ip_range = net_p.get("multicast_ip_range")
if not attributes.is_attr_set(multicast_ip_range):
msg = _("Argument multicast_ip_range missing"
" for VXLAN multicast network profile")
LOG.error(msg)
raise n_exc.InvalidInput(error_message=msg)
self._validate_multicast_ip_range(net_p)
else:
net_p['multicast_ip_range'] = '0.0.0.0'
def _validate_segment_range_uniqueness(self, context, net_p, id=None):
"""
Validate that segment range doesn't overlap.
:param context: neutron api request context
:param net_p: network profile dictionary
:param id: UUID representing the network profile being updated
"""
segment_type = net_p["segment_type"].lower()
seg_min, seg_max = self._get_segment_range(net_p['segment_range'])
if segment_type == c_const.NETWORK_TYPE_VLAN:
if not ((seg_min <= seg_max) and
((seg_min in range(constants.MIN_VLAN_TAG,
c_const.NEXUS_VLAN_RESERVED_MIN) and
seg_max in range(constants.MIN_VLAN_TAG,
c_const.NEXUS_VLAN_RESERVED_MIN)) or
(seg_min in range(c_const.NEXUS_VLAN_RESERVED_MAX + 1,
constants.MAX_VLAN_TAG) and
seg_max in range(c_const.NEXUS_VLAN_RESERVED_MAX + 1,
constants.MAX_VLAN_TAG)))):
msg = (_("Segment range is invalid, select from "
"%(min)s-%(nmin)s, %(nmax)s-%(max)s") %
{"min": constants.MIN_VLAN_TAG,
"nmin": c_const.NEXUS_VLAN_RESERVED_MIN - 1,
"nmax": c_const.NEXUS_VLAN_RESERVED_MAX + 1,
"max": constants.MAX_VLAN_TAG - 1})
LOG.error(msg)
raise n_exc.InvalidInput(error_message=msg)
profiles = _get_network_profiles(
db_session=context.session,
physical_network=net_p["physical_network"]
)
elif segment_type in [c_const.NETWORK_TYPE_OVERLAY,
c_const.NETWORK_TYPE_MULTI_SEGMENT,
c_const.NETWORK_TYPE_TRUNK]:
if (seg_min > seg_max or
seg_min < c_const.NEXUS_VXLAN_MIN or
seg_max > c_const.NEXUS_VXLAN_MAX):
msg = (_("segment range is invalid. Valid range is : "
"%(min)s-%(max)s") %
{"min": c_const.NEXUS_VXLAN_MIN,
"max": c_const.NEXUS_VXLAN_MAX})
LOG.error(msg)
raise n_exc.InvalidInput(error_message=msg)
profiles = _get_network_profiles(db_session=context.session)
if profiles:
for profile in profiles:
if id and profile.id == id:
continue
name = profile.name
segment_range = profile.segment_range
if net_p["name"] == name:
msg = (_("NetworkProfile name %s already exists"),
net_p["name"])
LOG.error(msg)
raise n_exc.InvalidInput(error_message=msg)
if (c_const.NETWORK_TYPE_MULTI_SEGMENT in
[profile.segment_type, net_p["segment_type"]] or
c_const.NETWORK_TYPE_TRUNK in
[profile.segment_type, net_p["segment_type"]]):
continue
seg_min, seg_max = self._get_segment_range(
net_p["segment_range"])
profile_seg_min, profile_seg_max = self._get_segment_range(
segment_range)
if ((profile_seg_min <= seg_min <= profile_seg_max) or
(profile_seg_min <= seg_max <= profile_seg_max) or
((seg_min <= profile_seg_min) and
(seg_max >= profile_seg_max))):
msg = _("Segment range overlaps with another profile")
LOG.error(msg)
raise n_exc.InvalidInput(error_message=msg)
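# Hedged example of the overlap check above (illustrative values, not from the
# plugin): given an existing VLAN profile covering 100-200, a new profile asking
# for 150-250 is rejected because 150 falls inside 100-200, while 201-300 is
# accepted, assuming both ranges stay outside the NEXUS reserved VLAN window and
# inside the MIN_VLAN_TAG/MAX_VLAN_TAG bounds.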
def _get_network_profile_by_name(self, db_session, name):
"""
Retrieve network profile based on name.
:param db_session: database session
:param name: string representing the name for the network profile
:returns: network profile object
"""
with db_session.begin(subtransactions=True):
try:
return (db_session.query(n1kv_models_v2.NetworkProfile).
filter_by(name=name).one())
except exc.NoResultFound:
raise c_exc.NetworkProfileNotFound(profile=name)
class PolicyProfile_db_mixin(object):
"""Policy Profile Mixin."""
def _get_policy_collection_for_tenant(self, db_session, model, tenant_id):
profile_ids = (db_session.query(n1kv_models_v2.
ProfileBinding.profile_id)
.filter_by(tenant_id=tenant_id).
filter_by(profile_type=c_const.POLICY).all())
profiles = db_session.query(model).filter(model.id.in_(
pid[0] for pid in profile_ids))
return [self._make_policy_profile_dict(p) for p in profiles]
def _make_policy_profile_dict(self, policy_profile, fields=None):
res = {"id": policy_profile["id"], "name": policy_profile["name"]}
return self._fields(res, fields)
def _make_profile_bindings_dict(self, profile_binding, fields=None):
res = {"profile_id": profile_binding["profile_id"],
"tenant_id": profile_binding["tenant_id"]}
return self._fields(res, fields)
def _policy_profile_exists(self, id):
db_session = db.get_session()
return (db_session.query(n1kv_models_v2.PolicyProfile).
filter_by(id=id).first())
def get_policy_profile(self, context, id, fields=None):
"""
Retrieve a policy profile for the given UUID.
:param context: neutron api request context
:param id: UUID representing policy profile to fetch
:params fields: a list of strings that are valid keys in a policy
profile dictionary. Only these fields will be returned
:returns: policy profile dictionary
"""
profile = get_policy_profile(context.session, id)
return self._make_policy_profile_dict(profile, fields)
def get_policy_profiles(self, context, filters=None, fields=None):
"""
Retrieve a list of policy profiles.
Retrieve all policy profiles if tenant is admin. For a non-admin
tenant, retrieve all policy profiles belonging to this tenant only.
:param context: neutron api request context
:param filters: a dictionary with keys that are valid keys for a
policy profile object. Values in this dictionary are
an iterable containing values that will be used for an
exact match comparison for that value. Each result
returned by this function will have matched one of the
values for each key in filters
:params fields: a list of strings that are valid keys in a policy
profile dictionary. Only these fields will be returned
:returns: list of all policy profiles
"""
if context.is_admin:
return self._get_collection(context, n1kv_models_v2.PolicyProfile,
self._make_policy_profile_dict,
filters=filters, fields=fields)
else:
return self._get_policy_collection_for_tenant(context.session,
n1kv_models_v2.
PolicyProfile,
context.tenant_id)
def get_policy_profile_bindings(self, context, filters=None, fields=None):
"""
Retrieve a list of profile bindings for policy profiles.
:param context: neutron api request context
:param filters: a dictionary with keys that are valid keys for a
profile bindings object. Values in this dictionary are
an iterable containing values that will be used for an
exact match comparison for that value. Each result
returned by this function will have matched one of the
values for each key in filters
:params fields: a list of strings that are valid keys in a profile
bindings dictionary. Only these fields will be returned
:returns: list of profile bindings
"""
if context.is_admin:
profile_bindings = _get_profile_bindings(
context.session,
profile_type=c_const.POLICY)
return [self._make_profile_bindings_dict(pb)
for pb in profile_bindings]
def update_policy_profile(self, context, id, policy_profile):
"""
Update a policy profile.
Add or remove a tenant binding for the policy profile, depending on the
option passed, when the user is admin.
:param context: neutron api request context
:param id: UUID representing policy profile to update
:param policy_profile: policy profile dictionary
:returns: updated policy profile dictionary
"""
p = policy_profile["policy_profile"]
if context.is_admin and "add_tenant" in p:
self.add_policy_profile_tenant(context.session,
id,
p["add_tenant"])
return self._make_policy_profile_dict(get_policy_profile(
context.session, id))
if context.is_admin and "remove_tenant" in p:
delete_profile_binding(context.session, p["remove_tenant"], id)
return self._make_policy_profile_dict(get_policy_profile(
context.session, id))
return self._make_policy_profile_dict(
update_policy_profile(context.session, id, p))
def add_policy_profile_tenant(self,
db_session,
policy_profile_id,
tenant_id):
"""
Add a tenant to a policy profile binding.
:param db_session: database session
:param policy_profile_id: UUID representing policy profile
:param tenant_id: UUID representing the tenant
:returns: profile binding object
"""
return create_profile_binding(db_session,
tenant_id,
policy_profile_id,
c_const.POLICY)
def remove_policy_profile_tenant(self, policy_profile_id, tenant_id):
"""
Remove a tenant from a policy profile binding.
:param policy_profile_id: UUID representing policy profile
:param tenant_id: UUID representing the tenant
"""
delete_profile_binding(None, tenant_id, policy_profile_id)
def _delete_policy_profile(self, policy_profile_id):
"""Delete policy profile and associated binding."""
db_session = db.get_session()
with db_session.begin(subtransactions=True):
(db_session.query(n1kv_models_v2.PolicyProfile).
filter_by(id=policy_profile_id).delete())
def _get_policy_profile_by_name(self, name):
"""
Retrieve policy profile based on name.
:param name: string representing the name for the policy profile
:returns: policy profile object
"""
db_session = db.get_session()
with db_session.begin(subtransactions=True):
return (db_session.query(n1kv_models_v2.PolicyProfile).
filter_by(name=name).one())
def _remove_all_fake_policy_profiles(self):
"""
Remove all policy profiles associated with fake tenant id.
This finds all profile IDs where the tenant is not set yet (set A) and
all profiles where a tenant was already set (set B), and removes the
bindings that appear in both sets but still have no tenant id set.
"""
db_session = db.get_session()
with db_session.begin(subtransactions=True):
a_set_q = (db_session.query(n1kv_models_v2.ProfileBinding).
filter_by(tenant_id=c_const.TENANT_ID_NOT_SET,
profile_type=c_const.POLICY))
a_set = set(i.profile_id for i in a_set_q)
b_set_q = (db_session.query(n1kv_models_v2.ProfileBinding).
filter(sql.and_(n1kv_models_v2.ProfileBinding.
tenant_id != c_const.TENANT_ID_NOT_SET,
n1kv_models_v2.ProfileBinding.
profile_type == c_const.POLICY)))
b_set = set(i.profile_id for i in b_set_q)
(db_session.query(n1kv_models_v2.ProfileBinding).
filter(sql.and_(n1kv_models_v2.ProfileBinding.profile_id.
in_(a_set & b_set),
n1kv_models_v2.ProfileBinding.tenant_id ==
c_const.TENANT_ID_NOT_SET)).
delete(synchronize_session="fetch"))
def _add_policy_profile(self,
policy_profile_name,
policy_profile_id,
tenant_id=None):
"""
Add Policy profile and tenant binding.
:param policy_profile_name: string representing the name for the
policy profile
:param policy_profile_id: UUID representing the policy profile
:param tenant_id: UUID representing the tenant
"""
policy_profile = {"id": policy_profile_id, "name": policy_profile_name}
tenant_id = tenant_id or c_const.TENANT_ID_NOT_SET
if not self._policy_profile_exists(policy_profile_id):
create_policy_profile(policy_profile)
create_profile_binding(None,
tenant_id,
policy_profile["id"],
c_const.POLICY)
|
{
"content_hash": "f7c30a943118a1a0f02ada398a970f11",
"timestamp": "",
"source": "github",
"line_count": 1600,
"max_line_length": 79,
"avg_line_length": 42.76375,
"alnum_prop": 0.5907164362339599,
"repo_name": "gopal1cloud/neutron",
"id": "b9093da21ba9fd515a77978883dd70c1db74da26",
"size": "69224",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "neutron/plugins/cisco/db/n1kv_db_v2.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "1451"
},
{
"name": "Python",
"bytes": "9138456"
},
{
"name": "Shell",
"bytes": "9202"
}
],
"symlink_target": ""
}
|
"""
Module for managing the PROJ network settings.
"""
import os
from pathlib import Path
from typing import Union
import certifi
from pyproj._network import ( # noqa: F401 pylint: disable=unused-import
_set_ca_bundle_path,
is_network_enabled,
set_network_enabled,
)
def set_ca_bundle_path(ca_bundle_path: Union[Path, str, bool, None] = None) -> None:
"""
.. versionadded:: 3.0.0
Sets the path to the CA Bundle used by the `curl`
built into PROJ when PROJ network is enabled.
See: :c:func:`proj_context_set_ca_bundle_path`
Environment variables:
- PROJ_CURL_CA_BUNDLE
- CURL_CA_BUNDLE
- SSL_CERT_FILE
Parameters
----------
ca_bundle_path: Union[Path, str, bool, None], optional
Default is None, which only uses the `certifi` package path as a fallback if
the environment variables are not set. If a path is passed in, then
that will be the path used. If it is set to True, then it will default
to using the path provided by the `certifi` package. If it is set to False
or an empty string then it will default to the system settings or environment
variables.
"""
env_var_names = ("PROJ_CURL_CA_BUNDLE", "CURL_CA_BUNDLE", "SSL_CERT_FILE")
if ca_bundle_path is False:
# need to reset CA Bundle path to use system settings
# or environment variables because it
# could have been changed by the user previously
ca_bundle_path = ""
elif isinstance(ca_bundle_path, (str, Path)):
ca_bundle_path = str(ca_bundle_path)
elif (ca_bundle_path is True) or not any(
env_var_name in os.environ for env_var_name in env_var_names
):
ca_bundle_path = certifi.where()
else:
# reset CA Bundle path to use system settings
# or environment variables
ca_bundle_path = ""
_set_ca_bundle_path(ca_bundle_path)
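# Usage sketch, not part of pyproj itself; it relies only on the helpers
# imported or defined in this module, and the CA bundle path shown is an
# illustrative example, not a required location.
#
#   import pyproj.network
#   pyproj.network.set_network_enabled(active=True)  # turn PROJ network access on
#   pyproj.network.set_ca_bundle_path(True)          # force the certifi bundle
#   pyproj.network.set_ca_bundle_path("/etc/ssl/certs/ca-certificates.crt")
#   pyproj.network.set_ca_bundle_path(False)         # back to system/env settings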
|
{
"content_hash": "015e1f5a001fece658526cd069c04379",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 85,
"avg_line_length": 32.42372881355932,
"alnum_prop": 0.6523784631468897,
"repo_name": "ocefpaf/pyproj",
"id": "f25a34b450899ea8053ec7273a97b0d7a038ca65",
"size": "1913",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "pyproj/network.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Cython",
"bytes": "194556"
},
{
"name": "Makefile",
"bytes": "2671"
},
{
"name": "Python",
"bytes": "649387"
},
{
"name": "Shell",
"bytes": "10347"
}
],
"symlink_target": ""
}
|
""" PyTorch Speech2Text model."""
import math
import random
from typing import Optional, Tuple, Union
import torch
from torch import nn
from torch.nn import CrossEntropyLoss
from ...activations import ACT2FN
from ...modeling_outputs import (
BaseModelOutput,
BaseModelOutputWithPastAndCrossAttentions,
Seq2SeqLMOutput,
Seq2SeqModelOutput,
)
from ...modeling_utils import PreTrainedModel
from ...utils import add_start_docstrings, add_start_docstrings_to_model_forward, logging, replace_return_docstrings
from .configuration_speech_to_text import Speech2TextConfig
logger = logging.get_logger(__name__)
_CONFIG_FOR_DOC = "Speech2TextConfig"
SPEECH_TO_TEXT_PRETRAINED_MODEL_ARCHIVE_LIST = [
"facebook/s2t-small-librispeech-asr",
# See all Speech2Text models at https://huggingface.co/models?filter=speech_to_text
]
# Copied from transformers.models.bart.modeling_bart.shift_tokens_right
def shift_tokens_right(input_ids: torch.Tensor, pad_token_id: int, decoder_start_token_id: int):
"""
Shift input ids one token to the right.
"""
shifted_input_ids = input_ids.new_zeros(input_ids.shape)
shifted_input_ids[:, 1:] = input_ids[:, :-1].clone()
shifted_input_ids[:, 0] = decoder_start_token_id
if pad_token_id is None:
raise ValueError("self.model.config.pad_token_id has to be defined.")
# replace possible -100 values in labels by `pad_token_id`
shifted_input_ids.masked_fill_(shifted_input_ids == -100, pad_token_id)
return shifted_input_ids
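# Worked example, not part of the library source: with decoder_start_token_id=2
# and pad_token_id=1, labels [[5, -100, 6]] become [[2, 5, 1]] -- tokens shift
# one position to the right, the start token fills position 0, the last label is
# dropped, and any -100 that survives the shift is replaced by the pad id.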
# Copied from transformers.models.bart.modeling_bart._make_causal_mask
def _make_causal_mask(input_ids_shape: torch.Size, dtype: torch.dtype, past_key_values_length: int = 0):
"""
Make causal mask used for bi-directional self-attention.
"""
bsz, tgt_len = input_ids_shape
mask = torch.full((tgt_len, tgt_len), torch.tensor(torch.finfo(dtype).min))
mask_cond = torch.arange(mask.size(-1))
mask.masked_fill_(mask_cond < (mask_cond + 1).view(mask.size(-1), 1), 0)
mask = mask.to(dtype)
if past_key_values_length > 0:
mask = torch.cat([torch.zeros(tgt_len, past_key_values_length, dtype=dtype), mask], dim=-1)
return mask[None, None, :, :].expand(bsz, 1, tgt_len, tgt_len + past_key_values_length)
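# Worked example, not part of the library source: for tgt_len=3 and no cached
# keys, the mask (before broadcasting to [bsz, 1, 3, 3]) is
#   [[0, m, m],
#    [0, 0, m],
#    [0, 0, 0]]
# with m = torch.finfo(dtype).min, so each position attends only to itself and
# to earlier positions. With past_key_values_length=2, two extra zero columns
# are prepended so every query can also attend to the cached positions.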
# Copied from transformers.models.bart.modeling_bart._expand_mask
def _expand_mask(mask: torch.Tensor, dtype: torch.dtype, tgt_len: Optional[int] = None):
"""
Expands attention_mask from `[bsz, seq_len]` to `[bsz, 1, tgt_seq_len, src_seq_len]`.
"""
bsz, src_len = mask.size()
tgt_len = tgt_len if tgt_len is not None else src_len
expanded_mask = mask[:, None, None, :].expand(bsz, 1, tgt_len, src_len).to(dtype)
inverted_mask = 1.0 - expanded_mask
return inverted_mask.masked_fill(inverted_mask.to(torch.bool), torch.finfo(dtype).min)
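# Worked example, not part of the library source: a padding mask [[1, 1, 0]] is
# expanded to shape [1, 1, tgt_len, 3] where the two visible positions map to
# 0.0 and the padded position maps to torch.finfo(dtype).min, i.e. it is removed
# additively from the attention scores.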
class Conv1dSubsampler(nn.Module):
"""
Convolutional subsampler: a stack of 1D convolution (along temporal dimension) followed by non-linear activation
via gated linear units (https://arxiv.org/abs/1911.08460)
"""
def __init__(self, config):
super(Conv1dSubsampler, self).__init__()
self.config = config
self.num_layers = config.num_conv_layers
self.in_channels = config.input_feat_per_channel * config.input_channels
self.mid_channels = config.conv_channels
self.out_channels = config.d_model
self.kernel_sizes = config.conv_kernel_sizes
self.conv_layers = nn.ModuleList(
nn.Conv1d(
self.in_channels if i == 0 else self.mid_channels // 2,
self.mid_channels if i < self.num_layers - 1 else self.out_channels * 2,
kernel_size=k,
stride=2,
padding=k // 2,
)
for i, k in enumerate(self.kernel_sizes)
)
def forward(self, input_features):
hidden_states = input_features.transpose(1, 2).contiguous() # -> B x (C x D) x T
for conv in self.conv_layers:
hidden_states = conv(hidden_states)
hidden_states = nn.functional.glu(hidden_states, dim=1)
hidden_states = hidden_states.transpose(1, 2).contiguous()  # -> B x T x (C x D)
return hidden_states
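# Shape sketch, not part of the library source, assuming two conv layers with
# stride 2: input_features of shape (batch, 100, input_feat_per_channel *
# input_channels) are transposed to channels-first, halved twice along time
# (100 -> 50 -> 25 frames, matching _get_feat_extract_output_lengths below), and
# returned as (batch, 25, d_model) after each GLU halves the channel dimension.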
class Speech2TextSinusoidalPositionalEmbedding(nn.Module):
"""This module produces sinusoidal positional embeddings of any length."""
def __init__(self, num_positions: int, embedding_dim: int, padding_idx: Optional[int] = None):
super().__init__()
self.offset = 2
self.embedding_dim = embedding_dim
self.padding_idx = padding_idx
self.make_weights(num_positions + self.offset, embedding_dim, padding_idx)
def make_weights(self, num_embeddings: int, embedding_dim: int, padding_idx: Optional[int] = None):
emb_weights = self.get_embedding(num_embeddings, embedding_dim, padding_idx)
if hasattr(self, "weights"):
# in forward put the weights on the correct dtype and device of the param
emb_weights = emb_weights.to(dtype=self.weights.dtype, device=self.weights.device)
self.weights = nn.Parameter(emb_weights)
self.weights.requires_grad = False
self.weights.detach_()
@staticmethod
def get_embedding(num_embeddings: int, embedding_dim: int, padding_idx: Optional[int] = None):
"""
Build sinusoidal embeddings. This matches the implementation in tensor2tensor, but differs slightly from the
description in Section 3.5 of "Attention Is All You Need".
"""
half_dim = embedding_dim // 2
emb = math.log(10000) / (half_dim - 1)
emb = torch.exp(torch.arange(half_dim, dtype=torch.float) * -emb)
emb = torch.arange(num_embeddings, dtype=torch.float).unsqueeze(1) * emb.unsqueeze(0)
emb = torch.cat([torch.sin(emb), torch.cos(emb)], dim=1).view(num_embeddings, -1)
if embedding_dim % 2 == 1:
# zero pad
emb = torch.cat([emb, torch.zeros(num_embeddings, 1)], dim=1)
if padding_idx is not None:
emb[padding_idx, :] = 0
return emb.to(torch.get_default_dtype())
@torch.no_grad()
def forward(self, input_ids: torch.Tensor, past_key_values_length: int = 0):
bsz, seq_len = input_ids.size()
# Create the position ids from the input token ids. Any padded tokens remain padded.
position_ids = self.create_position_ids_from_input_ids(input_ids, self.padding_idx, past_key_values_length).to(
input_ids.device
)
# expand embeddings if needed
max_pos = self.padding_idx + 1 + seq_len
if max_pos > self.weights.size(0):
self.make_weights(max_pos + self.offset, self.embedding_dim, self.padding_idx)
return self.weights.index_select(0, position_ids.view(-1)).view(bsz, seq_len, -1).detach()
def create_position_ids_from_input_ids(
self, input_ids: torch.Tensor, padding_idx: int, past_key_values_length: Optional[int] = 0
):
"""
Replace non-padding symbols with their position numbers. Position numbers begin at padding_idx+1. Padding
symbols are ignored. This is modified from fairseq's `utils.make_positions`.
Args:
input_ids: torch.Tensor
Returns: torch.Tensor
"""
# The series of casts and type-conversions here are carefully balanced to both work with ONNX export and XLA.
mask = input_ids.ne(padding_idx).int()
incremental_indices = (torch.cumsum(mask, dim=1).type_as(mask) + past_key_values_length) * mask
return incremental_indices.long() + padding_idx
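# Worked example, not part of the library source: with padding_idx=1 and
# input_ids=[[0, 5, 1, 1]], the non-pad mask is [1, 1, 0, 0], its cumulative sum
# is [1, 2, 2, 2], masking gives [1, 2, 0, 0], and adding padding_idx yields
# position ids [2, 3, 1, 1] -- real tokens count up from padding_idx + 1 while
# pad tokens keep the padding position.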
# Copied from transformers.models.bart.modeling_bart.BartAttention with Bart->Speech2Text
class Speech2TextAttention(nn.Module):
"""Multi-headed attention from 'Attention Is All You Need' paper"""
def __init__(
self,
embed_dim: int,
num_heads: int,
dropout: float = 0.0,
is_decoder: bool = False,
bias: bool = True,
):
super().__init__()
self.embed_dim = embed_dim
self.num_heads = num_heads
self.dropout = dropout
self.head_dim = embed_dim // num_heads
if (self.head_dim * num_heads) != self.embed_dim:
raise ValueError(
f"embed_dim must be divisible by num_heads (got `embed_dim`: {self.embed_dim}"
f" and `num_heads`: {num_heads})."
)
self.scaling = self.head_dim**-0.5
self.is_decoder = is_decoder
self.k_proj = nn.Linear(embed_dim, embed_dim, bias=bias)
self.v_proj = nn.Linear(embed_dim, embed_dim, bias=bias)
self.q_proj = nn.Linear(embed_dim, embed_dim, bias=bias)
self.out_proj = nn.Linear(embed_dim, embed_dim, bias=bias)
def _shape(self, tensor: torch.Tensor, seq_len: int, bsz: int):
return tensor.view(bsz, seq_len, self.num_heads, self.head_dim).transpose(1, 2).contiguous()
def forward(
self,
hidden_states: torch.Tensor,
key_value_states: Optional[torch.Tensor] = None,
past_key_value: Optional[Tuple[torch.Tensor]] = None,
attention_mask: Optional[torch.Tensor] = None,
layer_head_mask: Optional[torch.Tensor] = None,
output_attentions: bool = False,
) -> Tuple[torch.Tensor, Optional[torch.Tensor], Optional[Tuple[torch.Tensor]]]:
"""Input shape: Batch x Time x Channel"""
# if key_value_states are provided this layer is used as a cross-attention layer
# for the decoder
is_cross_attention = key_value_states is not None
bsz, tgt_len, _ = hidden_states.size()
# get query proj
query_states = self.q_proj(hidden_states) * self.scaling
# get key, value proj
if is_cross_attention and past_key_value is not None:
# reuse k,v, cross_attentions
key_states = past_key_value[0]
value_states = past_key_value[1]
elif is_cross_attention:
# cross_attentions
key_states = self._shape(self.k_proj(key_value_states), -1, bsz)
value_states = self._shape(self.v_proj(key_value_states), -1, bsz)
elif past_key_value is not None:
# reuse k, v, self_attention
key_states = self._shape(self.k_proj(hidden_states), -1, bsz)
value_states = self._shape(self.v_proj(hidden_states), -1, bsz)
key_states = torch.cat([past_key_value[0], key_states], dim=2)
value_states = torch.cat([past_key_value[1], value_states], dim=2)
else:
# self_attention
key_states = self._shape(self.k_proj(hidden_states), -1, bsz)
value_states = self._shape(self.v_proj(hidden_states), -1, bsz)
if self.is_decoder:
# if cross_attention save Tuple(torch.Tensor, torch.Tensor) of all cross attention key/value_states.
# Further calls to cross_attention layer can then reuse all cross-attention
# key/value_states (first "if" case)
# if uni-directional self-attention (decoder) save Tuple(torch.Tensor, torch.Tensor) of
# all previous decoder key/value_states. Further calls to uni-directional self-attention
# can concat previous decoder key/value_states to current projected key/value_states (third "elif" case)
# if encoder bi-directional self-attention `past_key_value` is always `None`
past_key_value = (key_states, value_states)
proj_shape = (bsz * self.num_heads, -1, self.head_dim)
query_states = self._shape(query_states, tgt_len, bsz).view(*proj_shape)
key_states = key_states.view(*proj_shape)
value_states = value_states.view(*proj_shape)
src_len = key_states.size(1)
attn_weights = torch.bmm(query_states, key_states.transpose(1, 2))
if attn_weights.size() != (bsz * self.num_heads, tgt_len, src_len):
raise ValueError(
f"Attention weights should be of size {(bsz * self.num_heads, tgt_len, src_len)}, but is"
f" {attn_weights.size()}"
)
if attention_mask is not None:
if attention_mask.size() != (bsz, 1, tgt_len, src_len):
raise ValueError(
f"Attention mask should be of size {(bsz, 1, tgt_len, src_len)}, but is {attention_mask.size()}"
)
attn_weights = attn_weights.view(bsz, self.num_heads, tgt_len, src_len) + attention_mask
attn_weights = attn_weights.view(bsz * self.num_heads, tgt_len, src_len)
attn_weights = nn.functional.softmax(attn_weights, dim=-1)
if layer_head_mask is not None:
if layer_head_mask.size() != (self.num_heads,):
raise ValueError(
f"Head mask for a single layer should be of size {(self.num_heads,)}, but is"
f" {layer_head_mask.size()}"
)
attn_weights = layer_head_mask.view(1, -1, 1, 1) * attn_weights.view(bsz, self.num_heads, tgt_len, src_len)
attn_weights = attn_weights.view(bsz * self.num_heads, tgt_len, src_len)
if output_attentions:
# this operation is a bit awkward, but it's required to
# make sure that attn_weights keeps its gradient.
# In order to do so, attn_weights have to be reshaped
# twice and have to be reused in the following
attn_weights_reshaped = attn_weights.view(bsz, self.num_heads, tgt_len, src_len)
attn_weights = attn_weights_reshaped.view(bsz * self.num_heads, tgt_len, src_len)
else:
attn_weights_reshaped = None
attn_probs = nn.functional.dropout(attn_weights, p=self.dropout, training=self.training)
attn_output = torch.bmm(attn_probs, value_states)
if attn_output.size() != (bsz * self.num_heads, tgt_len, self.head_dim):
raise ValueError(
f"`attn_output` should be of size {(bsz, self.num_heads, tgt_len, self.head_dim)}, but is"
f" {attn_output.size()}"
)
attn_output = attn_output.view(bsz, self.num_heads, tgt_len, self.head_dim)
attn_output = attn_output.transpose(1, 2)
# Use the `embed_dim` from the config (stored in the class) rather than `hidden_state` because `attn_output` can be
# partitioned across GPUs when using tensor-parallelism.
attn_output = attn_output.reshape(bsz, tgt_len, self.embed_dim)
attn_output = self.out_proj(attn_output)
return attn_output, attn_weights_reshaped, past_key_value
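# Shape walkthrough for the attention above, not part of the library source:
# with bsz=2, num_heads=4, head_dim=16 (embed_dim=64), tgt_len=5 and src_len=7,
# the projected query/key/value tensors are reshaped to (2 * 4, seq, 16),
# attn_weights has shape (8, 5, 7), attn_output has shape (8, 5, 16), and the
# final reshape plus out_proj returns (2, 5, 64).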
class Speech2TextEncoderLayer(nn.Module):
def __init__(self, config: Speech2TextConfig):
super().__init__()
self.embed_dim = config.d_model
self.self_attn = Speech2TextAttention(
embed_dim=self.embed_dim,
num_heads=config.encoder_attention_heads,
dropout=config.attention_dropout,
)
self.self_attn_layer_norm = nn.LayerNorm(self.embed_dim)
self.dropout = config.dropout
self.activation_fn = ACT2FN[config.activation_function]
self.activation_dropout = config.activation_dropout
self.fc1 = nn.Linear(self.embed_dim, config.encoder_ffn_dim)
self.fc2 = nn.Linear(config.encoder_ffn_dim, self.embed_dim)
self.final_layer_norm = nn.LayerNorm(self.embed_dim)
def forward(
self,
hidden_states: torch.Tensor,
attention_mask: torch.Tensor,
layer_head_mask: torch.Tensor,
output_attentions: bool = False,
):
"""
Args:
hidden_states (`torch.FloatTensor`): input to the layer of shape `(seq_len, batch, embed_dim)`
attention_mask (`torch.FloatTensor`): attention mask of size
`(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values.
layer_head_mask (`torch.FloatTensor`): mask for attention heads in a given layer of size
`(config.encoder_attention_heads,)`.
output_attentions (`bool`, *optional*):
Whether or not to return the attentions tensors of all attention layers. See `attentions` under
returned tensors for more detail.
"""
residual = hidden_states
hidden_states = self.self_attn_layer_norm(hidden_states)
hidden_states, attn_weights, _ = self.self_attn(
hidden_states=hidden_states,
attention_mask=attention_mask,
layer_head_mask=layer_head_mask,
output_attentions=output_attentions,
)
hidden_states = nn.functional.dropout(hidden_states, p=self.dropout, training=self.training)
hidden_states = residual + hidden_states
residual = hidden_states
hidden_states = self.final_layer_norm(hidden_states)
hidden_states = self.activation_fn(self.fc1(hidden_states))
hidden_states = nn.functional.dropout(hidden_states, p=self.activation_dropout, training=self.training)
hidden_states = self.fc2(hidden_states)
hidden_states = nn.functional.dropout(hidden_states, p=self.dropout, training=self.training)
hidden_states = residual + hidden_states
if hidden_states.dtype == torch.float16 and (
torch.isinf(hidden_states).any() or torch.isnan(hidden_states).any()
):
clamp_value = torch.finfo(hidden_states.dtype).max - 1000
hidden_states = torch.clamp(hidden_states, min=-clamp_value, max=clamp_value)
outputs = (hidden_states,)
if output_attentions:
outputs += (attn_weights,)
return outputs
class Speech2TextDecoderLayer(nn.Module):
def __init__(self, config: Speech2TextConfig):
super().__init__()
self.embed_dim = config.d_model
self.self_attn = Speech2TextAttention(
embed_dim=self.embed_dim,
num_heads=config.decoder_attention_heads,
dropout=config.attention_dropout,
is_decoder=True,
)
self.dropout = config.dropout
self.activation_fn = ACT2FN[config.activation_function]
self.activation_dropout = config.activation_dropout
self.self_attn_layer_norm = nn.LayerNorm(self.embed_dim)
self.encoder_attn = Speech2TextAttention(
self.embed_dim,
config.decoder_attention_heads,
dropout=config.attention_dropout,
is_decoder=True,
)
self.encoder_attn_layer_norm = nn.LayerNorm(self.embed_dim)
self.fc1 = nn.Linear(self.embed_dim, config.decoder_ffn_dim)
self.fc2 = nn.Linear(config.decoder_ffn_dim, self.embed_dim)
self.final_layer_norm = nn.LayerNorm(self.embed_dim)
def forward(
self,
hidden_states: torch.Tensor,
attention_mask: Optional[torch.Tensor] = None,
encoder_hidden_states: Optional[torch.Tensor] = None,
encoder_attention_mask: Optional[torch.Tensor] = None,
layer_head_mask: Optional[torch.Tensor] = None,
cross_attn_layer_head_mask: Optional[torch.Tensor] = None,
past_key_value: Optional[Tuple[torch.Tensor]] = None,
output_attentions: Optional[bool] = False,
use_cache: Optional[bool] = True,
):
"""
Args:
hidden_states (`torch.FloatTensor`): input to the layer of shape `(seq_len, batch, embed_dim)`
attention_mask (`torch.FloatTensor`): attention mask of size
`(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values.
encoder_hidden_states (`torch.FloatTensor`):
cross attention input to the layer of shape `(seq_len, batch, embed_dim)`
encoder_attention_mask (`torch.FloatTensor`): encoder attention mask of size
`(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values.
layer_head_mask (`torch.FloatTensor`): mask for attention heads in a given layer of size
`(encoder_attention_heads,)`.
cross_attn_layer_head_mask (`torch.FloatTensor`): mask for cross-attention heads in a given layer of
size *(decoder_attention_heads,)*.
past_key_value (`Tuple(torch.FloatTensor)`): cached past key and value projection states
output_attentions (`bool`, *optional*):
Whether or not to return the attentions tensors of all attention layers. See `attentions` under
returned tensors for more detail.
"""
residual = hidden_states
hidden_states = self.self_attn_layer_norm(hidden_states)
# Self Attention
# decoder uni-directional self-attention cached key/values tuple is at positions 1,2
self_attn_past_key_value = past_key_value[:2] if past_key_value is not None else None
# add present self-attn cache to positions 1,2 of present_key_value tuple
hidden_states, self_attn_weights, present_key_value = self.self_attn(
hidden_states=hidden_states,
past_key_value=self_attn_past_key_value,
attention_mask=attention_mask,
layer_head_mask=layer_head_mask,
output_attentions=output_attentions,
)
hidden_states = nn.functional.dropout(hidden_states, p=self.dropout, training=self.training)
hidden_states = residual + hidden_states
# Cross-Attention Block
cross_attn_present_key_value = None
cross_attn_weights = None
if encoder_hidden_states is not None:
residual = hidden_states
hidden_states = self.encoder_attn_layer_norm(hidden_states)
# cross_attn cached key/values tuple is at positions 3,4 of present_key_value tuple
cross_attn_past_key_value = past_key_value[-2:] if past_key_value is not None else None
hidden_states, cross_attn_weights, cross_attn_present_key_value = self.encoder_attn(
hidden_states=hidden_states,
key_value_states=encoder_hidden_states,
attention_mask=encoder_attention_mask,
layer_head_mask=cross_attn_layer_head_mask,
past_key_value=cross_attn_past_key_value,
output_attentions=output_attentions,
)
hidden_states = nn.functional.dropout(hidden_states, p=self.dropout, training=self.training)
hidden_states = residual + hidden_states
# add cross-attn to positions 3,4 of present_key_value tuple
present_key_value = present_key_value + cross_attn_present_key_value
# Fully Connected
residual = hidden_states
hidden_states = self.final_layer_norm(hidden_states)
hidden_states = self.activation_fn(self.fc1(hidden_states))
hidden_states = nn.functional.dropout(hidden_states, p=self.activation_dropout, training=self.training)
hidden_states = self.fc2(hidden_states)
hidden_states = nn.functional.dropout(hidden_states, p=self.dropout, training=self.training)
hidden_states = residual + hidden_states
outputs = (hidden_states,)
if output_attentions:
outputs += (self_attn_weights, cross_attn_weights)
if use_cache:
outputs += (present_key_value,)
return outputs
class Speech2TextPreTrainedModel(PreTrainedModel):
config_class = Speech2TextConfig
base_model_prefix = "model"
main_input_name = "input_features"
supports_gradient_checkpointing = True
def _init_weights(self, module):
std = self.config.init_std
if isinstance(module, (nn.Linear, nn.Conv1d)):
module.weight.data.normal_(mean=0.0, std=std)
if module.bias is not None:
module.bias.data.zero_()
elif isinstance(module, nn.Embedding):
module.weight.data.normal_(mean=0.0, std=std)
if module.padding_idx is not None:
module.weight.data[module.padding_idx].zero_()
def _set_gradient_checkpointing(self, module, value=False):
if isinstance(module, (Speech2TextDecoder, Speech2TextEncoder)):
module.gradient_checkpointing = value
def _get_feat_extract_output_lengths(self, input_lengths: torch.LongTensor):
"""
Computes the output length of the convolutional layers
"""
for i in range(self.config.num_conv_layers):
input_lengths = (input_lengths - 1) // 2 + 1
return input_lengths
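# Worked example, not part of the library source, assuming two conv layers:
# input lengths tensor([100, 37]) become tensor([50, 19]) after the first layer
# and tensor([25, 10]) after the second, since each layer maps L to (L - 1) // 2 + 1.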
def _get_feature_vector_attention_mask(self, feature_vector_length, attention_mask):
# `generate` creates a 3D attention mask because of the shape of input_features;
# convert it to 2D if that's the case
if len(attention_mask.shape) > 2:
attention_mask = attention_mask[:, :, -1]
subsampled_lengths = self._get_feat_extract_output_lengths(attention_mask.sum(-1))
bsz = attention_mask.size()[0]
attention_mask = torch.zeros(
(bsz, feature_vector_length), dtype=attention_mask.dtype, device=attention_mask.device
)
# these two operations makes sure that all values
# before the output lengths indices are attended to
attention_mask[(torch.arange(bsz, device=attention_mask.device), subsampled_lengths - 1)] = 1
attention_mask = attention_mask.flip([-1]).cumsum(-1).flip([-1]).long()
return attention_mask
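# Worked example, not part of the library source: for subsampled lengths [3, 2]
# and feature_vector_length=4, the mask starts as zeros, position length-1 in
# each row is set to 1, and the flip/cumsum/flip trick fills everything up to
# that index, giving [[1, 1, 1, 0], [1, 1, 0, 0]].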
SPEECH_TO_TEXT_START_DOCSTRING = r"""
This model inherits from [`PreTrainedModel`]. Check the superclass documentation for the generic methods the
library implements for all its model (such as downloading or saving, resizing the input embeddings, pruning heads
etc.)
This model is also a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) subclass.
Use it as a regular PyTorch Module and refer to the PyTorch documentation for all matter related to general usage
and behavior.
Parameters:
config ([`Speech2TextConfig`]):
Model configuration class with all the parameters of the model. Initializing with a config file does not
load the weights associated with the model, only the configuration. Check out the
[`~PreTrainedModel.from_pretrained`] method to load the model weights.
"""
SPEECH_TO_TEXT_INPUTS_DOCSTRING = r"""
Args:
input_features (`torch.FloatTensor` of shape `(batch_size, sequence_length, feature_size)`):
Float values of fbank features extracted from the raw speech waveform. Raw speech waveform can be obtained
by loading a `.flac` or `.wav` audio file into an array of type `List[float]` or a `numpy.ndarray`, *e.g.*
via the soundfile library (`pip install soundfile`). To prepare the array into `input_features`, the
[`Speech2TextFeatureExtractor`] should be used for extracting the fbank features, padding and conversion
into a tensor of type `torch.FloatTensor`. See [`~Speech2TextFeatureExtractor.__call__`]
attention_mask (`torch.Tensor` of shape `(batch_size, sequence_length)`, *optional*):
Mask to avoid performing convolution and attention on padding token indices. Mask values selected in `[0,
1]`:
- 1 for tokens that are **not masked**,
- 0 for tokens that are **masked**.
[What are attention masks?](../glossary#attention-mask)
decoder_input_ids (`torch.LongTensor` of shape `(batch_size, target_sequence_length)`, *optional*):
Indices of decoder input sequence tokens in the vocabulary.
Indices can be obtained using [`SpeechToTextTokenizer`]. See [`PreTrainedTokenizer.encode`] and
[`PreTrainedTokenizer.__call__`] for details.
[What are decoder input IDs?](../glossary#decoder-input-ids)
SpeechToText uses the `eos_token_id` as the starting token for `decoder_input_ids` generation. If
`past_key_values` is used, optionally only the last `decoder_input_ids` have to be input (see
`past_key_values`).
decoder_attention_mask (`torch.LongTensor` of shape `(batch_size, target_sequence_length)`, *optional*):
Default behavior: generate a tensor that ignores pad tokens in `decoder_input_ids`. Causal mask will also
be used by default.
If you want to change padding behavior, you should read
[`modeling_speech_to_text._prepare_decoder_attention_mask`] and modify to your needs. See diagram 1 in [the
paper](https://arxiv.org/abs/1910.13461) for more information on the default strategy.
head_mask (`torch.Tensor` of shape `(encoder_layers, encoder_attention_heads)`, *optional*):
Mask to nullify selected heads of the attention modules in the encoder. Mask values selected in `[0, 1]`:
- 1 indicates the head is **not masked**,
- 0 indicates the head is **masked**.
decoder_head_mask (`torch.Tensor` of shape `(decoder_layers, decoder_attention_heads)`, *optional*):
Mask to nullify selected heads of the attention modules in the decoder. Mask values selected in `[0, 1]`:
- 1 indicates the head is **not masked**,
- 0 indicates the head is **masked**.
cross_attn_head_mask (`torch.Tensor` of shape `(decoder_layers, decoder_attention_heads)`, *optional*):
Mask to nullify selected heads of the cross-attention modules. Mask values selected in `[0, 1]`:
- 1 indicates the head is **not masked**,
- 0 indicates the head is **masked**.
encoder_outputs (`tuple(tuple(torch.FloatTensor)`, *optional*):
Tuple consists of (`last_hidden_state`, *optional*: `hidden_states`, *optional*: `attentions`)
`last_hidden_state` of shape `(batch_size, sequence_length, hidden_size)`, *optional*) is a sequence of
hidden-states at the output of the last layer of the encoder. Used in the cross-attention of the decoder.
past_key_values (`tuple(tuple(torch.FloatTensor))`, *optional*, returned when `use_cache=True` is passed or when `config.use_cache=True`):
Tuple of `tuple(torch.FloatTensor)` of length `config.n_layers`, with each tuple having 2 tensors of shape
`(batch_size, num_heads, sequence_length, embed_size_per_head)`) and 2 additional tensors of shape
`(batch_size, num_heads, encoder_sequence_length, embed_size_per_head)`.
Contains pre-computed hidden-states (key and values in the self-attention blocks and in the cross-attention
blocks) that can be used (see `past_key_values` input) to speed up sequential decoding.
If `past_key_values` are used, the user can optionally input only the last `decoder_input_ids` (those that
don't have their past key value states given to this model) of shape `(batch_size, 1)` instead of all
`decoder_input_ids` of shape `(batch_size, sequence_length)`.
decoder_inputs_embeds (`torch.FloatTensor` of shape `(batch_size, target_sequence_length, hidden_size)`, *optional*):
Optionally, instead of passing `decoder_input_ids` you can choose to directly pass an embedded
representation. If `past_key_values` is used, optionally only the last `decoder_inputs_embeds` have to be
input (see `past_key_values`). This is useful if you want more control over how to convert
`decoder_input_ids` indices into associated vectors than the model's internal embedding lookup matrix.
If `decoder_input_ids` and `decoder_inputs_embeds` are both unset, `decoder_inputs_embeds` takes the value
of `inputs_embeds`.
use_cache (`bool`, *optional*):
If set to `True`, `past_key_values` key value states are returned and can be used to speed up decoding (see
`past_key_values`).
output_attentions (`bool`, *optional*):
Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned
tensors for more detail.
output_hidden_states (`bool`, *optional*):
Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for
more detail.
return_dict (`bool`, *optional*):
Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple.
"""
class Speech2TextEncoder(Speech2TextPreTrainedModel):
"""
Transformer encoder consisting of *config.encoder_layers* self attention layers. Each layer is a
[`Speech2TextEncoderLayer`].
Args:
config: Speech2TextConfig
embed_tokens (nn.Embedding): output embedding
"""
def __init__(self, config: Speech2TextConfig):
super().__init__(config)
self.dropout = config.dropout
self.layerdrop = config.encoder_layerdrop
embed_dim = config.d_model
self.padding_idx = config.pad_token_id
self.max_source_positions = config.max_source_positions
self.embed_scale = math.sqrt(embed_dim) if config.scale_embedding else 1.0
self.conv = Conv1dSubsampler(config)
self.embed_positions = Speech2TextSinusoidalPositionalEmbedding(
self.max_source_positions,
embed_dim,
self.padding_idx,
)
self.layers = nn.ModuleList([Speech2TextEncoderLayer(config) for _ in range(config.encoder_layers)])
self.layer_norm = nn.LayerNorm(config.d_model)
self.gradient_checkpointing = False
# Initialize weights and apply final processing
self.post_init()
def forward(
self,
input_features,
attention_mask=None,
head_mask=None,
output_attentions=None,
output_hidden_states=None,
return_dict=None,
):
r"""
Args:
input_features (`torch.LongTensor` of shape `(batch_size, sequence_length, feature_size)`):
Float values of fbank features extracted from the raw speech waveform. Raw speech waveform can be
obtained by loading a `.flac` or `.wav` audio file into an array of type `List[float]` or a
`numpy.ndarray`, *e.g.* via the soundfile library (`pip install soundfile`). To prepare the array into
`input_features`, the [`Speech2TextFeatureExtractor`] should be used for extracting the fbank features,
padding and conversion into a tensor of type `torch.FloatTensor`. See
[`~Speech2TextFeatureExtractor.__call__`]
attention_mask (`torch.Tensor` of shape `(batch_size, sequence_length)`, *optional*):
Mask to avoid performing convolution and attention on padding token indices. Mask values selected in
`[0, 1]`:
- 1 for tokens that are **not masked**,
- 0 for tokens that are **masked**.
[What are attention masks?](../glossary#attention-mask)
head_mask (`torch.Tensor` of shape `(encoder_layers, encoder_attention_heads)`, *optional*):
Mask to nullify selected heads of the attention modules. Mask values selected in `[0, 1]`:
- 1 indicates the head is **not masked**,
- 0 indicates the head is **masked**.
output_attentions (`bool`, *optional*):
Whether or not to return the attentions tensors of all attention layers. See `attentions` under
returned tensors for more detail.
output_hidden_states (`bool`, *optional*):
Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors
for more detail.
return_dict (`bool`, *optional*):
Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple.
"""
output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
output_hidden_states = (
output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
)
return_dict = return_dict if return_dict is not None else self.config.use_return_dict
inputs_embeds = self.conv(input_features)
inputs_embeds = self.embed_scale * inputs_embeds
# subsample attention mask if necessary
if attention_mask is not None:
attention_mask = self._get_feature_vector_attention_mask(inputs_embeds.shape[1], attention_mask)
padding_mask = attention_mask.ne(1).long()
else:
padding_mask = torch.zeros(inputs_embeds.shape[:2], dtype=torch.long, device=inputs_embeds.device)
embed_pos = self.embed_positions(padding_mask)
hidden_states = inputs_embeds + embed_pos
hidden_states = nn.functional.dropout(hidden_states, p=self.dropout, training=self.training)
# expand attention_mask
if attention_mask is not None:
# [bsz, seq_len] -> [bsz, 1, tgt_seq_len, src_seq_len]
attention_mask = _expand_mask(attention_mask, inputs_embeds.dtype)
encoder_states = () if output_hidden_states else None
all_attentions = () if output_attentions else None
# check if head_mask has a correct number of layers specified if desired
if head_mask is not None:
assert head_mask.size()[0] == (
len(self.layers)
), f"The head_mask should be specified for {len(self.layers)} layers, but it is for {head_mask.size()[0]}."
for idx, encoder_layer in enumerate(self.layers):
if output_hidden_states:
encoder_states = encoder_states + (hidden_states,)
# add LayerDrop (see https://arxiv.org/abs/1909.11556 for description)
dropout_probability = random.uniform(0, 1)
if self.training and (dropout_probability < self.layerdrop): # skip the layer
layer_outputs = (None, None)
else:
if self.gradient_checkpointing and self.training:
def create_custom_forward(module):
def custom_forward(*inputs):
return module(*inputs, output_attentions)
return custom_forward
layer_outputs = torch.utils.checkpoint.checkpoint(
create_custom_forward(encoder_layer),
hidden_states,
attention_mask,
(head_mask[idx] if head_mask is not None else None),
)
else:
layer_outputs = encoder_layer(
hidden_states,
attention_mask,
layer_head_mask=(head_mask[idx] if head_mask is not None else None),
output_attentions=output_attentions,
)
hidden_states = layer_outputs[0]
if output_attentions:
all_attentions = all_attentions + (layer_outputs[1],)
hidden_states = self.layer_norm(hidden_states)
if output_hidden_states:
encoder_states = encoder_states + (hidden_states,)
if not return_dict:
return tuple(v for v in [hidden_states, encoder_states, all_attentions] if v is not None)
return BaseModelOutput(
last_hidden_state=hidden_states, hidden_states=encoder_states, attentions=all_attentions
)
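# End-to-end shape sketch for the encoder above, not part of the library source
# and using illustrative values: fbank input_features of shape (batch, T, 80)
# are subsampled to roughly T // 4 frames by the two strided convolutions,
# combined with sinusoidal positions, passed through config.encoder_layers
# transformer layers, and returned as last_hidden_state of shape
# (batch, ~T // 4, d_model).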
class Speech2TextDecoder(Speech2TextPreTrainedModel):
"""
Transformer decoder consisting of *config.decoder_layers* layers. Each layer is a [`Speech2TextDecoderLayer`]
Args:
config: Speech2TextConfig
embed_tokens (nn.Embedding): output embedding
"""
def __init__(self, config: Speech2TextConfig):
super().__init__(config)
self.dropout = config.dropout
self.layerdrop = config.decoder_layerdrop
self.padding_idx = config.pad_token_id
self.max_target_positions = config.max_target_positions
self.embed_scale = math.sqrt(config.d_model) if config.scale_embedding else 1.0
self.embed_tokens = nn.Embedding(config.vocab_size, config.d_model, self.padding_idx)
self.embed_positions = Speech2TextSinusoidalPositionalEmbedding(
self.max_target_positions,
config.d_model,
self.padding_idx,
)
self.layers = nn.ModuleList([Speech2TextDecoderLayer(config) for _ in range(config.decoder_layers)])
self.layer_norm = nn.LayerNorm(config.d_model)
self.gradient_checkpointing = False
# Initialize weights and apply final processing
self.post_init()
def get_input_embeddings(self):
return self.embed_tokens
def set_input_embeddings(self, value):
self.embed_tokens = value
def _prepare_decoder_attention_mask(self, attention_mask, input_shape, inputs_embeds, past_key_values_length):
# create causal mask
# [bsz, seq_len] -> [bsz, 1, tgt_seq_len, src_seq_len]
combined_attention_mask = None
if input_shape[-1] > 1:
combined_attention_mask = _make_causal_mask(
input_shape, inputs_embeds.dtype, past_key_values_length=past_key_values_length
).to(inputs_embeds.device)
if attention_mask is not None:
# [bsz, seq_len] -> [bsz, 1, tgt_seq_len, src_seq_len]
expanded_attn_mask = _expand_mask(attention_mask, inputs_embeds.dtype, tgt_len=input_shape[-1])
combined_attention_mask = (
expanded_attn_mask if combined_attention_mask is None else expanded_attn_mask + combined_attention_mask
)
return combined_attention_mask
def forward(
self,
input_ids=None,
attention_mask=None,
encoder_hidden_states=None,
encoder_attention_mask=None,
head_mask=None,
cross_attn_head_mask=None,
past_key_values=None,
inputs_embeds=None,
use_cache=None,
output_attentions=None,
output_hidden_states=None,
return_dict=None,
):
r"""
Args:
input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`):
Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you
provide it.
Indices can be obtained using [`Speech2TextTokenizer`]. See [`PreTrainedTokenizer.encode`] and
[`PreTrainedTokenizer.__call__`] for details.
[What are input IDs?](../glossary#input-ids)
attention_mask (`torch.Tensor` of shape `(batch_size, sequence_length)`, *optional*):
Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`:
- 1 for tokens that are **not masked**,
- 0 for tokens that are **masked**.
[What are attention masks?](../glossary#attention-mask)
encoder_hidden_states (`torch.FloatTensor` of shape `(batch_size, encoder_sequence_length, hidden_size)`, *optional*):
Sequence of hidden-states at the output of the last layer of the encoder. Used in the cross-attention
of the decoder.
encoder_attention_mask (`torch.LongTensor` of shape `(batch_size, encoder_sequence_length)`, *optional*):
Mask to avoid performing cross-attention on padding tokens indices of encoder input_ids. Mask values
selected in `[0, 1]`:
- 1 for tokens that are **not masked**,
- 0 for tokens that are **masked**.
[What are attention masks?](../glossary#attention-mask)
head_mask (`torch.Tensor` of shape `(decoder_layers, decoder_attention_heads)`, *optional*):
Mask to nullify selected heads of the attention modules. Mask values selected in `[0, 1]`:
- 1 indicates the head is **not masked**,
- 0 indicates the head is **masked**.
cross_attn_head_mask (`torch.Tensor` of shape `(decoder_layers, decoder_attention_heads)`, *optional*):
Mask to nullify selected heads of the attention modules in encoder to avoid performing cross-attention
on hidden heads. Mask values selected in `[0, 1]`:
- 1 indicates the head is **not masked**,
- 0 indicates the head is **masked**.
past_key_values (`tuple(tuple(torch.FloatTensor))`, *optional*, returned when `use_cache=True` is passed or when `config.use_cache=True`):
Tuple of `tuple(torch.FloatTensor)` of length `config.n_layers`, with each tuple having 2 tensors of
shape `(batch_size, num_heads, sequence_length, embed_size_per_head)`) and 2 additional tensors of
shape `(batch_size, num_heads, encoder_sequence_length, embed_size_per_head)`.
Contains pre-computed hidden-states (key and values in the self-attention blocks and in the
cross-attention blocks) that can be used (see `past_key_values` input) to speed up sequential decoding.
If `past_key_values` are used, the user can optionally input only the last `decoder_input_ids` (those
that don't have their past key value states given to this model) of shape `(batch_size, 1)` instead of
all `decoder_input_ids` of shape `(batch_size, sequence_length)`.
inputs_embeds (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*):
Optionally, instead of passing `input_ids` you can choose to directly pass an embedded representation.
This is useful if you want more control over how to convert `input_ids` indices into associated vectors
than the model's internal embedding lookup matrix.
output_attentions (`bool`, *optional*):
Whether or not to return the attentions tensors of all attention layers. See `attentions` under
returned tensors for more detail.
output_hidden_states (`bool`, *optional*):
Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors
for more detail.
return_dict (`bool`, *optional*):
Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple.
"""
output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
output_hidden_states = (
output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
)
use_cache = use_cache if use_cache is not None else self.config.use_cache
return_dict = return_dict if return_dict is not None else self.config.use_return_dict
# retrieve input_ids and inputs_embeds
if input_ids is not None and inputs_embeds is not None:
raise ValueError("You cannot specify both decoder_input_ids and decoder_inputs_embeds at the same time")
elif input_ids is not None:
input_shape = input_ids.size()
input_ids = input_ids.view(-1, input_shape[-1])
elif inputs_embeds is not None:
input_shape = inputs_embeds.size()[:-1]
else:
raise ValueError("You have to specify either decoder_input_ids or decoder_inputs_embeds")
# past_key_values_length
past_key_values_length = past_key_values[0][0].shape[2] if past_key_values is not None else 0
if inputs_embeds is None:
inputs_embeds = self.embed_tokens(input_ids) * self.embed_scale
attention_mask = self._prepare_decoder_attention_mask(
attention_mask, input_shape, inputs_embeds, past_key_values_length
)
# expand encoder attention mask
if encoder_hidden_states is not None and encoder_attention_mask is not None:
# [bsz, seq_len] -> [bsz, 1, tgt_seq_len, src_seq_len]
encoder_attention_mask = _expand_mask(encoder_attention_mask, inputs_embeds.dtype, tgt_len=input_shape[-1])
# embed positions
positions = self.embed_positions(input_ids, past_key_values_length=past_key_values_length)
hidden_states = inputs_embeds + positions
hidden_states = nn.functional.dropout(hidden_states, p=self.dropout, training=self.training)
# decoder layers
all_hidden_states = () if output_hidden_states else None
all_self_attns = () if output_attentions else None
all_cross_attentions = () if (output_attentions and encoder_hidden_states is not None) else None
next_decoder_cache = () if use_cache else None
# check if head_mask/cross_attn_head_mask has a correct number of layers specified if desired
for attn_mask, mask_name in zip([head_mask, cross_attn_head_mask], ["head_mask", "cross_attn_head_mask"]):
if attn_mask is not None:
assert attn_mask.size()[0] == (len(self.layers)), (
f"The `{mask_name}` should be specified for {len(self.layers)} layers, but it is for"
f" {head_mask.size()[0]}."
)
for idx, decoder_layer in enumerate(self.layers):
# add LayerDrop (see https://arxiv.org/abs/1909.11556 for description)
if output_hidden_states:
all_hidden_states += (hidden_states,)
dropout_probability = random.uniform(0, 1)
if self.training and (dropout_probability < self.layerdrop):
continue
past_key_value = past_key_values[idx] if past_key_values is not None else None
if self.gradient_checkpointing and self.training:
if use_cache:
logger.warning(
"`use_cache = True` is incompatible with gradient checkpointing. Setting `use_cache ="
" False`..."
)
use_cache = False
def create_custom_forward(module):
def custom_forward(*inputs):
# None for past_key_value
return module(*inputs, output_attentions, use_cache)
return custom_forward
layer_outputs = torch.utils.checkpoint.checkpoint(
create_custom_forward(decoder_layer),
hidden_states,
attention_mask,
encoder_hidden_states,
encoder_attention_mask,
head_mask[idx] if head_mask is not None else None,
cross_attn_head_mask[idx] if cross_attn_head_mask is not None else None,
None,
)
else:
layer_outputs = decoder_layer(
hidden_states,
attention_mask=attention_mask,
encoder_hidden_states=encoder_hidden_states,
encoder_attention_mask=encoder_attention_mask,
layer_head_mask=(head_mask[idx] if head_mask is not None else None),
cross_attn_layer_head_mask=(
cross_attn_head_mask[idx] if cross_attn_head_mask is not None else None
),
past_key_value=past_key_value,
output_attentions=output_attentions,
use_cache=use_cache,
)
hidden_states = layer_outputs[0]
if use_cache:
next_decoder_cache += (layer_outputs[3 if output_attentions else 1],)
if output_attentions:
all_self_attns += (layer_outputs[1],)
if encoder_hidden_states is not None:
all_cross_attentions += (layer_outputs[2],)
hidden_states = self.layer_norm(hidden_states)
# add hidden states from the last decoder layer
if output_hidden_states:
all_hidden_states += (hidden_states,)
next_cache = next_decoder_cache if use_cache else None
if not return_dict:
return tuple(
v
for v in [hidden_states, next_cache, all_hidden_states, all_self_attns, all_cross_attentions]
if v is not None
)
return BaseModelOutputWithPastAndCrossAttentions(
last_hidden_state=hidden_states,
past_key_values=next_cache,
hidden_states=all_hidden_states,
attentions=all_self_attns,
cross_attentions=all_cross_attentions,
)
@add_start_docstrings(
"The bare Speech2Text Model outputting raw hidden-states without any specific head on top.",
SPEECH_TO_TEXT_START_DOCSTRING,
)
class Speech2TextModel(Speech2TextPreTrainedModel):
def __init__(self, config: Speech2TextConfig):
super().__init__(config)
self.encoder = Speech2TextEncoder(config)
self.decoder = Speech2TextDecoder(config)
# Initialize weights and apply final processing
self.post_init()
def get_input_embeddings(self):
return self.decoder.embed_tokens
def set_input_embeddings(self, value):
self.decoder.embed_tokens = value
def get_encoder(self):
return self.encoder
def get_decoder(self):
return self.decoder
@add_start_docstrings_to_model_forward(SPEECH_TO_TEXT_INPUTS_DOCSTRING)
@replace_return_docstrings(output_type=Seq2SeqLMOutput, config_class=_CONFIG_FOR_DOC)
def forward(
self,
input_features: Optional[torch.LongTensor] = None,
attention_mask: Optional[torch.Tensor] = None,
decoder_input_ids: Optional[torch.LongTensor] = None,
decoder_attention_mask: Optional[torch.LongTensor] = None,
head_mask: Optional[torch.Tensor] = None,
decoder_head_mask: Optional[torch.Tensor] = None,
cross_attn_head_mask: Optional[torch.Tensor] = None,
encoder_outputs: Optional[Tuple[Tuple[torch.FloatTensor]]] = None,
past_key_values: Optional[Tuple[Tuple[torch.FloatTensor]]] = None,
decoder_inputs_embeds: Optional[torch.FloatTensor] = None,
use_cache: Optional[bool] = None,
output_attentions: Optional[bool] = None,
output_hidden_states: Optional[bool] = None,
return_dict: Optional[bool] = None,
) -> Union[Tuple[torch.FloatTensor], Seq2SeqLMOutput]:
r"""
Returns:
Example:
```python
>>> import torch
>>> from transformers import Speech2TextModel, Speech2TextFeatureExtractor
>>> from datasets import load_dataset
>>> model = Speech2TextModel.from_pretrained("facebook/s2t-small-librispeech-asr")
>>> feature_extractor = Speech2TextFeatureExtractor.from_pretrained("facebook/s2t-small-librispeech-asr")
>>> ds = load_dataset("hf-internal-testing/librispeech_asr_dummy", "clean", split="validation")
>>> inputs = feature_extractor(
... ds[0]["audio"]["array"], sampling_rate=ds[0]["audio"]["sampling_rate"], return_tensors="pt"
... )
>>> input_features = inputs.input_features
>>> decoder_input_ids = torch.tensor([[1, 1]]) * model.config.decoder_start_token_id
>>> last_hidden_state = model(input_features, decoder_input_ids=decoder_input_ids).last_hidden_state
>>> list(last_hidden_state.shape)
[1, 2, 256]
```"""
output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
output_hidden_states = (
output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
)
use_cache = use_cache if use_cache is not None else self.config.use_cache
return_dict = return_dict if return_dict is not None else self.config.use_return_dict
if encoder_outputs is None:
encoder_outputs = self.encoder(
input_features,
attention_mask=attention_mask,
head_mask=head_mask,
output_attentions=output_attentions,
output_hidden_states=output_hidden_states,
return_dict=return_dict,
)
# If the user passed a tuple for encoder_outputs, we wrap it in a BaseModelOutput when return_dict=True
elif return_dict and not isinstance(encoder_outputs, BaseModelOutput):
encoder_outputs = BaseModelOutput(
last_hidden_state=encoder_outputs[0],
hidden_states=encoder_outputs[1] if len(encoder_outputs) > 1 else None,
attentions=encoder_outputs[2] if len(encoder_outputs) > 2 else None,
)
# downsample encoder attention mask
if attention_mask is not None:
encoder_attention_mask = self._get_feature_vector_attention_mask(
encoder_outputs[0].shape[1], attention_mask
)
else:
encoder_attention_mask = None
# decoder outputs consists of (dec_features, past_key_value, dec_hidden, dec_attn)
decoder_outputs = self.decoder(
input_ids=decoder_input_ids,
attention_mask=decoder_attention_mask,
encoder_hidden_states=encoder_outputs[0],
encoder_attention_mask=encoder_attention_mask,
head_mask=decoder_head_mask,
cross_attn_head_mask=cross_attn_head_mask,
past_key_values=past_key_values,
inputs_embeds=decoder_inputs_embeds,
use_cache=use_cache,
output_attentions=output_attentions,
output_hidden_states=output_hidden_states,
return_dict=return_dict,
)
if not return_dict:
return decoder_outputs + encoder_outputs
return Seq2SeqModelOutput(
last_hidden_state=decoder_outputs.last_hidden_state,
past_key_values=decoder_outputs.past_key_values,
decoder_hidden_states=decoder_outputs.hidden_states,
decoder_attentions=decoder_outputs.attentions,
cross_attentions=decoder_outputs.cross_attentions,
encoder_last_hidden_state=encoder_outputs.last_hidden_state,
encoder_hidden_states=encoder_outputs.hidden_states,
encoder_attentions=encoder_outputs.attentions,
)
@add_start_docstrings(
"The Speech2Text Model with a language modeling head. Can be used for summarization.",
SPEECH_TO_TEXT_START_DOCSTRING,
)
class Speech2TextForConditionalGeneration(Speech2TextPreTrainedModel):
base_model_prefix = "model"
_keys_to_ignore_on_load_missing = [
r"encoder.version",
r"decoder.version",
r"model.encoder.embed_positions.weights",
r"model.decoder.embed_positions.weights",
r"lm_head.weight",
]
_keys_to_ignore_on_save = [
r"model.encoder.embed_positions.weights",
r"model.decoder.embed_positions.weights",
]
def __init__(self, config: Speech2TextConfig):
super().__init__(config)
self.model = Speech2TextModel(config)
self.lm_head = nn.Linear(config.d_model, self.config.vocab_size, bias=False)
# Initialize weights and apply final processing
self.post_init()
def get_encoder(self):
return self.model.get_encoder()
def get_decoder(self):
return self.model.get_decoder()
def resize_token_embeddings(self, new_num_tokens: int) -> nn.Embedding:
new_embeddings = super().resize_token_embeddings(new_num_tokens)
return new_embeddings
def get_output_embeddings(self):
return self.lm_head
def set_output_embeddings(self, new_embeddings):
self.lm_head = new_embeddings
@add_start_docstrings_to_model_forward(SPEECH_TO_TEXT_INPUTS_DOCSTRING)
@replace_return_docstrings(output_type=Seq2SeqLMOutput, config_class=_CONFIG_FOR_DOC)
def forward(
self,
input_features: Optional[torch.LongTensor] = None,
attention_mask: Optional[torch.Tensor] = None,
decoder_input_ids: Optional[torch.LongTensor] = None,
decoder_attention_mask: Optional[torch.LongTensor] = None,
head_mask: Optional[torch.Tensor] = None,
decoder_head_mask: Optional[torch.Tensor] = None,
cross_attn_head_mask: Optional[torch.Tensor] = None,
encoder_outputs: Optional[Tuple[Tuple[torch.FloatTensor]]] = None,
past_key_values: Optional[Tuple[Tuple[torch.FloatTensor]]] = None,
decoder_inputs_embeds: Optional[torch.FloatTensor] = None,
labels: Optional[torch.LongTensor] = None,
use_cache: Optional[bool] = None,
output_attentions: Optional[bool] = None,
output_hidden_states: Optional[bool] = None,
return_dict: Optional[bool] = None,
) -> Union[Tuple[torch.FloatTensor], Seq2SeqLMOutput]:
r"""
labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
Labels for computing the language modeling loss. Indices should either be in `[0, ..., config.vocab_size]`
or -100 (see `input_ids` docstring). Tokens with indices set to `-100` are ignored (masked), the loss is
only computed for the tokens with labels in `[0, ..., config.vocab_size]`.
Returns:
Example:
```python
>>> import torch
>>> from transformers import Speech2TextProcessor, Speech2TextForConditionalGeneration
>>> from datasets import load_dataset
>>> model = Speech2TextForConditionalGeneration.from_pretrained("facebook/s2t-small-librispeech-asr")
>>> processor = Speech2TextProcessor.from_pretrained("facebook/s2t-small-librispeech-asr")
>>> ds = load_dataset("hf-internal-testing/librispeech_asr_dummy", "clean", split="validation")
>>> inputs = processor(
... ds[0]["audio"]["array"], sampling_rate=ds[0]["audio"]["sampling_rate"], return_tensors="pt"
... )
>>> input_features = inputs.input_features
>>> generated_ids = model.generate(inputs=input_features)
>>> transcription = processor.batch_decode(generated_ids, skip_special_tokens=True)[0]
>>> transcription
'mister quilter is the apostle of the middle classes and we are glad to welcome his gospel'
```"""
return_dict = return_dict if return_dict is not None else self.config.use_return_dict
if labels is not None:
if decoder_input_ids is None and decoder_inputs_embeds is None:
decoder_input_ids = shift_tokens_right(
labels, self.config.pad_token_id, self.config.decoder_start_token_id
)
outputs = self.model(
input_features,
attention_mask=attention_mask,
decoder_input_ids=decoder_input_ids,
encoder_outputs=encoder_outputs,
decoder_attention_mask=decoder_attention_mask,
head_mask=head_mask,
decoder_head_mask=decoder_head_mask,
cross_attn_head_mask=cross_attn_head_mask,
past_key_values=past_key_values,
decoder_inputs_embeds=decoder_inputs_embeds,
use_cache=use_cache,
output_attentions=output_attentions,
output_hidden_states=output_hidden_states,
return_dict=return_dict,
)
lm_logits = self.lm_head(outputs[0])
loss = None
if labels is not None:
loss_fct = CrossEntropyLoss()
loss = loss_fct(lm_logits.view(-1, self.config.vocab_size), labels.view(-1))
if not return_dict:
output = (lm_logits,) + outputs[1:]
return ((loss,) + output) if loss is not None else output
return Seq2SeqLMOutput(
loss=loss,
logits=lm_logits,
past_key_values=outputs.past_key_values,
decoder_hidden_states=outputs.decoder_hidden_states,
decoder_attentions=outputs.decoder_attentions,
cross_attentions=outputs.cross_attentions,
encoder_last_hidden_state=outputs.encoder_last_hidden_state,
encoder_hidden_states=outputs.encoder_hidden_states,
encoder_attentions=outputs.encoder_attentions,
)
def prepare_inputs_for_generation(
self,
decoder_input_ids,
past=None,
attention_mask=None,
head_mask=None,
decoder_head_mask=None,
cross_attn_head_mask=None,
use_cache=None,
encoder_outputs=None,
**kwargs
):
# cut decoder_input_ids if past is used
if past is not None:
decoder_input_ids = decoder_input_ids[:, -1:]
return {
"encoder_outputs": encoder_outputs,
"past_key_values": past,
"decoder_input_ids": decoder_input_ids,
"attention_mask": attention_mask,
"head_mask": head_mask,
"decoder_head_mask": decoder_head_mask,
"cross_attn_head_mask": cross_attn_head_mask,
"use_cache": use_cache, # change this to avoid caching (presumably for debugging)
}
@staticmethod
def _reorder_cache(past, beam_idx):
reordered_past = ()
for layer_past in past:
reordered_past += (tuple(past_state.index_select(0, beam_idx) for past_state in layer_past),)
return reordered_past
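# Illustrative sketch (added for clarity; not part of the upstream module): during beam
# search, `prepare_inputs_for_generation` keeps only the last `decoder_input_ids` once a
# cache exists, and `_reorder_cache` gathers every cached key/value tensor along the beam
# dimension with `index_select`. The dummy cache shapes below are hypothetical.
if __name__ == "__main__":
    _demo_past = tuple(
        (torch.randn(3, 2, 4, 8), torch.randn(3, 2, 4, 8)) for _ in range(2)
    )  # two layers, each caching (key, value) of shape (num_beams, heads, seq_len, head_dim)
    _demo_beam_idx = torch.tensor([2, 0, 0])  # beam 2 survives twice, beam 0 once
    _demo_reordered = Speech2TextForConditionalGeneration._reorder_cache(_demo_past, _demo_beam_idx)
    # the first entry of the reordered cache now holds the states that belonged to beam 2
    assert torch.equal(_demo_reordered[0][0][0], _demo_past[0][0][2])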
|
{
"content_hash": "ad997004f222036baabe06690eddda6c",
"timestamp": "",
"source": "github",
"line_count": 1407,
"max_line_length": 150,
"avg_line_length": 46.8862828713575,
"alnum_prop": 0.6263548030135366,
"repo_name": "huggingface/transformers",
"id": "c464f4aa842ba5eba7644ad598ef358485ab7ed8",
"size": "66619",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "src/transformers/models/speech_to_text/modeling_speech_to_text.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "6021"
},
{
"name": "C++",
"bytes": "12959"
},
{
"name": "Cuda",
"bytes": "175419"
},
{
"name": "Dockerfile",
"bytes": "18218"
},
{
"name": "Jsonnet",
"bytes": "937"
},
{
"name": "Makefile",
"bytes": "3430"
},
{
"name": "Python",
"bytes": "35742012"
},
{
"name": "Shell",
"bytes": "30374"
}
],
"symlink_target": ""
}
|
''' Functions to extract data and create databases.
'''
from dmrg_helpers.extract.database import Database
from dmrg_helpers.extract.locate_estimator_files import locate_estimator_files
from dmrg_helpers.core.dmrg_logging import logger
def create_db_from_file(filename, database_name=":memory:"):
"""Creates a database with the data extracted for a file.
The file must be an estimators.dat-type file. A new database is created.
The database is created in memory if no database_name is provided.
Parameters
----------
filename: a string.
The filename of the estimators.dat file to be read. The path can be
relative or absolute.
database_name: a string (defaulted to ":memory:").
The name of the file to which the database will be saved.
Returns
-------
A Database object.
"""
db = Database(database_name)
db.insert_data_from_file(filename)
logger.info('File {0} inserted in database {1}'.format(filename,
database_name))
return db
def create_db_from_files(files, database_name=":memory:"):
"""Creates a database with the data extracted for a list fo files.
The file must be an estimators.dat-type file. A new database is created.
The database is created in memory if no database_name is provided.
Parameters
----------
files: a list of strings.
The filenames of the estimators.dat files to be read. The paths can be
relative or absolute.
database_name: a string (defaulted to ":memory:").
The name of the file to which the database will be saved.
Returns
-------
A Database object.
"""
db = Database(database_name)
for filename in files:
db.insert_data_from_file(filename)
return db
def create_db_from_dir(root_dir, database_name=":memory:",
pattern='estimators.dat'):
"""Creates a database with the data extracted by crawling a dir.
The function crawls down a dir and picks up all the files whose names follow
the `pattern`. The files must be estimators.dat-type files. A new
database is created. The database is created in memory if no database_name
is provided.
Parameters
----------
root_dir: a string.
The root directory to crawl for estimator files. The path can be
relative or absolute.
pattern: a string (defaulted to 'estimators.dat').
The filename pattern that estimator files must match.
database_name: a string (defaulted to ":memory:").
The name of the file to which the database will be saved.
Returns
-------
A Database object.
"""
files_found = locate_estimator_files(root_dir, pattern)
db = create_db_from_files(files_found, database_name)
return db
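# A minimal usage sketch of the helpers above (added for clarity; the directory,
# database name and pattern below are hypothetical):
if __name__ == "__main__":
    db = create_db_from_dir('runs/', database_name='all_runs.db',
                            pattern='estimators.dat')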
|
{
"content_hash": "ee0d3778bf65f8e9eaccb16bfe782c58",
"timestamp": "",
"source": "github",
"line_count": 77,
"max_line_length": 79,
"avg_line_length": 35.506493506493506,
"alnum_prop": 0.6565471836137527,
"repo_name": "iglpdc/dmrg_helpers",
"id": "336ad0ee3928f98bda763c80070294ff09ebc41c",
"size": "2734",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dmrg_helpers/extract/extract.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "95364"
}
],
"symlink_target": ""
}
|
from django.conf.urls import patterns, include, url
from django.views.generic import TemplateView
from django.contrib import admin
from posts.views import PostListView
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', PostListView.as_view(), name='index'),
url(r'^about/', TemplateView.as_view(template_name="about.html")),
url(r'^admin/', include(admin.site.urls)),
url(r'^news/', include('news.urls')),
url(r'^posts/', include('posts.urls')),
url(r'^profiles/', include('profiles.urls')),
url(r'^sources/', include('sources.urls')),
url(r'^groups/', include('groups.urls')),
url(r'^comments/', include('django.contrib.comments.urls')),
url(r'^coworking/', include('coworking.urls')),
(r'^accounts/logout/$', 'django.contrib.auth.views.logout', {'next_page': '/'}),
(r'^accounts/', include('allauth.urls')),
)
|
{
"content_hash": "f7f31fbf276e3d002b93c302cc99394a",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 84,
"avg_line_length": 39.59090909090909,
"alnum_prop": 0.661308840413318,
"repo_name": "inchingforward/gh",
"id": "52c77812ed2d5cf99e2f05e201e5c0be4aa1c1ba",
"size": "871",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "gh/gh/urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "3811"
},
{
"name": "HTML",
"bytes": "55560"
},
{
"name": "JavaScript",
"bytes": "547"
},
{
"name": "Python",
"bytes": "72879"
}
],
"symlink_target": ""
}
|
"""gcloud dns record-sets changes list command."""
from googlecloudsdk.api_lib.dns import util
from googlecloudsdk.calliope import base
from googlecloudsdk.core import list_printer
from googlecloudsdk.core import properties
from googlecloudsdk.third_party.apitools.base.py import list_pager
class List(base.Command):
"""View the list of changes that have been made to your record-sets.
This command displays the list of changes that have been made to your
record-sets.
"""
detailed_help = {
'DESCRIPTION': '{description}',
'EXAMPLES': """\
To see the list of changes, run:
$ {command}
To see the list of first 10 changes, run:
$ {command} --limit=10
""",
}
@staticmethod
def Args(parser):
util.ZONE_FLAG.AddToParser(parser)
parser.add_argument(
'--limit', default=None, required=False, type=int,
help='Maximum number of changes to list.')
parser.add_argument(
'--sort-order', default=None, required=False,
choices=['ascending', 'descending'],
help='Sort order for listing (ascending|descending).')
def Run(self, args):
dns_client = self.context['dns_client']
dns_messages = self.context['dns_messages']
project_id = properties.VALUES.core.project.Get(required=True)
return list_pager.YieldFromList(
dns_client.changes,
dns_messages.DnsChangesListRequest(
project=project_id,
managedZone=args.zone,
sortOrder=args.sort_order),
limit=args.limit, field='changes')
@util.HandleHttpError
def Display(self, args, result):
list_printer.PrintResourceList('dns.changes', result)
|
{
"content_hash": "17192c0389acab74a602414edfce874f",
"timestamp": "",
"source": "github",
"line_count": 57,
"max_line_length": 71,
"avg_line_length": 29.87719298245614,
"alnum_prop": 0.6641221374045801,
"repo_name": "flgiordano/netcash",
"id": "b31608b56596668d1aee5258857c77ce634cfdd5",
"size": "2299",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "+/google-cloud-sdk/lib/surface/dns/record_sets/changes/list.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "622"
},
{
"name": "HTML",
"bytes": "33831"
},
{
"name": "JavaScript",
"bytes": "13859"
},
{
"name": "Shell",
"bytes": "2716"
}
],
"symlink_target": ""
}
|
import os
from functools import partial
import logging
from pathlib import Path
from typing import Union
from contextlib import contextmanager
from mlflow.environment_variables import MLFLOW_TRACKING_AWS_SIGV4
from mlflow.store.tracking import DEFAULT_LOCAL_FILE_AND_ARTIFACT_PATH
from mlflow.store.db.db_types import DATABASE_ENGINES
from mlflow.store.tracking.file_store import FileStore
from mlflow.store.tracking.rest_store import RestStore
from mlflow.tracking._tracking_service.registry import TrackingStoreRegistry
from mlflow.utils import env, rest_utils
from mlflow.utils.file_utils import path_to_local_file_uri
from mlflow.utils.databricks_utils import get_databricks_host_creds
_TRACKING_URI_ENV_VAR = "MLFLOW_TRACKING_URI"
# Extra environment variables which take precedence for setting the basic/bearer
# auth on http requests.
_TRACKING_USERNAME_ENV_VAR = "MLFLOW_TRACKING_USERNAME"
_TRACKING_PASSWORD_ENV_VAR = "MLFLOW_TRACKING_PASSWORD"
_TRACKING_TOKEN_ENV_VAR = "MLFLOW_TRACKING_TOKEN"
# sets verify param of 'requests.request' function
# see https://requests.readthedocs.io/en/master/api/
_TRACKING_INSECURE_TLS_ENV_VAR = "MLFLOW_TRACKING_INSECURE_TLS"
_TRACKING_SERVER_CERT_PATH_ENV_VAR = "MLFLOW_TRACKING_SERVER_CERT_PATH"
# sets cert param of 'requests.request' function
# see https://requests.readthedocs.io/en/master/api/
_TRACKING_CLIENT_CERT_PATH_ENV_VAR = "MLFLOW_TRACKING_CLIENT_CERT_PATH"
_logger = logging.getLogger(__name__)
_tracking_uri = None
def is_tracking_uri_set():
"""Returns True if the tracking URI has been set, False otherwise."""
if _tracking_uri or env.get_env(_TRACKING_URI_ENV_VAR):
return True
return False
def set_tracking_uri(uri: Union[str, Path]) -> None:
"""
Set the tracking server URI. This does not affect the
currently active run (if one exists), but takes effect for successive runs.
:param uri:
- An empty string, or a local file path, prefixed with ``file:/``. Data is stored
locally at the provided file (or ``./mlruns`` if empty).
- An HTTP URI like ``https://my-tracking-server:5000``.
- A Databricks workspace, provided as the string "databricks" or, to use a
Databricks CLI
`profile <https://github.com/databricks/databricks-cli#installation>`_,
"databricks://<profileName>".
- A :py:class:`pathlib.Path` instance
.. code-block:: python
:caption: Example
import mlflow
mlflow.set_tracking_uri("file:///tmp/my_tracking")
tracking_uri = mlflow.get_tracking_uri()
print("Current tracking uri: {}".format(tracking_uri))
.. code-block:: text
:caption: Output
Current tracking uri: file:///tmp/my_tracking
"""
if isinstance(uri, Path):
# On Windows with Python3.8 (https://bugs.python.org/issue38671)
# .resolve() doesn't return the absolute path if the directory doesn't exist
# so we're calling .absolute() first to get the absolute path on Windows,
# then .resolve() to clean the path
uri = uri.absolute().resolve().as_uri()
global _tracking_uri
_tracking_uri = uri
@contextmanager
def _use_tracking_uri(uri: str, local_store_root_path: str = None) -> None:
"""
Similar to `mlflow.tracking.set_tracking_uri` function but return a context manager.
:param uri: tracking URI to use.
:param local_store_root_path: the local store root path for the tracking URI.
"""
global _tracking_uri
cwd = os.getcwd()
old_tracking_uri = _tracking_uri
try:
if local_store_root_path is not None:
os.chdir(local_store_root_path)
_tracking_uri = uri
yield
finally:
_tracking_uri = old_tracking_uri
os.chdir(cwd)
def _resolve_tracking_uri(tracking_uri=None):
return tracking_uri or get_tracking_uri()
def get_tracking_uri() -> str:
"""
Get the current tracking URI. This may not correspond to the tracking URI of
the currently active run, since the tracking URI can be updated via ``set_tracking_uri``.
:return: The tracking URI.
.. code-block:: python
:caption: Example
import mlflow
# Get the current tracking uri
tracking_uri = mlflow.get_tracking_uri()
print("Current tracking uri: {}".format(tracking_uri))
.. code-block:: text
:caption: Output
Current tracking uri: file:///.../mlruns
"""
global _tracking_uri
if _tracking_uri is not None:
return _tracking_uri
elif env.get_env(_TRACKING_URI_ENV_VAR) is not None:
return env.get_env(_TRACKING_URI_ENV_VAR)
else:
return path_to_local_file_uri(os.path.abspath(DEFAULT_LOCAL_FILE_AND_ARTIFACT_PATH))
def _get_file_store(store_uri, **_):
return FileStore(store_uri, store_uri)
def _get_sqlalchemy_store(store_uri, artifact_uri):
from mlflow.store.tracking.sqlalchemy_store import SqlAlchemyStore
if artifact_uri is None:
artifact_uri = DEFAULT_LOCAL_FILE_AND_ARTIFACT_PATH
return SqlAlchemyStore(store_uri, artifact_uri)
def _get_default_host_creds(store_uri):
return rest_utils.MlflowHostCreds(
host=store_uri,
username=os.environ.get(_TRACKING_USERNAME_ENV_VAR),
password=os.environ.get(_TRACKING_PASSWORD_ENV_VAR),
token=os.environ.get(_TRACKING_TOKEN_ENV_VAR),
aws_sigv4=MLFLOW_TRACKING_AWS_SIGV4.get(),
ignore_tls_verification=os.environ.get(_TRACKING_INSECURE_TLS_ENV_VAR) == "true",
client_cert_path=os.environ.get(_TRACKING_CLIENT_CERT_PATH_ENV_VAR),
server_cert_path=os.environ.get(_TRACKING_SERVER_CERT_PATH_ENV_VAR),
)
def _get_rest_store(store_uri, **_):
return RestStore(partial(_get_default_host_creds, store_uri))
def _get_databricks_rest_store(store_uri, **_):
return RestStore(partial(get_databricks_host_creds, store_uri))
_tracking_store_registry = TrackingStoreRegistry()
_tracking_store_registry.register("", _get_file_store)
_tracking_store_registry.register("file", _get_file_store)
_tracking_store_registry.register("databricks", _get_databricks_rest_store)
for scheme in ["http", "https"]:
_tracking_store_registry.register(scheme, _get_rest_store)
for scheme in DATABASE_ENGINES:
_tracking_store_registry.register(scheme, _get_sqlalchemy_store)
_tracking_store_registry.register_entrypoints()
def _get_store(store_uri=None, artifact_uri=None):
return _tracking_store_registry.get_store(store_uri, artifact_uri)
# TODO(sueann): move to a projects utils module
def _get_git_url_if_present(uri):
"""
Return the path git_uri#sub_directory if the URI passed is a local path that's part of
a Git repo; otherwise return the original URI.
:param uri: The expanded uri
:return: The git_uri#sub_directory if the uri is part of a Git repo,
otherwise return the original uri
"""
if "#" in uri:
# Already a URI in git repo format
return uri
try:
from git import Repo, InvalidGitRepositoryError, GitCommandNotFound, NoSuchPathError
except ImportError as e:
_logger.warning(
"Failed to import Git (the git executable is probably not on your PATH),"
" so Git SHA is not available. Error: %s",
e,
)
return uri
try:
# Check whether this is part of a git repo
repo = Repo(uri, search_parent_directories=True)
# Repo url
repo_url = "file://%s" % repo.working_tree_dir
# Sub directory
rlpath = uri.replace(repo.working_tree_dir, "")
if rlpath == "":
git_path = repo_url
elif rlpath[0] == "/":
git_path = repo_url + "#" + rlpath[1:]
else:
git_path = repo_url + "#" + rlpath
return git_path
except (InvalidGitRepositoryError, GitCommandNotFound, ValueError, NoSuchPathError):
return uri
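# A minimal sketch of how the helpers above compose (added for clarity; the URIs are
# hypothetical). `set_tracking_uri` records the URI, `_use_tracking_uri` overrides it
# temporarily, and `_get_store` later resolves whichever URI is active through the
# registry populated above (file / database / http(s) / databricks schemes).
if __name__ == "__main__":
    set_tracking_uri("sqlite:///mlflow.db")            # a database scheme -> _get_sqlalchemy_store
    print(get_tracking_uri())                          # sqlite:///mlflow.db
    with _use_tracking_uri("http://localhost:5000"):   # an http scheme -> _get_rest_store
        print(get_tracking_uri())                      # http://localhost:5000
    print(get_tracking_uri())                          # back to sqlite:///mlflow.db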
|
{
"content_hash": "2c4e7f6235ef658cfe2a0e4582d1eabc",
"timestamp": "",
"source": "github",
"line_count": 229,
"max_line_length": 97,
"avg_line_length": 35.06550218340611,
"alnum_prop": 0.6718555417185554,
"repo_name": "mlflow/mlflow",
"id": "a996483062dd4cedd867db5d5381ebbfdda852af",
"size": "8030",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mlflow/tracking/_tracking_service/utils.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "24965"
},
{
"name": "Dockerfile",
"bytes": "1206"
},
{
"name": "HTML",
"bytes": "16439"
},
{
"name": "Java",
"bytes": "276538"
},
{
"name": "JavaScript",
"bytes": "3606345"
},
{
"name": "Mako",
"bytes": "494"
},
{
"name": "Python",
"bytes": "6057051"
},
{
"name": "R",
"bytes": "202454"
},
{
"name": "Scala",
"bytes": "39353"
},
{
"name": "Shell",
"bytes": "27246"
},
{
"name": "TSQL",
"bytes": "211"
},
{
"name": "TypeScript",
"bytes": "313772"
}
],
"symlink_target": ""
}
|
"""
Author
------
Bo Zhang
Email
-----
bozhang@nao.cas.cn
Created on
----------
- Wed Jan 4 14:00:00 2016
Modifications
-------------
-
Aims
----
- SONG echelle spectra extractor
"""
import numpy as np
from twodspec import ccdproc_mod as ccdproc
def produce_master(t, method="median", imagetp='FLAT', slc=None,
ccdread_unit='adu'):
""" a method to process master frames
Parameters
----------
t: astropy.table.Table
the table of SONG observation
method:
the method adopted when combining frames
imagetp: string
{'BIAS', 'FLAT', 'FLATI2', 'THAR', 'THARI2'}
slc: slice
to denote the fraction of being used
ccdread_unit: string/unit
default is 'adu'
Returns
-------
mst: ccdproc.CCDData
the (combined) master frame
"""
assert method in {'average', 'median'}
# 1. produce ind of master frames
ind_mst = np.where(t['IMAGETYP'] == imagetp)[0]
# check for positive number of master frames
try:
assert len(ind_mst) > 0
except AssertionError:
raise IOError("There is no image of type %s!" % imagetp)
# in default, combine all masters available
if slc is not None:
# combine a fraction of masters available
assert isinstance(slc, slice)
ind_mst = ind_mst[slc]
# check for positive number of master frames
try:
assert len(ind_mst) > 0
except AssertionError:
raise IOError("There is no image of type %s! (slice is bad)" % imagetp)
# 2. read master frames
print("@SONG: trying really hard to produce the final %s frame!" % imagetp)
for _ in t['fps'][ind_mst]:
print("+ %s" % _)
fps = ','.join(t['fps'][ind_mst])
if len(ind_mst) == 1:
mst = ccdproc.CCDData.read(fps, unit=ccdread_unit)
else:
mst = ccdproc.combine(fps, unit=ccdread_unit, method=method)
return mst
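# A minimal usage sketch of `produce_master` (added for clarity; the FITS file names
# are hypothetical). The observation table only needs the two columns the function
# actually reads: 'IMAGETYP' and 'fps'.
if __name__ == "__main__":
    from astropy.table import Table
    t = Table({'IMAGETYP': ['FLAT', 'FLAT', 'BIAS'],
               'fps': ['flat_0001.fits', 'flat_0002.fits', 'bias_0001.fits']})
    master_flat = produce_master(t, method='median', imagetp='FLAT')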
|
{
"content_hash": "fa8f3666c4a942e556a859391db81ecd",
"timestamp": "",
"source": "github",
"line_count": 85,
"max_line_length": 79,
"avg_line_length": 22.776470588235295,
"alnum_prop": 0.5976239669421488,
"repo_name": "hypergravity/hrs",
"id": "dea45bd52cfa26a93181587c0654ace265b32a78",
"size": "1960",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "song/extract.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "1226236"
},
{
"name": "Jupyter Notebook",
"bytes": "763194"
},
{
"name": "Matlab",
"bytes": "1682"
},
{
"name": "Python",
"bytes": "155168"
}
],
"symlink_target": ""
}
|
import copy
import math
import unittest
import paddle.fluid as fluid
import paddle.fluid.layers as layers
import paddle.fluid.framework as framework
import paddle.fluid.core as core
def exponential_decay(learning_rate,
global_step,
decay_steps,
decay_rate,
staircase=False):
exponent = global_step / decay_steps
if staircase:
exponent = math.floor(exponent)
return learning_rate * decay_rate**exponent
def natural_exp_decay(learning_rate,
global_step,
decay_steps,
decay_rate,
staircase=False):
exponent = float(global_step) / float(decay_steps)
if staircase:
exponent = math.floor(exponent)
return learning_rate * math.exp(-1 * decay_rate * exponent)
def inverse_time_decay(learning_rate,
global_step,
decay_steps,
decay_rate,
staircase=False):
temp = float(global_step) / float(decay_steps)
if staircase:
temp = math.floor(temp)
return learning_rate / (1 + decay_rate * temp)
def polynomial_decay(learning_rate,
global_step,
decay_steps,
end_learning_rate=0.0001,
power=1.0,
cycle=False):
if cycle:
div = math.ceil(global_step / float(decay_steps))
if div == 0:
div = 1
decay_steps = decay_steps * div
else:
global_step = min(global_step, decay_steps)
return (learning_rate - end_learning_rate) * \
((1 - float(global_step) / float(decay_steps)) ** power) + end_learning_rate
def piecewise_decay(global_step, boundaries, values):
assert len(boundaries) + 1 == len(values)
for i in range(len(boundaries)):
if global_step < boundaries[i]:
return values[i]
return values[len(values) - 1]
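# Quick numeric checks of the reference implementations above (added for clarity,
# not part of the original test file):
#   exponential_decay, staircase: lr * rate**floor(step / decay_steps)
#       step=7, decay_steps=5, rate=0.5  ->  1.0 * 0.5**1 = 0.5
#   piecewise_decay: step=4 falls in [3, 6)  ->  values[1] = 0.2
assert exponential_decay(1.0, 7, 5, 0.5, staircase=True) == 0.5
assert piecewise_decay(4, [3, 6, 9], [0.1, 0.2, 0.3, 0.4]) == 0.2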
class TestLearningRateDecay(unittest.TestCase):
def check_decay(self, python_decay_fn, fluid_decay_fn, kwargs):
places = [fluid.CPUPlace()]
if core.is_compiled_with_cuda():
places.append(fluid.CUDAPlace(0))
for place in places:
self.check_decay_with_place(place, python_decay_fn, fluid_decay_fn,
kwargs)
def check_decay_with_place(self, place, python_decay_fn, fluid_decay_fn,
kwargs):
decayed_lr = fluid_decay_fn(**kwargs)
place = fluid.CPUPlace()
exe = fluid.Executor(place)
exe.run(fluid.default_startup_program())
fluid.memory_optimize(fluid.default_main_program())
for step in range(10):
lr_val, = exe.run(fluid.default_main_program(),
feed={},
fetch_list=[decayed_lr])
python_decayed_lr = python_decay_fn(
global_step=float(step), **kwargs)
self.assertAlmostEqual(
python_decayed_lr,
lr_val[0],
msg='Failed fn is {0}, Python result is {1}, Fluid result is {2}'.
format(python_decay_fn.__name__,
str(python_decayed_lr), str(lr_val[0])))
def test_decay(self):
common_kwargs_true = {
"learning_rate": 1.0,
"decay_steps": 5,
"decay_rate": 0.5,
"staircase": True
}
common_kwargs_false = copy.deepcopy(common_kwargs_true)
common_kwargs_false["staircase"] = False
decay_fns = [
(exponential_decay, layers.exponential_decay, common_kwargs_true),
(exponential_decay, layers.exponential_decay, common_kwargs_false),
(natural_exp_decay, layers.natural_exp_decay, common_kwargs_true),
(natural_exp_decay, layers.natural_exp_decay, common_kwargs_false),
(inverse_time_decay, layers.inverse_time_decay, common_kwargs_true),
(inverse_time_decay, layers.inverse_time_decay,
common_kwargs_false),
(polynomial_decay, layers.polynomial_decay, {
"learning_rate": 1.0,
"decay_steps": 5,
"cycle": True
}),
(polynomial_decay, layers.polynomial_decay, {
"learning_rate": 1.0,
"decay_steps": 5,
"cycle": False
}),
(piecewise_decay, layers.piecewise_decay, {
"boundaries": [3, 6, 9],
"values": [0.1, 0.2, 0.3, 0.4]
}),
]
for py_decay_fn, fluid_decay_fn, kwargs in decay_fns:
print("decay_fn=" + py_decay_fn.__name__ + " kwargs=" + str(kwargs))
main_program = framework.Program()
startup_program = framework.Program()
with framework.program_guard(main_program, startup_program):
self.check_decay(py_decay_fn, fluid_decay_fn, kwargs)
if __name__ == '__main__':
unittest.main()
|
{
"content_hash": "54a9d6962f28c9c7cadc595a2e5e8403",
"timestamp": "",
"source": "github",
"line_count": 146,
"max_line_length": 87,
"avg_line_length": 35.157534246575345,
"alnum_prop": 0.5375024352230664,
"repo_name": "putcn/Paddle",
"id": "6382e290eb30c621da64d5c600be6d8a7c6254f1",
"size": "5743",
"binary": false,
"copies": "4",
"ref": "refs/heads/develop",
"path": "python/paddle/fluid/tests/unittests/test_learning_rate_scheduler.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "272910"
},
{
"name": "C++",
"bytes": "7598375"
},
{
"name": "CMake",
"bytes": "269313"
},
{
"name": "Cuda",
"bytes": "1078779"
},
{
"name": "Go",
"bytes": "109501"
},
{
"name": "Perl",
"bytes": "11456"
},
{
"name": "Python",
"bytes": "3637137"
},
{
"name": "Shell",
"bytes": "157071"
}
],
"symlink_target": ""
}
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
from server import views
from bookreview import views
urlpatterns = patterns('',
# Examples:
url(r'^$', 'bookreview.views.index', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
)
|
{
"content_hash": "668821116bcede67b8c70be4928734d3",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 54,
"avg_line_length": 27.666666666666668,
"alnum_prop": 0.6837349397590361,
"repo_name": "iscarecrow/sb",
"id": "d7f7bb2785150e2ae2226ddde138f03281ddb71e",
"size": "332",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "server/urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "13787"
},
{
"name": "JavaScript",
"bytes": "421650"
},
{
"name": "Python",
"bytes": "3741"
}
],
"symlink_target": ""
}
|
import odbpy as odb
import os
current_dir = os.path.dirname(os.path.realpath(__file__))
tests_dir = os.path.abspath(os.path.join(current_dir, os.pardir))
opendb_dir = os.path.abspath(os.path.join(tests_dir, os.pardir))
data_dir = os.path.join(tests_dir, "data")
db = odb.dbDatabase.create()
odb.read_lef(db, os.path.join(data_dir, "gscl45nm.lef"))
odb.read_def(db, os.path.join(data_dir, "design.def"))
chip = db.getChip()
block = chip.getBlock()
nets = block.getNets()
for net in nets:
net.getName()
assert len(nets) == 24, "Error: nets not found"
|
{
"content_hash": "05376b445445f76adb3dfcd24f72e292",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 65,
"avg_line_length": 32.705882352941174,
"alnum_prop": 0.7014388489208633,
"repo_name": "QuantamHD/OpenROAD",
"id": "9b7bd3a8d6fef5b614a68af362518c62fe3e6400",
"size": "556",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/odb/test/python/07-dump_nets_test.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "679482"
},
{
"name": "C++",
"bytes": "18484938"
},
{
"name": "CMake",
"bytes": "148464"
},
{
"name": "Cuda",
"bytes": "7441"
},
{
"name": "Dockerfile",
"bytes": "3754"
},
{
"name": "Python",
"bytes": "245126"
},
{
"name": "Ruby",
"bytes": "541"
},
{
"name": "SWIG",
"bytes": "315251"
},
{
"name": "Shell",
"bytes": "39400"
},
{
"name": "Tcl",
"bytes": "1771643"
},
{
"name": "Verilog",
"bytes": "51524137"
},
{
"name": "Yacc",
"bytes": "496743"
}
],
"symlink_target": ""
}
|
"""
A fairly complicated migration taken from the real world, mangled so as not to
disclose much about the meaning of the models / fields.
"""
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
pass # dummy
def backwards(self, orm):
pass # dummy
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '75'})
},
'app_beta.model01': {
'Meta': {'object_name': 'Model01'},
'field001': ('django.db.models.fields.CharField', [], {'default': "'blabla'", 'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'field002': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'field003': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'field004': ('django.db.models.fields.CharField', [], {'max_length': '300', 'blank': 'True'})
},
'app_beta.model02': {
'Meta': {'object_name': 'Model02'},
'field005': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'field006': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'field007': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'field008': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'field009': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'field010': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'app_beta.model03': {
'Meta': {'object_name': 'Model03', '_ormbases': ['app_beta.Model07']},
'field011': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['app_beta.PressRelease']", 'unique': 'True', 'primary_key': 'True'})
},
'app_beta.model04': {
'Meta': {'object_name': 'Model04'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'field012': ('django.db.models.fields.DateField', [], {}),
'field013': ('django.db.models.fields.CharField', [], {'max_length': '120', 'blank': 'True'}),
'field014': ('django.db.models.fields.CharField', [], {'max_length': '160'}),
'field015': ('django.db.models.fields.URLField', [], {'max_length': '200'})
},
'app_beta.model05': {
'Meta': {'ordering': "['asdf', 'qwer']", 'unique_together': "(['content_type', 'object_id'],)", 'object_name': 'Model05'},
'field016': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'qwer': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'asdf': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'field017': ('django.db.models.fields.PositiveIntegerField', [], {}),
'field018': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'})
},
'app_beta.model06': {
'Meta': {'ordering': "('dpdpd', '-qppqwewje')", 'unique_together': "(('content_type', 'object_id'),)", 'object_name': 'Model06'},
'qppqwewje': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'field019': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'field020': ('django.db.models.fields.PositiveIntegerField', [], {}),
'dpdpd': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'field021': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'app_beta.model07': {
'Meta': {'ordering': "('-aerowerowe',)", 'object_name': 'Model07'},
'field022': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'field023': ('django.db.models.fields.TextField', [], {}),
'field024': ('django.db.models.fields.PositiveIntegerField', [], {}),
'aerowerowe': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'app_beta.model07a': {
'Meta': {'object_name': 'Model07a'},
'field025': ('django.db.models.fields.TextField', [], {}),
'field026': ('django.db.models.fields.DateField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'field027': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '160'}),
'field028': ('django.db.models.fields.CharField', [], {'max_length': '160'})
},
'app_beta.model08': {
'Meta': {'object_name': 'Model08'},
'field029': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'field030': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'field032': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'field034': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'field035': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'field036': ('django.db.models.fields.DecimalField', [], {'default': "'12.34'"}),
'field037': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('pqpwpw',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'pqpwpw': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'app_delta.model09': {
'Meta': {'object_name': 'Model09'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'field038': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'})
},
'app_delta.model11': {
'Meta': {'ordering': "('id',)", 'object_name': 'Model11'},
'field039': ('django.db.models.fields.CharField', [], {'max_length': '150', 'blank': 'True'}),
'field040': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['app_delta.Model11']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.IntegerField', [], {'primary_key': 'True'}),
'field041': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'field042': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'field043': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'field044': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
'field045': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'field046': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '150'}),
'field047': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'asdf'", 'null': 'True', 'to': "orm['app_delta.Model11']"}),
'field048': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'field049': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '150'}),
'field050': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'asfdgasf'", 'symmetrical': 'False', 'to': "orm['app_delta.Model13']"})
},
'app_delta.model12': {
'Meta': {'object_name': 'Model12'},
'id': ('django.db.models.fields.IntegerField', [], {'primary_key': 'True'}),
'field051': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'app_delta.model13': {
'Meta': {'ordering': "('id',)", 'object_name': 'Model13'},
'field052': ('django.db.models.fields.CharField', [], {'max_length': '60'}),
'field053': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'field054': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'field055': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'field056': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'field057': ('django.db.models.fields.IntegerField', [], {}),
'id': ('django.db.models.fields.IntegerField', [], {'primary_key': 'True'}),
'field058': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'field059': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '120'})
},
'app_zeta.Model14': { # BAD: should be lower case
'Meta': {'ordering': "['asdf']", 'object_name': 'Model14'},
'field060': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'field061': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'field062': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'field063': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'field064': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'field065': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '3'}),
'field066': ('django.db.models.fields.CharField', [], {'default': "'qwerkmnmsfoa'", 'max_length': '50'}),
'field067': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'field068': ('django.db.models.fields.DecimalField', [], {'default': "'0.00'", 'max_digits': '7', 'decimal_places': '2'}),
'field069': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'fgasdf'", 'null': 'True', 'to': "orm['app_epsilon.Model10']"}),
'field070': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'field071': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'field072': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'gsdfg'", 'blank': 'True', 'to': "orm['app_epsilon.Model10']"}),
'field073': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'field074': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}),
'field075': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'field076': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'field077': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '11', 'decimal_places': '8'}),
'field078': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '11', 'decimal_places': '8'}),
'field079': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'field080': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'field081': ('django.db.models.fields.DateField', [], {'null': 'True'}),
'field082': ('django.db.models.fields.DecimalField', [], {'default': "'123.00'", 'max_digits': '5', 'decimal_places': '2'}),
'field083': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'asdf': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'field085': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'field086': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'field087': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'field088': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'gsgfadasda'", 'null': 'True', 'to': "orm['app_lambda.Model25']"}),
'field089': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'field090': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'field091': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'field092': ('django.db.models.fields.CharField', [], {'max_length': '20', 'blank': 'True'}),
'field093': ('django.db.models.fields.DecimalField', [], {'default': "'12.00'", 'max_digits': '5', 'decimal_places': '2', 'blank': 'True'}),
'field094': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'field095': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'field096': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'field097': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'field098': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'field099': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'field100': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'field101': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'afsdggs'", 'null': 'True', 'to': "orm['app_lambda.Model25']"}),
'field102': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}),
'field103': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'field104': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'field105': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'null': 'True', 'blank': 'True'}),
'field106': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'app_zeta.model15': {
'Meta': {'ordering': "['qpwepzldn']", 'object_name': 'Model15'},
'field107': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'field108': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
'field109': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['app_zeta.Model14']"}),
'field110': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'field111': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'field112': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'field113': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'field114': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'qpwepzldn': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'field115': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'field116': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'field117': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'field118': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'field119': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'field120': ('django.db.models.fields.CharField', [], {'default': "'asdfasdfa'", 'max_length': '100', 'blank': 'True'}),
'field121': ('django.db.models.fields.CharField', [], {'max_length': '10', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'unique': 'True', 'null': 'True'})
},
'app_gamma.model16': {
'Meta': {'object_name': 'Model16'},
'field122': ('django.db.models.fields.related.OneToOneField', [], {'default': "orm['app_gamma.Model18']", 'related_name': "'dfgasdfa'", 'unique': 'True', 'to': "orm['app_gamma.Model18']"}),
'field123': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'field124': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'field125': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['app_beta.Model01']", 'null': 'True'}),
'field126': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'app_gamma.model17': {
'Meta': {'object_name': 'Model17'},
'field127': ('django.db.models.fields.CharField', [], {'max_length': '5000', 'blank': 'True'}),
'field128': ('django.db.models.fields.CharField', [], {'max_length': '5000', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'field129': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'app_gamma.model18': {
'Meta': {'object_name': 'Model18'},
'field130': ('django.db.models.fields.TextField', [], {'max_length': '1000', 'blank': 'True'}),
'field131': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'field132': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'field133': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'field134': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'field135': ('django.db.models.fields.TextField', [], {'max_length': '1000', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'field136': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'field137': ('django.db.models.fields.TextField', [], {'max_length': '1000', 'blank': 'True'}),
'field138': ('django.db.models.fields.TextField', [], {'max_length': '1000', 'blank': 'True'}),
'field139': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'field140': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'field141': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'field142': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'field143': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'field144': ('django.db.models.fields.TextField', [], {'max_length': '1000', 'blank': 'True'}),
'field145': ('django.db.models.fields.TextField', [], {'max_length': '1000', 'blank': 'True'}),
'field146': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['app_theta.Model24']", 'symmetrical': 'False', 'blank': 'True'})
},
'app_gamma.model19': {
'Meta': {'ordering': "['-id']", 'object_name': 'Model19'},
'field147': ('django.db.models.fields.CharField', [], {'max_length': '5000', 'blank': 'True'}),
'field148': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'field149': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'asdfasdf'", 'unique': 'True', 'to': "orm['app_gamma.Model16']"}),
'field150': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'asdfasd'", 'to': "orm['app_iota.Model21']"}),
'field151': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'fgasdffs'", 'to': "orm['app_iota.Model22']"}),
'field152': ('django.db.models.fields.DateTimeField', [], {}),
'field153': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'field154': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'asdfadfa'", 'to': "orm['app_zeta.Model15']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'field155': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'field156': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'blank': 'True'}),
'field157': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['app_beta.Model01']", 'null': 'True'}),
'field158': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'field159': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['app_gamma.Model17']"}),
'field160': ('django.db.models.fields.DateTimeField', [], {}),
'field161': ('django.db.models.fields.PositiveIntegerField', [], {})
},
'app_epsilon.model10': {
'Meta': {'ordering': "('weopot',)", 'object_name': 'Model10'},
'field166': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'field167': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'field168': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'ewiasdf'", 'symmetrical': 'False', 'to': "orm['app_alpha.Model26']"}),
'field169': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'asfasdf'", 'symmetrical': 'False', 'to': "orm['app_delta.Model11']"}),
'field170': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'weopot': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'field171': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['app_delta.Model12']", 'null': 'True'}),
'field172': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'app_iota.model21': {
'Meta': {'object_name': 'Model21'},
'field173': ('django.db.models.fields.DateField', [], {}),
'field174': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'field175': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'field176': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'field177': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'field178': ('django.db.models.fields.CharField', [], {'default': "'erqwer'", 'max_length': '100', 'blank': 'True'})
},
'app_iota.model22': {
'Meta': {'object_name': 'Model22'},
'field179': ('django.db.models.fields.CharField', [], {'max_length': '1', 'null': 'True'}),
'field180': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'field181': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'field182': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'field183': ('django.db.models.fields.CharField', [], {'default': "'eqerwe'", 'max_length': '50'}),
'field184': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'null': 'True', 'blank': 'True'}),
'field185': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'field186': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'field187': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'field188': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'field189': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'field190': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'field191': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'asdfasdf'", 'null': 'True', 'to': "orm['app_zeta.Model15']"}),
'field192': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'field193': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'asdfadf'", 'null': 'True', 'to': "orm['app_zeta.Model15']"}),
'field194': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'unique': 'True', 'null': 'True'})
},
'app_iota.model23': {
'Meta': {'unique_together': "(('profile', 'gender'),)", 'object_name': 'Model23'},
'field195': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'field196': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'field197': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'field198': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'field199': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'field200': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'field201': ('django.db.models.fields.CharField', [], {'max_length': '2', 'null': 'True'}),
'field202': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'field203': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'field204': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'field205': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'field206': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'field207': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'field208': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['app_delta.Model27']", 'null': 'True'}),
'field209': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'field210': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['app_iota.Model22']", 'null': 'True'}),
'field211': ('django.db.models.fields.CharField', [], {'default': "'ct'", 'max_length': '2'}),
'field212': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'})
},
'app_theta.model24': {
'Meta': {'object_name': 'Model24'},
'field213': ('django.db.models.fields.TextField', [], {'max_length': '1000', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'field214': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'field215': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'app_lambda.model25': {
'Meta': {'ordering': "['eqeoriwe']", 'object_name': 'Model25'},
'field216': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'field217': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'field218': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'field219': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'field220': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'field221': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'field222': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'eqeoriwe': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'field223': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'asfasdfas'", 'null': 'True', 'to': "orm['app_lambda.Model25']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'unique': 'True', 'null': 'True'})
},
'app_alpha.model26': {
'Meta': {'object_name': 'Model26'},
'field224': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['app_delta.Model09']", 'unique': 'True', 'null': 'True'}),
'field225': ('django.db.models.fields.PositiveIntegerField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'field226': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'field227': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'field228': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['app_delta.Model13']", 'symmetrical': 'False'})
},
'app_delta.model27': {
'Meta': {'ordering': '()', 'object_name': 'Model27'},
'id': ('django.db.models.fields.IntegerField', [], {'primary_key': 'True'}),
'field229': ('django.db.models.fields.CharField', [], {'max_length': '120'})
},
'app_mu.model28': {
'Meta': {'ordering': "['-qowopwow']", 'object_name': 'Model28'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'field230': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'sadfvvz'", 'to': "orm['app_nu.Model30']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'qowopwow': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
},
'app_nu.model30': {
'Meta': {'ordering': "['-qobajhjer']", 'object_name': 'Model30'},
'field231': ('django.db.models.fields.CharField', [], {'max_length': '100', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'field232': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'field233': ('django.db.models.fields.IntegerField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'}),
'field234': ('django.db.models.fields.IntegerField', [], {'max_length': '4', 'null': 'True', 'blank': 'True'}),
'field235': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'field236': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'field237': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'field238': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'sadfdsf'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['app_mu.Model28']"}),
'field240': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'adfasdd'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['app_nu.Model29']"}),
'field241': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'asdfafw'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['app_alpha.Model53']"}),
'field242': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'field243': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['app_kappa.Model34']", 'null': 'True'}),
'field244': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'asdftrqewrj'", 'null': 'True', 'to': "orm['app_nu.Model30']"}),
'field245': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'pasdfadsoj'", 'null': 'True', 'to': "orm['app_nu.Model38']"}),
'field246': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '6', 'decimal_places': '2', 'blank': 'True'}),
'field247': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'field248': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'qobajhjer': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'field249': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'field250': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['app_nu.Model39']", 'null': 'True'}),
'field251': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'asdfzwqaeij'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['app_alpha.Model53']"})
},
'app_nu.model29': {
'Meta': {'ordering': "['-qobajhjer']", 'object_name': 'Model29'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'field252': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'field253': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'qobajhjer': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'field254': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'casdpoadfaj'", 'to': "orm['app_nu.Model30']"})
},
'app_kappa.model31': {
'Meta': {'object_name': 'Model31'},
'field255': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'field256': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'field257': ('django.db.models.fields.SlugField', [], {'max_length': '50'})
},
'app_kappa.model32': {
'Meta': {'object_name': 'Model32'},
'field257': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'field258': ('django.db.models.fields.DecimalField', [], {'max_digits': '5', 'decimal_places': '2'}),
'field259': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['app_zeta.Model14']", 'null': 'True', 'blank': 'True'}),
'field260': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': "'True'"}),
'field261': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'field262': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'field263': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '20'}),
'field264': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'field265': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'field266': ('django.db.models.fields.CharField', [], {'default': "'qwqed'", 'max_length': '100'})
},
'app_kappa.model33': {
'Meta': {'object_name': 'Model33'},
'field267': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'field268': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'field269': ('django.db.models.fields.CharField', [], {'default': "'erqwer'", 'max_length': '50'}),
'field270': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'field271': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'field272': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'field273': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'field274': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'field275': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'field276': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'field277': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'field278': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'field279': ('django.db.models.fields.CharField', [], {'default': "'gfdsg'", 'max_length': '50'}),
'field280': ('django.db.models.fields.CharField', [], {'max_length': '16', 'null': 'True'}),
'field281': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['app_kappa.Model32']", 'null': 'True'}),
'field282': ('django.db.models.fields.CharField', [], {'max_length': '4', 'null': 'True'}),
'field283': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'field284': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'field285': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'field286': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'field287': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'field288': ('django.db.models.fields.NullBooleanField', [], {'default': 'False', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'field289': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100'}),
'field290': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'field291': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'field292': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['app_iota.Model22']", 'null': 'True'}),
'field293': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '40', 'db_index': 'True'}),
'field294': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'field295': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'field296': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'field297': ('django.db.models.fields.DecimalField', [], {'max_digits': '6', 'decimal_places': '2'})
},
'app_kappa.model34': {
'Meta': {'ordering': "['id']", 'object_name': 'Model34'},
'field298': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['app_epsilon.Model10']", 'null': 'True'}),
'field299': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'field300': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100', 'blank': 'True'}),
'field301': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'field302': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'field303': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'field304': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '5', 'decimal_places': '2'}),
'field305': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'field306': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}),
'field307': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'field308': ('django.db.models.fields.NullBooleanField', [], {'default': 'False', 'null': 'True', 'blank': 'True'}),
'field309': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'}),
'field310': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'field311': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'field312': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'field313': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'field314': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'field315': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True'}),
'field316': ('django.db.models.fields.CharField', [], {'max_length': '1', 'null': 'True'}),
'field317': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True'}),
'field318': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'field319': ('django.db.models.fields.NullBooleanField', [], {'default': 'True', 'null': 'True', 'blank': 'True'}),
'field320': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'field321': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['app_kappa.Model33']", 'null': 'True'}),
'field322': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['app_kappa.Model36']", 'null': 'True'}),
'field323': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['app_iota.Model23']", 'unique': 'True'}),
'field324': ('django.db.models.fields.DecimalField', [], {'max_digits': '5', 'decimal_places': '2'}),
'field325': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['app_kappa.Model35']"}),
'field326': ('django.db.models.fields.IntegerField', [], {}),
'field327': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True'}),
'field328': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'field329': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
'app_kappa.model36': {
'Meta': {'object_name': 'Model36'},
'field330': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'field331': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['app_kappa.Model33']", 'null': 'True'}),
'field332': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'field333': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'field334': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'field335': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'field336': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'field337': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'field338': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'field339': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'field340': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '6', 'decimal_places': '2', 'blank': 'True'}),
'field341': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['app_kappa.Model37']", 'null': 'True'}),
'field342': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'}),
'field343': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '6', 'decimal_places': '2', 'blank': 'True'})
},
'app_kappa.model35': {
'Meta': {'object_name': 'Model35'},
'field344': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['app_kappa.Model31']", 'blank': 'True'}),
'field345': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'field346': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'field347': ('django.db.models.fields.DecimalField', [], {'max_digits': '5', 'decimal_places': '2'}),
'field348': ('django.db.models.fields.TextField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'field349': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'})
},
'app_kappa.model37': {
'Meta': {'object_name': 'Model37'},
'field350': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'field351': ('django.db.models.fields.DecimalField', [], {'max_digits': '5', 'decimal_places': '2'}),
'field352': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'field353': ('django.db.models.fields.CharField', [], {'max_length': '75'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'field354': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'field355': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'app_nu.model38': {
'Meta': {'object_name': 'Model38'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'field356': ('django.db.models.fields.CharField', [], {'default': "'';daflsdfpasd'", 'unique': 'True', 'max_length': '100'})
},
'app_nu.model39': {
'Meta': {'object_name': 'Model39'},
'field357': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'field358': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'field359': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'lasdfasdisdf'", 'null': 'True', 'to': "orm['app_nu.Model40']"}),
'field360': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['app_kappa.Model36']"}),
'field361': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '6', 'decimal_places': '2', 'blank': 'True'}),
'field362': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'field363': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'qobajhjer': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'field364': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'})
},
'app_nu.model40': {
'Meta': {'ordering': "['-qobajhjer']", 'object_name': 'Model40'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'field365': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'field366': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'qobajhjer': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'field367': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'asdfpoapsdf'", 'to': "orm['app_nu.Model39']"})
},
'app_alpha.model50': {
'Meta': {'unique_together': "(('row', 'col', 'group'),)", 'object_name': 'Model50'},
'field368': ('django.db.models.fields.IntegerField', [], {}),
'field369': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'asdifjaisj'", 'to': "orm['app_alpha.Model53']"}),
'field370': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'asdfasdiaja'", 'to': "orm['app_alpha.Model51']"}),
'field371': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'field372': ('django.db.models.fields.IntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'app_alpha.model51': {
'Meta': {'ordering': "('-qowopwow',)", 'object_name': 'Model51'},
'field373': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}),
'field374': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'adjifasdfaisd'", 'to': "orm['app_alpha.Model52']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'field375': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['app_alpha.Model54']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'qowopwow': ('django.db.models.fields.DateTimeField', [], {})
},
'app_alpha.model52': {
'Meta': {'ordering': "('id',)", 'object_name': 'Model52'},
'field377': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'field378': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '30', 'blank': 'True'}),
'field379': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'field380': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'field381': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['app_alpha.Model54']"}),
'qowopwow': ('django.db.models.fields.DateTimeField', [], {}),
'field382': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['app_alpha.Model51']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'field383': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['app_alpha.Model56']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'})
},
'app_alpha.model53': {
'Meta': {'ordering': "('-eqrewrq',)", 'object_name': 'Model53'},
'field384': ('django.db.models.fields.CharField', [], {'max_length': '3', 'blank': 'True'}),
'field385': ('django.db.models.fields.IntegerField', [], {}),
'eqrewrq': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'iasjdifsdj'", 'to': "orm['app_alpha.Model52']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'field386': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['app_alpha.Model58']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'field387': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['app_alpha.Model57']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'field388': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'field389': ('django.db.models.fields.IntegerField', [], {}),
'field390': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'eijsadfiasdjf'", 'to': "orm['app_nu.Model30']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'field391': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'field392': ('django.db.models.fields.PositiveIntegerField', [], {}),
'field393': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['app_alpha.Model50']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'})
},
'app_alpha.model54': {
'Meta': {'object_name': 'Model54'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'field394': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'field395': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'field396': ('django.db.models.fields.PositiveIntegerField', [], {})
},
'app_alpha.model55': {
'Meta': {'ordering': "('-qowopwow',)", 'object_name': 'Model55'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'field397': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['app_alpha.Model54']"}),
'qowopwow': ('django.db.models.fields.DateTimeField', [], {})
},
'app_alpha.model56': {
'Meta': {'ordering': "('-qowopwow',)", 'object_name': 'Model56'},
'field398': ('django.db.models.fields.CharField', [], {'max_length': '11'}),
'field399': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'asifjasdifj'", 'to': "orm['app_alpha.Model52']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'field400': ('django.db.models.fields.FloatField', [], {}),
'field401': ('picklefield.fields.PickledObjectField', [], {}),
'field402': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['app_alpha.Model54']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'qowopwow': ('django.db.models.fields.DateTimeField', [], {})
},
'app_alpha.model57': {
'Meta': {'object_name': 'Model57'},
'field403': ('django.db.models.fields.DecimalField', [], {'max_digits': '8', 'decimal_places': '4'}),
'field404': ('django.db.models.fields.DecimalField', [], {'max_digits': '8', 'decimal_places': '4'}),
'field405': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'asiddiafj'", 'to': "orm['app_alpha.Model53']"}),
'field406': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'field407': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'field408': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'iajsdfasid'", 'null': 'True', 'to': "orm['app_alpha.Model55']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'field409': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'field410': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'cwefaef'", 'null': 'True', 'to': "orm['app_alpha.Model56']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'app_alpha.model58': {
'Meta': {'ordering': "('-qowopwow',)", 'object_name': 'Model58'},
'field411': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'eaifjuasdj'", 'to': "orm['app_alpha.Model53']"}),
'field412': ('django.db.models.fields.PositiveIntegerField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'qowopwow': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
}
}
complete_apps = ['app_beta', 'app_gamma']
|
{
"content_hash": "c6a8057fce6b9984154cc29a0f2e39b3",
"timestamp": "",
"source": "github",
"line_count": 688,
"max_line_length": 201,
"avg_line_length": 87.99273255813954,
"alnum_prop": 0.5431209633459423,
"repo_name": "cberzan/django-anger",
"id": "5db3719ed1e2c7239d218d1095918160e3d2fab7",
"size": "60564",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "testdata/bad_migration_bad_model_name_1.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "614151"
}
],
"symlink_target": ""
}
|
"""Support for getting collected information from PVOutput."""
import logging
from collections import namedtuple
from datetime import timedelta
import voluptuous as vol
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.components.rest.sensor import RestData
from homeassistant.const import (
ATTR_TEMPERATURE,
CONF_API_KEY,
CONF_NAME,
ATTR_DATE,
ATTR_TIME,
ATTR_VOLTAGE,
)
_LOGGER = logging.getLogger(__name__)
_ENDPOINT = "http://pvoutput.org/service/r2/getstatus.jsp"
ATTR_ENERGY_GENERATION = "energy_generation"
ATTR_POWER_GENERATION = "power_generation"
ATTR_ENERGY_CONSUMPTION = "energy_consumption"
ATTR_POWER_CONSUMPTION = "power_consumption"
ATTR_EFFICIENCY = "efficiency"
CONF_SYSTEM_ID = "system_id"
DEFAULT_NAME = "PVOutput"
DEFAULT_VERIFY_SSL = True
SCAN_INTERVAL = timedelta(minutes=2)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_API_KEY): cv.string,
vol.Required(CONF_SYSTEM_ID): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the PVOutput sensor."""
name = config.get(CONF_NAME)
api_key = config.get(CONF_API_KEY)
system_id = config.get(CONF_SYSTEM_ID)
method = "GET"
payload = auth = None
verify_ssl = DEFAULT_VERIFY_SSL
headers = {"X-Pvoutput-Apikey": api_key, "X-Pvoutput-SystemId": system_id}
rest = RestData(method, _ENDPOINT, auth, headers, payload, verify_ssl)
rest.update()
if rest.data is None:
_LOGGER.error("Unable to fetch data from PVOutput")
return False
add_entities([PvoutputSensor(rest, name)], True)
class PvoutputSensor(Entity):
"""Representation of a PVOutput sensor."""
def __init__(self, rest, name):
"""Initialize a PVOutput sensor."""
self.rest = rest
self._name = name
self.pvcoutput = None
self.status = namedtuple(
"status",
[
ATTR_DATE,
ATTR_TIME,
ATTR_ENERGY_GENERATION,
ATTR_POWER_GENERATION,
ATTR_ENERGY_CONSUMPTION,
ATTR_POWER_CONSUMPTION,
ATTR_EFFICIENCY,
ATTR_TEMPERATURE,
ATTR_VOLTAGE,
],
)
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the device."""
if self.pvcoutput is not None:
return self.pvcoutput.energy_generation
return None
@property
def device_state_attributes(self):
"""Return the state attributes of the monitored installation."""
if self.pvcoutput is not None:
return {
ATTR_ENERGY_GENERATION: self.pvcoutput.energy_generation,
ATTR_POWER_GENERATION: self.pvcoutput.power_generation,
ATTR_ENERGY_CONSUMPTION: self.pvcoutput.energy_consumption,
ATTR_POWER_CONSUMPTION: self.pvcoutput.power_consumption,
ATTR_EFFICIENCY: self.pvcoutput.efficiency,
ATTR_TEMPERATURE: self.pvcoutput.temperature,
ATTR_VOLTAGE: self.pvcoutput.voltage,
}
def update(self):
"""Get the latest data from the PVOutput API and updates the state."""
try:
self.rest.update()
self.pvcoutput = self.status._make(self.rest.data.split(","))
except TypeError:
self.pvcoutput = None
_LOGGER.error("Unable to fetch data from PVOutput. %s", self.rest.data)
|
{
"content_hash": "fb97188326ff171e93484c8ff946f100",
"timestamp": "",
"source": "github",
"line_count": 122,
"max_line_length": 83,
"avg_line_length": 31.16393442622951,
"alnum_prop": 0.63387690689111,
"repo_name": "Cinntax/home-assistant",
"id": "90084ab799913dd8b0ce77be6126b180d00d2b4e",
"size": "3802",
"binary": false,
"copies": "4",
"ref": "refs/heads/dev",
"path": "homeassistant/components/pvoutput/sensor.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "17374056"
},
{
"name": "Shell",
"bytes": "6792"
}
],
"symlink_target": ""
}
|
from django.conf.urls import patterns, include, url
from rest_framework import routers
from django_usda.modelviewsets import FoodViewSet, FoodGroupViewSet, FoodLanguaLFactorViewSet, LanguaLFactorViewSet, NutrientDataViewSet, NutrientViewSet, SourceViewSet, DerivationViewSet, WeightViewSet, FootnoteViewSet, DataLinkViewSet, DataSourceViewSet, FoodInfoViewSet
from django.contrib import admin
admin.autodiscover()
router = routers.DefaultRouter()
router.register(r'foods', FoodViewSet)
router.register(r'foodgroups', FoodGroupViewSet)
router.register(r'foodlangualfactors', FoodLanguaLFactorViewSet)
router.register(r'langualfactors', LanguaLFactorViewSet)
router.register(r'nutrientdatas', NutrientDataViewSet)
router.register(r'nutrients', NutrientViewSet)
router.register(r'sources', SourceViewSet)
router.register(r'derivations', DerivationViewSet)
router.register(r'weights', WeightViewSet)
router.register(r'footnotes', FootnoteViewSet)
router.register(r'datalinks', DataLinkViewSet)
router.register(r'datasources', DataSourceViewSet)
router.register(r'foodinfo', FoodInfoViewSet)
urlpatterns = patterns('',
url(r'^', include(router.urls)),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
url(r'^admin/', include(admin.site.urls)),
url(r'^demo/', 'demo.views.index'),
)
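# Illustrative note (not part of the original file): DefaultRouter builds list
# and detail routes for every registration above, plus a browsable API root,
# so the project exposes endpoints such as /foods/, /foods/<pk>/, /nutrients/
# and /foodgroups/.  A hypothetical client call might look like:
#
#   import requests
#   requests.get("http://localhost:8000/foods/?format=json")  # host assumed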
|
{
"content_hash": "1b9d7e112424789c54c314a79704d4d5",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 272,
"avg_line_length": 48.57142857142857,
"alnum_prop": 0.7867647058823529,
"repo_name": "Zundrium/django-usda-demo",
"id": "d5c1f919258bf7fcccd35a42e9ca849a5cfff470",
"size": "1360",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "django-usda-demo/urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "4934"
}
],
"symlink_target": ""
}
|
import sys
sys.path.insert(1, "../../../")
import h2o, tests
import numpy as np
from sklearn.cluster import KMeans
from sklearn.preprocessing import Imputer
def benignKmeans():
# Connect to a pre-existing cluster
# connect to localhost:54321
# Log.info("Importing benign.csv data...\n")
benign_h2o = h2o.import_file(path=tests.locate("smalldata/logreg/benign.csv"))
#benign_h2o.summary()
benign_sci = np.genfromtxt(tests.locate("smalldata/logreg/benign.csv"), delimiter=",")
# Impute missing values with column mean
imp = Imputer(missing_values='NaN', strategy='mean', axis=0)
benign_sci = imp.fit_transform(benign_sci)
# Log.info(paste("H2O K-Means with ", i, " clusters:\n", sep = ""))
for i in range(1,7):
benign_h2o_km = h2o.kmeans(x=benign_h2o, k=i)
print "H2O centers"
print benign_h2o_km.centers()
benign_sci_km = KMeans(n_clusters=i, init='k-means++', n_init=1)
benign_sci_km.fit(benign_sci)
print "sckit centers"
print benign_sci_km.cluster_centers_
if __name__ == "__main__":
tests.run_test(sys.argv, benignKmeans)
|
{
"content_hash": "93f21f7c45e419ef1ffa39a7954fba23",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 90,
"avg_line_length": 32.51428571428571,
"alnum_prop": 0.6511423550087874,
"repo_name": "kyoren/https-github.com-h2oai-h2o-3",
"id": "98bbaf2e04e72eb156aa6639f24687e69433b132",
"size": "1138",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "h2o-py/tests/testdir_algos/kmeans/pyunit_benignKmeans.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "5090"
},
{
"name": "CSS",
"bytes": "162402"
},
{
"name": "CoffeeScript",
"bytes": "262107"
},
{
"name": "Emacs Lisp",
"bytes": "8927"
},
{
"name": "HTML",
"bytes": "139398"
},
{
"name": "Java",
"bytes": "5567457"
},
{
"name": "JavaScript",
"bytes": "38932"
},
{
"name": "Makefile",
"bytes": "34048"
},
{
"name": "Python",
"bytes": "2179689"
},
{
"name": "R",
"bytes": "1677531"
},
{
"name": "Rebol",
"bytes": "7059"
},
{
"name": "Ruby",
"bytes": "3506"
},
{
"name": "Scala",
"bytes": "16346"
},
{
"name": "Shell",
"bytes": "45182"
},
{
"name": "TeX",
"bytes": "546032"
}
],
"symlink_target": ""
}
|
import numpy as np
import random
import json
import math
neuron_decay=0.9
maxneuronsperunit=64
maxaxonsperunit=128
maxunits=10
binary_io=True
bitsize=8
runtime=20
testsize=20
class neuron:
id=-1
myunit=-1
active=False
can_mutate=True
threshold=999
amount=0
decay=neuron_decay
downstream_axons=[]
upstream_axons=[]
def __init__(self,id,threshold):
self.id=id
self.threshold=threshold
def check(self):
global units
if self.amount>=self.threshold and self.active:
for x in self.downstream_axons:
if x>-1:
units[self.myunit].axons[x].fire()
self.amount=self.amount*self.decay
class axon:
id=-1
myunit=-1
active=False
fireamount=0
upstream_neuron=-1
downstream_neuron=-1
def __init__(self,id):
self.id=id
def fire(self):
global units
units[self.myunit].neurons[self.downstream_neuron].amount=units[self.myunit].neurons[self.downstream_neuron].amount+self.fireamount
#print "AXON "+str(self.id)+" IS FIRING WITH "+str(self.fireamount)
return True
class unit:
id=-1
active=False
active_neurons=0
active_axons=0
input_neurons=[]
output_neurons=[]
neurons=[neuron(i,999) for i in range(maxneuronsperunit)]
axons=[axon(i) for i in range(maxaxonsperunit)]
def __init__(self,id):
self.id=id
def add_neuron(self,threshold):
a=0
b=-1
while a<maxneuronsperunit:
if self.neurons[a].active==False:
b=a
self.active_neurons=self.active_neurons+1
a=maxneuronsperunit
a=a+1
self.neurons[b].active=True
self.neurons[b].myunit=self.id
self.neurons[b].threshold=threshold
return b
def add_n_neurons(self,n,threshold):
a=0
while a<n:
self.add_neuron(threshold)
a=a+1
def remove_neuron(self,n):
if self.neurons[n].active==True:
self.neurons[n].active=False
self.neurons[n].amount=0
self.neurons[n].threshold=999
self.active_neurons=self.active_neurons-1
def connect(self,a,b,amount):
if self.neurons[a].active and self.neurons[b].active:
c=0
d=0
while c<maxaxonsperunit:
if self.axons[c].active==False:
d=c
c=maxaxonsperunit
c=c+1
self.neurons[a].downstream_axons.append(d)
self.neurons[b].upstream_axons.append(d)
self.axons[d].active=True
self.axons[d].fireamount=amount
self.axons[d].myunit=self.id
self.axons[d].downstream_neuron=b
self.axons[d].upstream_neuron=a
return True
else:
return False
def cycle(self,inputs):
a=0
outputs=[]
#RESET ALL NEURONS BETWEEN CYCLES
for x in self.neurons:
x.amount=0
while a<runtime:
b=0
c=0
while c<len(self.input_neurons) and c<len(inputs):
self.neurons[self.input_neurons[c]].amount=inputs[c]
c=c+1
while b<maxneuronsperunit:
if self.neurons[b].active:
self.neurons[b].check()
b=b+1
#print "RUN CYCLE "+str(a)
a=a+1
def print_neurons(self):
a=0
while a<maxneuronsperunit:
if self.neurons[a].active:
print "NEURON "+str(a)+" AMT: "+str(self.neurons[a].amount)+" / "+str(self.neurons[a].threshold)
a=a+1
print "INPUTS"
for x in self.input_neurons:
print str(x)
print ""
print "OUTPUTS"
for y in self.output_neurons:
print str(y)
def designate_io(self,ins,outs):
a=0
b=0
while b<ins and a<maxneuronsperunit:
if self.neurons[a].active:
self.neurons[a].can_mutate=False
self.neurons[a].decay=1
if binary_io:
self.neurons[a].threshold=1 #IO are BINARY
self.input_neurons.append(a)
b=b+1
a=a+1
c=0
d=a
while c<outs and d<maxneuronsperunit:
if self.neurons[d].active:
self.neurons[d].can_mutate=False
self.neurons[d].decay=1
if binary_io:
self.neurons[d].threshold=1
self.output_neurons.append(d)
c=c+1
d=d+1
if c==ins and b==outs:
return True
else:
return False
def remove_axon(self,n):
if self.axons[n].active:
self.axons[n].active=False
self.axons[n].id=-1
self.axons[n].fireamount=0
u=self.axons[n].upstream_neuron
d=self.axons[n].downstream_neuron
self.axons[n].upstream_neuron=-1
self.axons[n].downstream_neuron=-1
if self.neurons[u].active:
a=0
while a<len(self.neurons[u].downstream_axons):
if self.neurons[u].downstream_axons[a]==n:
self.neurons[u].downstream_axons[a]=-1
a=a+1
if self.neurons[d].active:
b=0
while b<len(self.neurons[d].upstream_axons):
if self.neurons[d].upstream_axons[b]==n:
self.neurons[d].upstream_axons[b]=-1
b=b+1
def change_axon_destination(self,a,d):
if self.axons[a].active:
b=self.axons[a].downstream_neuron
h=0
while h<len(self.neurons[b].upstream_axons):
if self.neurons[b].upstream_axons[h]==a:
self.neurons[b].upstream_axons[h]=-1
h=h+1
self.neurons[b].upstream_axons.append(a)
self.axons[a].downstream_neuron=d
def change_axon_source(self,a,s):
if self.axons[a].active:
b=self.axons[a].upstream_neuron
h=0
while h<len(self.neurons[b].downstream_axons):
if self.neurons[b].downstream_axons[h]==a:
self.neurons[b].downstream_axons[h]=-1
h=h+1
self.axons[a].upstream_neuron=s
self.neurons[b].downstream_axons.append(a)
def change_threshold(self,n,r):
if self.neurons[n].active:
self.neurons[n].threshold=r
return True
else:
return False
def change_fireamount(self,a,r):
if self.axons[a].active:
self.axons[a].fireamount=r
return True
else:
return False
def change_decay(self,n,r):
if self.neurons[n].active:
self.neurons[n].decay=r
return True
else:
return False
def mutate(self):
choice=random.randint(0,100)
#print choice
if choice<10: #add neuron
self.add_neuron(1)
elif choice<20: # remove neuron
ok=True
found=False
a=0
while ok:
if self.neurons[a].active:
ok=False
found=True
elif a==maxneuronsperunit:
ok=False
a=a+1
if found:
self.remove_neuron(a)
#print "removed "+str(a)
elif choice<30: #add connection
ok=True
fireamount=random.randint(0,4)
fro=-1
to=-1
a=0
while ok and a<maxneuronsperunit:
f=random.randint(0,maxneuronsperunit-1)
if self.neurons[f].active:
fro=f
ok=False
a=a+1
ok=True
b=0
while ok and b<maxneuronsperunit:
t=random.randint(0,maxneuronsperunit-1)
if self.neurons[t].active:
to=t
ok=False
b=b+1
if to>-1 and fro > -1:
self.connect(fro,to,fireamount)
#print "connected "+str(fro)+" to "+str(to)+" for "+str(fireamount)
elif choice<40: #remove connection
ok=True
a=0
while ok:
h=random.randint(0,maxaxonsperunit-1)
if self.axons[h].active:
ok=False
#self.remove_axon(h)
# print "removed "+str(a)
a=a+1
if a>1000:
ok=False
elif choice<50: #change threshold WORKS
ok=True
changeamt=(random.random()-0.5)*2
while ok:
a=random.randint(0,maxneuronsperunit-1)
if self.neurons[a].active:
self.neurons[a].threshold=self.neurons[a].threshold+changeamt
# print "changed threshold for "+str(a)+ " by "+str(changeamt)
ok=False
a=a+1
elif choice<60: #change fireamount
ok=True
a=0
while ok and a<len(self.axons):
changeamt=(random.randint(-5,5))/10
if self.axons[a].active:
ok=False
self.axons[a].fireamount=self.axons[a].fireamount+changeamt
# print "changed fireamount "+str(a)+" by "+str(changeamt)
a=a+1
elif choice<70: # change axon source
a=0
b=0
kk=True
while kk:
towhere=random.randint(0,maxneuronsperunit-1)
if self.neurons[towhere].active:
kk=False
b=b+1
if b>100:
kk=False
ok=True
if b>100:
ok=False
while ok and a<len(self.axons):
if self.axons[a].active:
self.change_axon_source(a,towhere)
# print "changed axon source to "+str(towhere)+" for "+str(a)
ok=False
a=a+1
elif choice<80: # change axon destination
a=0
b=0
kk=True
while kk:
towhere=random.randint(0,maxneuronsperunit-1)
if self.neurons[towhere].active:
kk=False
b=b+1
if b>100:
kk=False
ok=True
if b>100:
ok=False
while ok and a<len(self.axons):
if self.axons[a].active:
self.change_axon_destination(a,towhere)
# print "changed axon destination to "+str(towhere)+" for "+str(a)
ok=False
a=a+1
elif choice<90: # change decay
ok=True
a=0
changeamt=(random.random()-0.5)
while ok and a<maxneuronsperunit:
if self.neurons[a].active:
self.neurons[a].decay=self.neurons[a].decay+changeamt
# print "changed decay for "+str(a)+ " by "+str(changeamt)
ok=False
a=a+1
def mutate_n(self,n):
a=0
while a<n:
self.mutate()
a=a+1
def read_outputs(self):
#OUTPUTS IN BINARY
outputs=[]
a=0
while a<len(self.output_neurons):
n=self.output_neurons[a]
if self.neurons[n].active and self.neurons[n].amount>=self.neurons[n].threshold:
outputs.append(1)
else:
outputs.append(0)
a=a+1
return outputs
def read_inputs(self):
inputs=[]
a=0
while a<len(self.input_neurons):
n=self.input_neurons[a]
if self.neurons[n].active:
inputs.append(self.neurons[n].amount)
else:
inputs.append(0)
a=a+1
return inputs
class system:
units=[unit(i) for i in range(maxunits)]
def init(self, n_units):
for i in range(0,n_units):
self.units[i].add_n_neurons(maxneuronsperunit,1)
self.units[i].designate_io(bitsize*2,bitsize)
self.units[i].active=True
def save(self):
global data
a=0
data=[] #each element is a unit
while a<maxunits:
if self.units[a].active:
r={'active_neurons':self.units[a].active_neurons,'active_axons':self.units[a].active_axons,'input_neurons':self.units[a].input_neurons,'output_neurons':self.units[a].output_neurons}
r['neurons']=[]
r['unitid']=a
#save neuron data in each active unit
b=0
while b<maxneuronsperunit:
if self.units[a].neurons[b].active:
d={'can_mutate':self.units[a].neurons[b].can_mutate,'threshold':self.units[a].neurons[b].threshold,'currentamount':self.units[a].neurons[b].amount,'decay':self.units[a].neurons[b].decay}
d['downstream_axons']=self.units[a].neurons[b].downstream_axons
d['upstream_axons']=self.units[a].neurons[b].upstream_axons
d['neuronid']=b
r['neurons'].append(d)
b=b+1
b=0
r['axons']=[]
while b<maxaxonsperunit:
if self.units[a].axons[b].active:
g={'fire_amount':self.units[a].axons[b].fireamount,'axonid':b,'upstream_neuron':self.units[a].axons[b].upstream_neuron,'downstream_neuron':self.units[a].axons[b].downstream_neuron}
r['axons'].append(g)
b=b+1
data.append(r)
a=a+1
v=json.dumps(data)
file=open('config.txt','wb')
file.write(v)
file.close()
def load(self):
global data,units
file=open('config.txt')
f=file.read()
data=json.loads(f)
a=0
while a<len(data):
r=data[a]['unitid']
self.units[r].active_axons=data[a]['active_axons']
self.units[r].active_neurons=data[a]['active_neurons']
self.units[r].input_neurons=data[a]['input_neurons']
self.units[r].output_neurons=data[a]['output_neurons']
#load neuron data
n=0
while n<len(data[a]['neurons']):
neuronid=data[a]['neurons'][n]['neuronid']
self.units[r].neurons[neuronid].threshold=data[a]['neurons'][n]['threshold']
self.units[r].neurons[neuronid].can_mutate=data[a]['neurons'][n]['can_mutate']
self.units[r].neurons[neuronid].amount=data[a]['neurons'][n]['currentamount']
self.units[r].neurons[neuronid].decay=data[a]['neurons'][n]['decay']
self.units[r].neurons[neuronid].downstream_axons=data[a]['neurons'][n]['downstream_axons']
self.units[r].neurons[neuronid].upstream_axons=data[a]['neurons'][n]['upstream_axons']
self.units[r].neurons[neuronid].active=True
n=n+1
#load axon data
g=0
while g<len(data[a]['axons']):
axon=data[a]['axons'][g]
axonid=axon['axonid']
self.units[r].axons[axonid].fire_amount=axon['fire_amount']
self.units[r].axons[axonid].upstream_neuron=axon['upstream_neuron']
self.units[r].axons[axonid].downstream_neuron=axon['downstream_neuron']
self.units[r].axons[axonid].active=True
g=g+1
a=a+1
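# Illustrative usage sketch (not part of the original module; only the method
# names below come from the definitions above, the driver itself is an
# assumption):
#
#   s = system()
#   s.init(1)                        # activate one unit with binary IO
#   s.units[0].mutate_n(50)          # apply 50 random mutations
#   s.units[0].cycle([1, 0, 1, 1])   # run `runtime` steps on these inputs
#   outputs = s.units[0].read_outputs()
#   s.save()                         # persist the topology to config.txt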
|
{
"content_hash": "0acf1aceff4e0073da4b4921216843e3",
"timestamp": "",
"source": "github",
"line_count": 539,
"max_line_length": 210,
"avg_line_length": 30.287569573283857,
"alnum_prop": 0.48820826952526797,
"repo_name": "barisser/Neural",
"id": "aed4b94784009f028c6198933f63d047f6a0b13c",
"size": "16325",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "neural.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "14755"
}
],
"symlink_target": ""
}
|
''' Copyright (c) 2013 Potential Ventures Ltd
Copyright (c) 2013 SolarFlare Communications Inc
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of Potential Ventures Ltd,
SolarFlare Communications Inc nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL POTENTIAL VENTURES LTD BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. '''
import cherrypy
import dowser
def start(port):
cherrypy.tree.mount(dowser.Root())
cherrypy.config.update({
'environment': 'embedded',
'server.socket_port': port
})
cherrypy.engine.start()
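# Illustrative usage sketch (not part of the original file): the module is
# meant to be imported once and started on a free port to expose the dowser
# memory-profiling pages over HTTP; the port below is only an example.
#
#   import memdebug
#   memdebug.start(8080)   # then browse to http://localhost:8080/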
|
{
"content_hash": "2ef53368101ee9e791fcabd450507cd9",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 79,
"avg_line_length": 48.23684210526316,
"alnum_prop": 0.7741407528641571,
"repo_name": "mkreider/cocotb2",
"id": "bee3bf3ef40efbf0d72c948445607d3f21ef2ef9",
"size": "1833",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "cocotb/memdebug.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "115630"
},
{
"name": "C++",
"bytes": "110149"
},
{
"name": "Makefile",
"bytes": "62213"
},
{
"name": "Python",
"bytes": "300390"
},
{
"name": "VHDL",
"bytes": "74835"
},
{
"name": "Verilog",
"bytes": "19556"
}
],
"symlink_target": ""
}
|
"""keyrotator Cleanup command."""
from datetime import datetime
from datetime import timedelta
import logging
from dateutil import parser
from delete import DeleteCommand
from list import ListCommand
import pytz
class CleanupCommand(object):
"""Implementation of the keyrotator cleanup command."""
def run(self, project_id, iam_account, key_max_age):
"""Runs the list and delete commands for keyrotator.
Args:
      project_id: The project_id in which to look for old keys.
      iam_account: The IAM account whose keys should be cleaned up.
key_max_age: An integer in units of days for which to find keys to delete.
Returns:
An integer indicating status.
"""
current_keys = ListCommand().run(
project_id, iam_account, return_results=True)
signed_key_max_age = abs(int(key_max_age))
current_datetime = datetime.now(pytz.utc)
invalid_keys = []
for key in current_keys:
try:
key_creation_time = parser.parse(key["validAfterTime"])
except ValueError as e:
        logging.error("Oops, unable to convert creation time: %s", e)
        continue
diff_time = current_datetime - key_creation_time
if diff_time.days > signed_key_max_age:
logging.info("Found invalid key %s created %s", key["name"],
key_creation_time)
invalid_keys.append(key)
for key in invalid_keys:
DeleteCommand().run(project_id, iam_account, key["name"])
if not invalid_keys:
logging.info("No keys to cleanup.")
return 0
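# A minimal usage sketch (the project and service-account values below are
# hypothetical and valid Google Cloud credentials are required, so the call is
# left commented out rather than executed on import):
#
#   CleanupCommand().run(
#       project_id="my-project",
#       iam_account="rotator@my-project.iam.gserviceaccount.com",
#       key_max_age=90)  # delete service-account keys older than 90 days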
|
{
"content_hash": "b15e5a4ad70d7c5a372dc48ed875a7b6",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 80,
"avg_line_length": 28.314814814814813,
"alnum_prop": 0.670372792674951,
"repo_name": "GoogleCloudPlatform/keyrotator",
"id": "2a0b9c2e9a38237cc19fb7ca83bf6d87d79e2e39",
"size": "2149",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "keyrotator/cleanup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "18822"
}
],
"symlink_target": ""
}
|
import curses
stdscr = curses.initscr()
finished = False
try:
while not finished:
c = stdscr.getch()
        if 0 < c < 27:  # Ctrl+A..Ctrl+Z arrive as ASCII control codes 1-26
            print("Pressed: CTRL")
except (KeyboardInterrupt, SystemExit):
finished = True
curses.endwin()
|
{
"content_hash": "7d585a317a9f9f59cf85dc6b7ebd3c63",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 39,
"avg_line_length": 15.266666666666667,
"alnum_prop": 0.7030567685589519,
"repo_name": "meigrafd/Sample-Code",
"id": "a2957fbac59fa3029b942201024fd4a03a18b855",
"size": "229",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "key_input.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "496"
},
{
"name": "CSS",
"bytes": "638"
},
{
"name": "HTML",
"bytes": "1141"
},
{
"name": "JavaScript",
"bytes": "1624"
},
{
"name": "PHP",
"bytes": "77857"
},
{
"name": "Perl",
"bytes": "478"
},
{
"name": "Python",
"bytes": "382809"
},
{
"name": "Shell",
"bytes": "56023"
}
],
"symlink_target": ""
}
|
import logging
from typing import Dict
import pandas as pd
from PyQt5.QtCore import pyqtSignal, Qt
from PyQt5.QtWidgets import QAction
from pyqtgraph import Point
from pyqtgraph.GraphicsScene.mouseEvents import MouseClickEvent
from dgp.core import Icon
from dgp.core import StateAction
from dgp.core.oid import OID
from .helpers import LinearSegmentGroup, LineUpdate
from .backends import GridPlotWidget, AxisFormatter
__all__ = ['TransformPlot', 'LineSelectPlot', 'AxisFormatter', 'LineUpdate']
_log = logging.getLogger(__name__)
"""
Task specific Plotting Class definitions.
This module adds various Plotting classes based on :class:`GridPlotWidget`
which are tailored for specific tasks, e.g. the LineSelectPlot provides methods
and user-interaction features to allow a user to create line-segments (defining
a section of interesting data).
"""
class TransformPlot(GridPlotWidget):
"""Plot interface used for displaying transformation results.
May need to display data plotted against time series or scalar series.
Parameters
----------
kwargs :
Keyword arguments are supplied to the base :class:`GridPlotWidget`
The TransformPlot sets sharex=True, multiy=False and timeaxis=True by
default
rows : int
cols : int
grid : bool
"""
def __init__(self, **kwargs):
super().__init__(**kwargs, sharex=True, multiy=False, timeaxis=True)
def set_axis_formatters(self, formatter: AxisFormatter):
for i in range(self.rows):
self.set_xaxis_formatter(formatter, i, 0)
class LineSelectPlot(GridPlotWidget):
"""LineSelectPlot is a task specific plot widget which provides the user
with a click/drag interaction allowing them to create and edit data
'segments' visually on the plot surface.
Parameters
----------
rows : int, optional
Number of rows of linked plots to create, default is 1
parent : QWidget, optional
Attributes
----------
sigSegmentChanged : :class:`~pyqt.pyqtSignal` [ :class:`LineUpdate` ]
Qt Signal emitted whenever a data segment (LinearSegment) is created,
modified, or deleted.
Emits a :class:`.LineUpdate`
"""
sigSegmentChanged = pyqtSignal(LineUpdate)
def __init__(self, rows=1, parent=None):
super().__init__(rows=rows, cols=1, grid=True, sharex=True,
multiy=True, timeaxis=True, parent=parent)
self._selecting = False
self._segments: Dict[OID, LinearSegmentGroup] = {}
self.add_onclick_handler(self.onclick)
@property
def selection_mode(self) -> bool:
"""@property Return the current selection mode state
Returns
-------
bool
True if selection mode is enabled, else False
"""
return self._selecting
def set_select_mode(self, mode: bool) -> None:
"""Set the selection mode of the LineSelectPlot
"""
self._selecting = mode
for group in self._segments.values():
group.set_movable(mode)
def add_segment(self, start: float, stop: float, label: str = None,
uid: OID = None, emit: bool = True) -> LinearSegmentGroup:
"""Add a LinearSegment selection across all linked x-axes with width
ranging from start -> stop with an optional label.
To non-interactively add a segment group (e.g. when loading a saved
project) this method should be called with the uid parameter, and emit
set to False.
Parameters
----------
start : float
stop : float
label : str, optional
Optional text label to display within the segment on the plot
uid : :class:`.OID`, optional
Specify the uid of the segment group, used for re-creating segments
when loading a plot
emit : bool, optional
If False, sigSegmentChanged will not be emitted on addition of the
segment
Returns
-------
:class:`.LinearSegmentGroup`
A reference to the newly created :class:`.LinearSegmentGroup`
"""
if isinstance(start, pd.Timestamp):
start = start.value
if isinstance(stop, pd.Timestamp):
stop = stop.value
uid = uid or OID(tag='segment')
group = LinearSegmentGroup(self.plots, uid, start, stop, label=label,
movable=self._selecting)
group.sigSegmentUpdate.connect(self.sigSegmentChanged.emit)
group.sigSegmentUpdate.connect(self._segment_updated)
self._segments[uid] = group
if emit:
update = LineUpdate(StateAction.CREATE, uid, group.left,
group.right, group.label_text)
self.sigSegmentChanged.emit(update)
return group
def get_segment(self, uid: OID) -> LinearSegmentGroup:
"""Get a :class:`.LinearSegmentGroup` by its :class:`.OID`
Returns
-------
:class:`.LinearSegmentGroup` or :const:`None`
The Segment group by the given OID if it exists, else None
"""
return self._segments.get(uid, None)
def onclick(self, ev): # pragma: no cover
"""Onclick handler for mouse left/right click.
Creates a new data-segment if :attr:`.selection_mode` is True on left-click
"""
event: MouseClickEvent = ev[0]
try:
pos: Point = event.pos()
except AttributeError:
# Avoid error when clicking around plot, due to an attempt to
# call mapFromScene on None in pyqtgraph/mouseEvents.py
return
if event.button() == Qt.RightButton:
return
if event.button() == Qt.LeftButton:
if not self.selection_mode:
return
p0 = self.get_plot(row=0)
if p0.vb is None:
return
event.accept()
# Map click location to data coordinates
xpos = p0.vb.mapToView(pos).x()
v0, v1 = self.get_xlim(0)
vb_span = v1 - v0
if not self._check_proximity(xpos, vb_span):
return
start = xpos - (vb_span * 0.05)
stop = xpos + (vb_span * 0.05)
self.add_segment(start, stop)
def get_toolbar(self, parent=None):
toolbar = super().get_toolbar(parent)
action_mode = QAction(Icon.SELECT.icon(), "Toggle Selection Mode", self)
action_mode.setCheckable(True)
action_mode.setChecked(self.selection_mode)
action_mode.toggled.connect(self.set_select_mode)
toolbar.addAction(action_mode)
action_seg_visibility = QAction(Icon.LINE_MODE.icon(),
"Toggle Segment Visibility", self)
action_seg_visibility.setCheckable(True)
action_seg_visibility.setChecked(True)
action_seg_visibility.toggled.connect(self.set_segment_visibility)
toolbar.addAction(action_seg_visibility)
return toolbar
def set_segment_visibility(self, state: bool):
for segment in self._segments.values():
segment.set_visibility(state)
def _check_proximity(self, x, span, proximity=0.03) -> bool:
"""Check the proximity of a mouse click at location 'x' in relation to
any already existing LinearRegions.
Parameters
----------
x : float
Mouse click position in data coordinate
span : float
X-axis span of the view box
proximity : float
Proximity as a percentage of the ViewBox span
Returns
-------
True if x is not in proximity to any existing LinearRegionItems
False if x is inside or in proximity to an existing LinearRegionItem
"""
prox = span * proximity
for group in self._segments.values():
x0, x1 = group.region
if x0 - prox <= x <= x1 + prox:
_log.warning("New segment is too close to an existing segment")
return False
return True
def _segment_updated(self, update: LineUpdate):
if update.action is StateAction.DELETE:
del self._segments[update.uid]
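# Hedged usage sketch for LineSelectPlot (a running QApplication and the
# surrounding DGP widget stack are assumed, so the snippet stays in comments):
#
#   plot = LineSelectPlot(rows=2)
#   plot.set_select_mode(True)                  # enable click/drag creation
#   plot.sigSegmentChanged.connect(print)       # observe LineUpdate events
#
#   # Non-interactive creation (e.g. re-loading a saved project): pass the
#   # stored uid and emit=False so no sigSegmentChanged update is emitted.
#   uid = OID(tag='segment')
#   group = plot.add_segment(0.0, 1.0e9, uid=uid, emit=False)
#   assert plot.get_segment(uid) is group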
|
{
"content_hash": "d62e2d2a99203c46f2906037283719c1",
"timestamp": "",
"source": "github",
"line_count": 242,
"max_line_length": 83,
"avg_line_length": 34.38429752066116,
"alnum_prop": 0.613868525417618,
"repo_name": "DynamicGravitySystems/DGP",
"id": "e59211d465e777b0df9a4352e0a75123c6541b62",
"size": "8345",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "dgp/gui/plotting/plotters.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "MATLAB",
"bytes": "4504"
},
{
"name": "Python",
"bytes": "427477"
}
],
"symlink_target": ""
}
|
from django.core.mail.message import EmailMessage
from django.core.urlresolvers import reverse
from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.decorators import login_required
from django.http import HttpResponsePermanentRedirect, HttpResponseRedirect, HttpResponse
from django.shortcuts import render_to_response
from django.template.context import RequestContext
from dnd.menu import menu_item, submenu_item, MenuItem
from dnd.forms import InaccurateContentForm
from dnd.models import NewsEntry
def permanent_redirect_view(request, view_name, args=None, kwargs=None):
url = reverse(view_name, args=args, kwargs=kwargs)
# get parameters
if len(request.GET) > 0:
#noinspection PyUnresolvedReferences
url += "?" + request.GET.urlencode()
return HttpResponsePermanentRedirect(url)
# noinspection PyShadowingBuiltins
def permanent_redirect_object(request, object):
url = object.get_absolute_url()
# get parameters
if len(request.GET) > 0:
#noinspection PyUnresolvedReferences
url += "?" + request.GET.urlencode()
return HttpResponsePermanentRedirect(url)
def is_3e_edition(edition):
return edition.system == 'DnD 3.0'
def is_admin(request):
return request.user.is_staff and request.user.is_active
@menu_item(MenuItem.CONTACTS)
@submenu_item(MenuItem.Contacts.NEWS)
def index(request):
news_entries = NewsEntry.objects.filter(enabled=True).order_by('-published')[:15]
response = render_to_response('dnd/index.html',
{
'request': request, 'news_entries': news_entries,
},
context_instance=RequestContext(request), )
if len(news_entries):
response.set_cookie('top_news', news_entries[0].pk, 10 * 365 * 24 * 60 * 60)
return response
def inaccurate_content(request):
if request.method == 'POST':
form = InaccurateContentForm(request.POST, initial={
'captcha': request.META['REMOTE_ADDR']})
if form.is_valid():
if form.cleaned_data['sender']:
headers = {
'Reply-To': form.cleaned_data['sender']}
else:
headers = {}
email = EmailMessage(
subject='Problem in url %s' % form.cleaned_data['url'],
body="Message: %s\n\nUrl: %s\n\nBetter desc:%s\nFrom: %s" % (
form.cleaned_data['message'], form.cleaned_data['url'],
form.cleaned_data['better_description'],
form.cleaned_data['sender']),
from_email='mailer@dndtools.eu',
to=('dndtoolseu@googlegroups.com', 'dndtools.eu@gmail.com'),
headers=headers,
)
email.send()
# Trello email
email = EmailMessage(
subject='#2 BUGFIXES: %s' % form.cleaned_data['url'],
body="Message: %s\n\nUrl: %s\n\nBetter desc:%s\nFrom: %s" % (
form.cleaned_data['message'], form.cleaned_data['url'],
form.cleaned_data['better_description'],
form.cleaned_data['sender']),
from_email='mailer@dndtools.eu',
to=('dndtoolsbugfixes+fdpadeovxynhkxuyzjqf@boards.trello.com', ),
headers=headers,
)
email.send()
# Redirect after POST
return HttpResponseRedirect(reverse('inaccurate_content_sent'))
else:
form = InaccurateContentForm(
initial={
'url': request.GET.get('url', ''),
})
return render_to_response('dnd/inaccurate_content.html',
{
'request': request,
'form': form, }, context_instance=RequestContext(request), )
def inaccurate_content_sent(request):
return render_to_response('dnd/inaccurate_content_sent.html',
{
'request': request,
}, context_instance=RequestContext(request), )
#@revision.create_on_success
def very_secret_url(request):
log = ''
    # The reversion integration is disabled (see the commented decorator above);
    # referencing `revision` / `User` here would raise NameError at runtime.
    #revision.comment = "Automatic (updating PHB spell pages)"
    #revision.user = User.objects.get(username='dndtools')
# counter = 1
#
# phb = Rulebook.objects.get(abbr='PH')
#
# for line in data.split('\n'):
# line = line.strip()
# m = re.match('([^\t]+)\tPH \t(\d+)', line)
# if m:
# spells = Spell.objects.filter(rulebook=phb, slug=slugify(m.group(1).strip()))
# spell = spells[0] if spells else None
#
# if spell and spell.page is None:
# spell.page = m.group(2).strip()
# spell.save()
#
# message = '%05d %s saved\n' % (counter, spell)
# log += message
# print message,
# counter += 1
# else:
# message = '%05d %s IGNORED\n' % (counter, spell)
# log += message
# print message,
# counter += 1
return render_to_response('dnd/very_secret_url.html',
{
'request': request,
'log': log,
}, context_instance=RequestContext(request), )
def user_login(request):
# Like before, obtain the context for the user's request.
context = RequestContext(request)
next = ""
if request.GET:
next = request.GET['next']
if request.user.is_authenticated():
if next != "":
            return HttpResponseRedirect(next)
# If the request is a HTTP POST, try to pull out the relevant information.
if request.method == 'POST':
# Gather the username and password provided by the user.
# This information is obtained from the login form.
username = request.POST['username']
password = request.POST['password']
# Use Django's machinery to attempt to see if the username/password
# combination is valid - a User object is returned if it is.
user = authenticate(username=username, password=password)
# If we have a User object, the details are correct.
# If None (Python's way of representing the absence of a value), no user
# with matching credentials was found.
if user:
# Is the account active? It could have been disabled.
if user.is_active:
# If the account is valid and active, we can log the user in.
# We'll send the user back to the homepage.
login(request, user)
if next == "":
return HttpResponseRedirect('/')
else:
return HttpResponseRedirect(next)
else:
# An inactive account was used - no logging in!
return HttpResponse("Your account is disabled.")
else:
# Bad login details were provided. So we can't log the user in.
print "Invalid login details: {0}, {1}".format(username, password)
return HttpResponse("Invalid login details supplied.")
# The request is not a HTTP POST, so display the login form.
# This scenario would most likely be a HTTP GET.
else:
# No context variables to pass to the template system, hence the
# blank dictionary object...
if next == "":
return render_to_response('dnd/login.html', {}, context)
else:
return render_to_response('dnd/login.html', {'next': next }, context)
# Use the login_required() decorator to ensure only those logged in can access the view.
@login_required
def user_logout(request):
# Since we know the user is logged in, we can now just log them out.
logout(request)
# Take the user back to the homepage.
return HttpResponseRedirect('/')
|
{
"content_hash": "b2912327bb3b08fc4c05ecb88a9ce8cc",
"timestamp": "",
"source": "github",
"line_count": 223,
"max_line_length": 94,
"avg_line_length": 37.318385650224215,
"alnum_prop": 0.5651285748618121,
"repo_name": "FreezyExp/dndtools",
"id": "eac1a6214971756baeda60c62e25b8e1d63354a9",
"size": "8347",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dndtools/dnd/views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "53351"
},
{
"name": "HTML",
"bytes": "197174"
},
{
"name": "JavaScript",
"bytes": "1941"
},
{
"name": "Python",
"bytes": "392237"
}
],
"symlink_target": ""
}
|
import webob
from jacket.api.compute.openstack.compute import access_ips
from jacket.api.compute.openstack.compute import extension_info
from jacket.api.compute.openstack.compute.legacy_v2 import servers as servers_v20
from jacket.api.compute.openstack.compute import servers as servers_v21
from jacket.api.compute.openstack import extensions as extensions_v20
from jacket.api.compute.openstack import wsgi
from jacket.compute.cloud import api as compute_api
from jacket.compute import exception
from jacket.objects.compute import instance as instance_obj
from jacket.compute import test
from jacket.tests.compute.unit.api.openstack import fakes
from jacket.tests.compute.unit.image import fake
class AccessIPsExtTestV21(test.NoDBTestCase):
def setUp(self):
super(AccessIPsExtTestV21, self).setUp()
self.access_ips_ext = access_ips.AccessIPs(None)
def _test(self, func):
server_dict = {access_ips.AccessIPs.v4_key: '1.1.1.1',
access_ips.AccessIPs.v6_key: 'fe80::'}
create_kwargs = {}
func(server_dict, create_kwargs)
self.assertEqual(create_kwargs, {'access_ip_v4': '1.1.1.1',
'access_ip_v6': 'fe80::'})
def _test_with_ipv4_only(self, func):
server_dict = {access_ips.AccessIPs.v4_key: '1.1.1.1'}
create_kwargs = {}
func(server_dict, create_kwargs)
self.assertEqual(create_kwargs, {'access_ip_v4': '1.1.1.1'})
def _test_with_ipv6_only(self, func):
server_dict = {access_ips.AccessIPs.v6_key: 'fe80::'}
create_kwargs = {}
func(server_dict, create_kwargs)
self.assertEqual(create_kwargs, {'access_ip_v6': 'fe80::'})
def _test_without_ipv4_and_ipv6(self, func):
server_dict = {}
create_kwargs = {}
func(server_dict, create_kwargs)
self.assertEqual(create_kwargs, {})
def _test_with_ipv4_null(self, func):
server_dict = {access_ips.AccessIPs.v4_key: None}
create_kwargs = {}
func(server_dict, create_kwargs)
self.assertEqual(create_kwargs, {'access_ip_v4': None})
def _test_with_ipv6_null(self, func):
server_dict = {access_ips.AccessIPs.v6_key: None}
create_kwargs = {}
func(server_dict, create_kwargs)
self.assertEqual(create_kwargs, {'access_ip_v6': None})
def _test_with_ipv4_blank(self, func):
server_dict = {access_ips.AccessIPs.v4_key: ''}
create_kwargs = {}
func(server_dict, create_kwargs)
self.assertEqual(create_kwargs, {'access_ip_v4': None})
def _test_with_ipv6_blank(self, func):
server_dict = {access_ips.AccessIPs.v6_key: ''}
create_kwargs = {}
func(server_dict, create_kwargs)
self.assertEqual(create_kwargs, {'access_ip_v6': None})
def test_server_create(self):
self._test(self.access_ips_ext.server_create)
def test_server_create_with_ipv4_only(self):
self._test_with_ipv4_only(self.access_ips_ext.server_create)
def test_server_create_with_ipv6_only(self):
self._test_with_ipv6_only(self.access_ips_ext.server_create)
def test_server_create_without_ipv4_and_ipv6(self):
self._test_without_ipv4_and_ipv6(self.access_ips_ext.server_create)
def test_server_create_with_ipv4_null(self):
self._test_with_ipv4_null(self.access_ips_ext.server_create)
def test_server_create_with_ipv6_null(self):
self._test_with_ipv6_null(self.access_ips_ext.server_create)
def test_server_create_with_ipv4_blank(self):
self._test_with_ipv4_blank(self.access_ips_ext.server_create)
def test_server_create_with_ipv6_blank(self):
self._test_with_ipv6_blank(self.access_ips_ext.server_create)
def test_server_update(self):
self._test(self.access_ips_ext.server_update)
def test_server_update_with_ipv4_only(self):
self._test_with_ipv4_only(self.access_ips_ext.server_update)
def test_server_update_with_ipv6_only(self):
self._test_with_ipv6_only(self.access_ips_ext.server_update)
def test_server_update_without_ipv4_and_ipv6(self):
self._test_without_ipv4_and_ipv6(self.access_ips_ext.server_update)
def test_server_update_with_ipv4_null(self):
self._test_with_ipv4_null(self.access_ips_ext.server_update)
def test_server_update_with_ipv6_null(self):
self._test_with_ipv6_null(self.access_ips_ext.server_update)
def test_server_update_with_ipv4_blank(self):
self._test_with_ipv4_blank(self.access_ips_ext.server_update)
def test_server_update_with_ipv6_blank(self):
self._test_with_ipv6_blank(self.access_ips_ext.server_update)
def test_server_rebuild(self):
self._test(self.access_ips_ext.server_rebuild)
def test_server_rebuild_with_ipv4_only(self):
self._test_with_ipv4_only(self.access_ips_ext.server_rebuild)
def test_server_rebuild_with_ipv6_only(self):
self._test_with_ipv6_only(self.access_ips_ext.server_rebuild)
def test_server_rebuild_without_ipv4_and_ipv6(self):
self._test_without_ipv4_and_ipv6(self.access_ips_ext.server_rebuild)
def test_server_rebuild_with_ipv4_null(self):
self._test_with_ipv4_null(self.access_ips_ext.server_rebuild)
def test_server_rebuild_with_ipv6_null(self):
self._test_with_ipv6_null(self.access_ips_ext.server_rebuild)
def test_server_rebuild_with_ipv4_blank(self):
self._test_with_ipv4_blank(self.access_ips_ext.server_rebuild)
def test_server_rebuild_with_ipv6_blank(self):
self._test_with_ipv6_blank(self.access_ips_ext.server_rebuild)
class AccessIPsExtAPIValidationTestV21(test.TestCase):
validation_error = exception.ValidationError
def setUp(self):
super(AccessIPsExtAPIValidationTestV21, self).setUp()
def fake_save(context, **kwargs):
pass
def fake_rebuild(*args, **kwargs):
pass
self._set_up_controller()
fake.stub_out_image_service(self)
self.stub_out('compute.db.instance_get_by_uuid',
fakes.fake_instance_get())
self.stubs.Set(instance_obj.Instance, 'save', fake_save)
self.stubs.Set(compute_api.API, 'rebuild', fake_rebuild)
self.req = fakes.HTTPRequest.blank('')
def _set_up_controller(self):
ext_info = extension_info.LoadedExtensionInfo()
self.controller = servers_v21.ServersController(
extension_info=ext_info)
# Note(gmann): V2.1 has Access IP as separate extension. This class tests
# calls controller directly so Access IPs will not be present in server
# response. Those are being tested in AccessIPsExtTest class.
def _verify_update_access_ip(self, res_dict, params):
pass
def _test_create(self, params):
body = {
'server': {
'name': 'server_test',
'imageRef': '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
'flavorRef': 'http://localhost/123/flavors/3',
},
}
body['server'].update(params)
res_dict = self.controller.create(self.req, body=body).obj
return res_dict
def _test_update(self, params):
body = {
'server': {
},
}
body['server'].update(params)
res_dict = self.controller.update(self.req, fakes.FAKE_UUID, body=body)
self._verify_update_access_ip(res_dict, params)
def _test_rebuild(self, params):
body = {
'rebuild': {
'imageRef': '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
},
}
body['rebuild'].update(params)
self.controller._action_rebuild(self.req, fakes.FAKE_UUID, body=body)
def test_create_server_with_access_ipv4(self):
params = {access_ips.AccessIPs.v4_key: '192.168.0.10'}
self._test_create(params)
def test_create_server_with_access_ip_pass_disabled(self):
# test with admin passwords disabled See lp bug 921814
self.flags(enable_instance_password=False)
params = {access_ips.AccessIPs.v4_key: '192.168.0.10',
access_ips.AccessIPs.v6_key: '2001:db8::9abc'}
res = self._test_create(params)
server = res['server']
self.assertNotIn("admin_password", server)
def test_create_server_with_invalid_access_ipv4(self):
params = {access_ips.AccessIPs.v4_key: '1.1.1.1.1.1'}
self.assertRaises(self.validation_error, self._test_create, params)
def test_create_server_with_access_ipv6(self):
params = {access_ips.AccessIPs.v6_key: '2001:db8::9abc'}
self._test_create(params)
def test_create_server_with_invalid_access_ipv6(self):
params = {access_ips.AccessIPs.v6_key: 'fe80:::::::'}
self.assertRaises(self.validation_error, self._test_create, params)
def test_update_server_with_access_ipv4(self):
params = {access_ips.AccessIPs.v4_key: '192.168.0.10'}
self._test_update(params)
def test_update_server_with_invalid_access_ipv4(self):
params = {access_ips.AccessIPs.v4_key: '1.1.1.1.1.1'}
self.assertRaises(self.validation_error, self._test_update, params)
def test_update_server_with_access_ipv6(self):
params = {access_ips.AccessIPs.v6_key: '2001:db8::9abc'}
self._test_update(params)
def test_update_server_with_invalid_access_ipv6(self):
params = {access_ips.AccessIPs.v6_key: 'fe80:::::::'}
self.assertRaises(self.validation_error, self._test_update, params)
def test_rebuild_server_with_access_ipv4(self):
params = {access_ips.AccessIPs.v4_key: '192.168.0.10'}
self._test_rebuild(params)
def test_rebuild_server_with_invalid_access_ipv4(self):
params = {access_ips.AccessIPs.v4_key: '1.1.1.1.1.1'}
self.assertRaises(self.validation_error, self._test_rebuild,
params)
def test_rebuild_server_with_access_ipv6(self):
params = {access_ips.AccessIPs.v6_key: '2001:db8::9abc'}
self._test_rebuild(params)
def test_rebuild_server_with_invalid_access_ipv6(self):
params = {access_ips.AccessIPs.v6_key: 'fe80:::::::'}
self.assertRaises(self.validation_error, self._test_rebuild,
params)
class AccessIPsExtAPIValidationTestV2(AccessIPsExtAPIValidationTestV21):
validation_error = webob.exc.HTTPBadRequest
def _set_up_controller(self):
self.ext_mgr = extensions_v20.ExtensionManager()
self.ext_mgr.extensions = {}
self.controller = servers_v20.Controller(self.ext_mgr)
def _verify_update_access_ip(self, res_dict, params):
for key, value in params.items():
value = value or ''
self.assertEqual(res_dict['server'][key], value)
# Note(gmann): Below tests are only valid for V2 as
# V2.1 has strong input validation and does not allow
# None or blank access ips.
def test_update_server_access_ipv4_none(self):
params = {access_ips.AccessIPs.v4_key: None}
self._test_update(params)
def test_update_server_access_ipv4_blank(self):
params = {access_ips.AccessIPs.v4_key: ''}
self._test_update(params)
def test_update_server_access_ipv6_none(self):
params = {access_ips.AccessIPs.v6_key: None}
self._test_update(params)
def test_update_server_access_ipv6_blank(self):
params = {access_ips.AccessIPs.v6_key: ''}
self._test_update(params)
class AccessIPsControllerTestV21(test.NoDBTestCase):
def setUp(self):
super(AccessIPsControllerTestV21, self).setUp()
self.controller = access_ips.AccessIPsController()
def _test_with_access_ips(self, func, kwargs={'id': 'fake'}):
req = wsgi.Request({'compute.context':
fakes.FakeRequestContext('fake_user', 'fake',
is_admin=True)})
instance = {'uuid': 'fake',
'access_ip_v4': '1.1.1.1',
'access_ip_v6': 'fe80::'}
req.cache_db_instance(instance)
resp_obj = wsgi.ResponseObject(
{"server": {'id': 'fake'}})
func(req, resp_obj, **kwargs)
self.assertEqual(resp_obj.obj['server'][access_ips.AccessIPs.v4_key],
'1.1.1.1')
self.assertEqual(resp_obj.obj['server'][access_ips.AccessIPs.v6_key],
'fe80::')
def _test_without_access_ips(self, func, kwargs={'id': 'fake'}):
req = wsgi.Request({'compute.context':
fakes.FakeRequestContext('fake_user', 'fake',
is_admin=True)})
instance = {'uuid': 'fake',
'access_ip_v4': None,
'access_ip_v6': None}
req.cache_db_instance(instance)
resp_obj = wsgi.ResponseObject(
{"server": {'id': 'fake'}})
func(req, resp_obj, **kwargs)
self.assertEqual(resp_obj.obj['server'][access_ips.AccessIPs.v4_key],
'')
self.assertEqual(resp_obj.obj['server'][access_ips.AccessIPs.v6_key],
'')
def test_show(self):
self._test_with_access_ips(self.controller.show)
def test_show_without_access_ips(self):
self._test_without_access_ips(self.controller.show)
def test_detail(self):
req = wsgi.Request({'compute.context':
fakes.FakeRequestContext('fake_user', 'fake',
is_admin=True)})
instance1 = {'uuid': 'fake1',
'access_ip_v4': '1.1.1.1',
'access_ip_v6': 'fe80::'}
instance2 = {'uuid': 'fake2',
'access_ip_v4': '1.1.1.2',
'access_ip_v6': 'fe81::'}
req.cache_db_instance(instance1)
req.cache_db_instance(instance2)
resp_obj = wsgi.ResponseObject(
{"servers": [{'id': 'fake1'}, {'id': 'fake2'}]})
self.controller.detail(req, resp_obj)
self.assertEqual(
resp_obj.obj['servers'][0][access_ips.AccessIPs.v4_key],
'1.1.1.1')
self.assertEqual(
resp_obj.obj['servers'][0][access_ips.AccessIPs.v6_key],
'fe80::')
self.assertEqual(
resp_obj.obj['servers'][1][access_ips.AccessIPs.v4_key],
'1.1.1.2')
self.assertEqual(
resp_obj.obj['servers'][1][access_ips.AccessIPs.v6_key],
'fe81::')
def test_detail_without_access_ips(self):
req = wsgi.Request({'compute.context':
fakes.FakeRequestContext('fake_user', 'fake',
is_admin=True)})
instance1 = {'uuid': 'fake1',
'access_ip_v4': None,
'access_ip_v6': None}
instance2 = {'uuid': 'fake2',
'access_ip_v4': None,
'access_ip_v6': None}
req.cache_db_instance(instance1)
req.cache_db_instance(instance2)
resp_obj = wsgi.ResponseObject(
{"servers": [{'id': 'fake1'}, {'id': 'fake2'}]})
self.controller.detail(req, resp_obj)
self.assertEqual(
resp_obj.obj['servers'][0][access_ips.AccessIPs.v4_key], '')
self.assertEqual(
resp_obj.obj['servers'][0][access_ips.AccessIPs.v6_key], '')
self.assertEqual(
resp_obj.obj['servers'][1][access_ips.AccessIPs.v4_key], '')
self.assertEqual(
resp_obj.obj['servers'][1][access_ips.AccessIPs.v6_key], '')
def test_update(self):
self._test_with_access_ips(self.controller.update, {'id': 'fake',
'body': {}})
def test_update_without_access_ips(self):
self._test_without_access_ips(self.controller.update, {'id': 'fake',
'body': {}})
def test_rebuild(self):
self._test_with_access_ips(self.controller.rebuild, {'id': 'fake',
'body': {}})
def test_rebuild_without_access_ips(self):
self._test_without_access_ips(self.controller.rebuild, {'id': 'fake',
'body': {}})
|
{
"content_hash": "230673e6d44694e5ce02fa23c291e8ef",
"timestamp": "",
"source": "github",
"line_count": 411,
"max_line_length": 81,
"avg_line_length": 40.09002433090024,
"alnum_prop": 0.6022334162772349,
"repo_name": "HybridF5/jacket",
"id": "7de5b98d7f3edaa983cc1b1a0dccb26e4de49fb2",
"size": "17079",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "jacket/tests/compute/unit/api/openstack/compute/test_access_ips.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "26995056"
},
{
"name": "Shell",
"bytes": "28464"
},
{
"name": "Smarty",
"bytes": "291947"
}
],
"symlink_target": ""
}
|
import os
import unittest
from conans.test.utils.tools import TestClient
from conans.util.files import load
class SysrootTest(unittest.TestCase):
def test(self):
client = TestClient()
sysroot = """from conans import ConanFile
class Pkg(ConanFile):
def package_info(self):
self.cpp_info.sysroot = "HelloSysRoot"
"""
client.save({"conanfile.py": sysroot})
client.run("create . sysroot/0.1@user/testing")
conanfile = """from conans import ConanFile
class Pkg(ConanFile):
build_requires = "sysroot/0.1@user/testing"
def build(self):
self.output.info("PKG SYSROOT: %s" % self.deps_cpp_info.sysroot)
def package_info(self):
self.cpp_info.sysroot = "HelloSysRoot"
"""
test_conanfile = """from conans import ConanFile
class Pkg(ConanFile):
def build(self):
self.output.info("Test SYSROOT: %s" % self.deps_cpp_info.sysroot)
def test(self):
pass
"""
client.save({"conanfile.py": conanfile,
"test_package/conanfile.py": test_conanfile})
client.run("create . Pkg/0.1@user/testing")
self.assertIn("Pkg/0.1@user/testing: PKG SYSROOT: HelloSysRoot", client.out)
self.assertIn("Pkg/0.1@user/testing (test package): Test SYSROOT: HelloSysRoot", client.out)
# Install conanfile and check conaninfo.txt
client.run("install .")
bili = load(os.path.join(client.current_folder, "conanbuildinfo.txt"))
self.assertIn(os.linesep.join(["[sysroot_sysroot]", "HelloSysRoot"]), bili)
self.assertIn(os.linesep.join(["[sysroot]", "HelloSysRoot"]), bili)
|
{
"content_hash": "fe1b467f65d4e038986effd834090fe9",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 100,
"avg_line_length": 37.20454545454545,
"alnum_prop": 0.6511912034208919,
"repo_name": "birsoyo/conan",
"id": "3a0154674ead315db67c3be417d0d5f1c19e5073",
"size": "1637",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "conans/test/sysroot_test.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1100"
},
{
"name": "Groovy",
"bytes": "6251"
},
{
"name": "Python",
"bytes": "3101477"
},
{
"name": "Shell",
"bytes": "1864"
}
],
"symlink_target": ""
}
|
"""IIR and FIR filtering and resampling functions."""
from collections import Counter
from copy import deepcopy
from functools import partial
import numpy as np
from .annotations import _annotations_starts_stops
from .io.pick import _picks_to_idx
from .cuda import (_setup_cuda_fft_multiply_repeated, _fft_multiply_repeated,
_setup_cuda_fft_resample, _fft_resample, _smart_pad)
from .parallel import parallel_func
from .time_frequency.multitaper import _mt_spectra, _compute_mt_params
from .utils import (logger, verbose, sum_squared, warn, _pl,
_check_preload, _validate_type, _check_option, _ensure_int)
from ._ola import _COLA
# These values from Ifeachor and Jervis.
_length_factors = dict(hann=3.1, hamming=3.3, blackman=5.0)
def is_power2(num):
"""Test if number is a power of 2.
Parameters
----------
num : int
Number.
Returns
-------
b : bool
True if is power of 2.
Examples
--------
>>> is_power2(2 ** 3)
True
>>> is_power2(5)
False
"""
num = int(num)
return num != 0 and ((num & (num - 1)) == 0)
def next_fast_len(target):
"""Find the next fast size of input data to `fft`, for zero-padding, etc.
SciPy's FFTPACK has efficient functions for radix {2, 3, 4, 5}, so this
returns the next composite of the prime factors 2, 3, and 5 which is
greater than or equal to `target`. (These are also known as 5-smooth
numbers, regular numbers, or Hamming numbers.)
Parameters
----------
target : int
Length to start searching from. Must be a positive integer.
Returns
-------
out : int
The first 5-smooth number greater than or equal to `target`.
Notes
-----
Copied from SciPy with minor modifications.
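    Examples
    --------
    A few illustrative values; each result comes from the precomputed table of
    5-smooth numbers used below:
    >>> next_fast_len(7)
    8
    >>> next_fast_len(1000)
    1000
    >>> next_fast_len(1021)
    1024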
"""
from bisect import bisect_left
hams = (8, 9, 10, 12, 15, 16, 18, 20, 24, 25, 27, 30, 32, 36, 40, 45, 48,
50, 54, 60, 64, 72, 75, 80, 81, 90, 96, 100, 108, 120, 125, 128,
135, 144, 150, 160, 162, 180, 192, 200, 216, 225, 240, 243, 250,
256, 270, 288, 300, 320, 324, 360, 375, 384, 400, 405, 432, 450,
480, 486, 500, 512, 540, 576, 600, 625, 640, 648, 675, 720, 729,
750, 768, 800, 810, 864, 900, 960, 972, 1000, 1024, 1080, 1125,
1152, 1200, 1215, 1250, 1280, 1296, 1350, 1440, 1458, 1500, 1536,
1600, 1620, 1728, 1800, 1875, 1920, 1944, 2000, 2025, 2048, 2160,
2187, 2250, 2304, 2400, 2430, 2500, 2560, 2592, 2700, 2880, 2916,
3000, 3072, 3125, 3200, 3240, 3375, 3456, 3600, 3645, 3750, 3840,
3888, 4000, 4050, 4096, 4320, 4374, 4500, 4608, 4800, 4860, 5000,
5120, 5184, 5400, 5625, 5760, 5832, 6000, 6075, 6144, 6250, 6400,
6480, 6561, 6750, 6912, 7200, 7290, 7500, 7680, 7776, 8000, 8100,
8192, 8640, 8748, 9000, 9216, 9375, 9600, 9720, 10000)
if target <= 6:
return target
# Quickly check if it's already a power of 2
if not (target & (target - 1)):
return target
# Get result quickly for small sizes, since FFT itself is similarly fast.
if target <= hams[-1]:
return hams[bisect_left(hams, target)]
match = float('inf') # Anything found will be smaller
p5 = 1
while p5 < target:
p35 = p5
while p35 < target:
# Ceiling integer division, avoiding conversion to float
# (quotient = ceil(target / p35))
quotient = -(-target // p35)
p2 = 2 ** int(quotient - 1).bit_length()
N = p2 * p35
if N == target:
return N
elif N < match:
match = N
p35 *= 3
if p35 == target:
return p35
if p35 < match:
match = p35
p5 *= 5
if p5 == target:
return p5
if p5 < match:
match = p5
return match
def _overlap_add_filter(x, h, n_fft=None, phase='zero', picks=None,
n_jobs=None, copy=True, pad='reflect_limited'):
"""Filter the signal x using h with overlap-add FFTs.
Parameters
----------
x : array, shape (n_signals, n_times)
Signals to filter.
h : 1d array
Filter impulse response (FIR filter coefficients). Must be odd length
if phase == 'linear'.
n_fft : int
Length of the FFT. If None, the best size is determined automatically.
phase : str
If 'zero', the delay for the filter is compensated (and it must be
an odd-length symmetric filter). If 'linear', the response is
uncompensated. If 'zero-double', the filter is applied in the
forward and reverse directions. If 'minimum', a minimum-phase
filter will be used.
picks : list | None
See calling functions.
n_jobs : int | str
Number of jobs to run in parallel. Can be 'cuda' if ``cupy``
is installed properly.
copy : bool
If True, a copy of x, filtered, is returned. Otherwise, it operates
on x in place.
pad : str
Padding type for ``_smart_pad``.
Returns
-------
x : array, shape (n_signals, n_times)
x filtered.
"""
# set up array for filtering, reshape to 2D, operate on last axis
x, orig_shape, picks = _prep_for_filtering(x, copy, picks)
# Extend the signal by mirroring the edges to reduce transient filter
# response
_check_zero_phase_length(len(h), phase)
if len(h) == 1:
return x * h ** 2 if phase == 'zero-double' else x * h
n_edge = max(min(len(h), x.shape[1]) - 1, 0)
logger.debug('Smart-padding with: %s samples on each edge' % n_edge)
n_x = x.shape[1] + 2 * n_edge
if phase == 'zero-double':
h = np.convolve(h, h[::-1])
# Determine FFT length to use
min_fft = 2 * len(h) - 1
if n_fft is None:
max_fft = n_x
if max_fft >= min_fft:
# cost function based on number of multiplications
N = 2 ** np.arange(np.ceil(np.log2(min_fft)),
np.ceil(np.log2(max_fft)) + 1, dtype=int)
cost = (np.ceil(n_x / (N - len(h) + 1).astype(np.float64)) *
N * (np.log2(N) + 1))
# add a heuristic term to prevent too-long FFT's which are slow
# (not predicted by mult. cost alone, 4e-5 exp. determined)
cost += 4e-5 * N * n_x
n_fft = N[np.argmin(cost)]
else:
# Use only a single block
n_fft = next_fast_len(min_fft)
logger.debug('FFT block length: %s' % n_fft)
if n_fft < min_fft:
raise ValueError('n_fft is too short, has to be at least '
'2 * len(h) - 1 (%s), got %s' % (min_fft, n_fft))
# Figure out if we should use CUDA
n_jobs, cuda_dict = _setup_cuda_fft_multiply_repeated(n_jobs, h, n_fft)
# Process each row separately
picks = _picks_to_idx(len(x), picks)
parallel, p_fun, _ = parallel_func(_1d_overlap_filter, n_jobs)
if n_jobs == 1:
for p in picks:
x[p] = _1d_overlap_filter(x[p], len(h), n_edge, phase,
cuda_dict, pad, n_fft)
else:
data_new = parallel(p_fun(x[p], len(h), n_edge, phase,
cuda_dict, pad, n_fft) for p in picks)
for pp, p in enumerate(picks):
x[p] = data_new[pp]
x.shape = orig_shape
return x
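# Hedged sketch of a direct call (this helper is normally reached via
# filter_data); the 11-tap moving-average filter is purely illustrative and
# satisfies the odd-length, symmetric requirement of phase='zero':
#
#   x = np.random.RandomState(0).randn(2, 1000)
#   h = np.ones(11) / 11.
#   x_filt = _overlap_add_filter(x, h, phase='zero')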
def _1d_overlap_filter(x, n_h, n_edge, phase, cuda_dict, pad, n_fft):
"""Do one-dimensional overlap-add FFT FIR filtering."""
# pad to reduce ringing
x_ext = _smart_pad(x, (n_edge, n_edge), pad)
n_x = len(x_ext)
x_filtered = np.zeros_like(x_ext)
n_seg = n_fft - n_h + 1
n_segments = int(np.ceil(n_x / float(n_seg)))
shift = ((n_h - 1) // 2 if phase.startswith('zero') else 0) + n_edge
# Now the actual filtering step is identical for zero-phase (filtfilt-like)
# or single-pass
for seg_idx in range(n_segments):
start = seg_idx * n_seg
stop = (seg_idx + 1) * n_seg
seg = x_ext[start:stop]
seg = np.concatenate([seg, np.zeros(n_fft - len(seg))])
prod = _fft_multiply_repeated(seg, cuda_dict)
start_filt = max(0, start - shift)
stop_filt = min(start - shift + n_fft, n_x)
start_prod = max(0, shift - start)
stop_prod = start_prod + stop_filt - start_filt
x_filtered[start_filt:stop_filt] += prod[start_prod:stop_prod]
# Remove mirrored edges that we added and cast (n_edge can be zero)
x_filtered = x_filtered[:n_x - 2 * n_edge].astype(x.dtype)
return x_filtered
def _filter_attenuation(h, freq, gain):
"""Compute minimum attenuation at stop frequency."""
from scipy.signal import freqz
_, filt_resp = freqz(h.ravel(), worN=np.pi * freq)
filt_resp = np.abs(filt_resp) # use amplitude response
filt_resp[np.where(gain == 1)] = 0
idx = np.argmax(filt_resp)
att_db = -20 * np.log10(np.maximum(filt_resp[idx], 1e-20))
att_freq = freq[idx]
return att_db, att_freq
def _prep_for_filtering(x, copy, picks=None):
"""Set up array as 2D for filtering ease."""
x = _check_filterable(x)
if copy is True:
x = x.copy()
orig_shape = x.shape
x = np.atleast_2d(x)
picks = _picks_to_idx(x.shape[-2], picks)
x.shape = (np.prod(x.shape[:-1]), x.shape[-1])
if len(orig_shape) == 3:
n_epochs, n_channels, n_times = orig_shape
offset = np.repeat(np.arange(0, n_channels * n_epochs, n_channels),
len(picks))
picks = np.tile(picks, n_epochs) + offset
elif len(orig_shape) > 3:
raise ValueError('picks argument is not supported for data with more'
' than three dimensions')
assert all(0 <= pick < x.shape[0] for pick in picks) # guaranteed by above
return x, orig_shape, picks
def _firwin_design(N, freq, gain, window, sfreq):
"""Construct a FIR filter using firwin."""
from scipy.signal import firwin
assert freq[0] == 0
assert len(freq) > 1
assert len(freq) == len(gain)
assert N % 2 == 1
h = np.zeros(N)
prev_freq = freq[-1]
prev_gain = gain[-1]
if gain[-1] == 1:
h[N // 2] = 1 # start with "all up"
assert prev_gain in (0, 1)
for this_freq, this_gain in zip(freq[::-1][1:], gain[::-1][1:]):
assert this_gain in (0, 1)
if this_gain != prev_gain:
            # Get the correct N to satisfy the requested transition bandwidth
transition = (prev_freq - this_freq) / 2.
this_N = int(round(_length_factors[window] / transition))
this_N += (1 - this_N % 2) # make it odd
if this_N > N:
raise ValueError('The requested filter length %s is too short '
'for the requested %0.2f Hz transition band, '
'which requires %s samples'
% (N, transition * sfreq / 2., this_N))
# Construct a lowpass
this_h = firwin(this_N, (prev_freq + this_freq) / 2.,
window=window, pass_zero=True, fs=freq[-1] * 2)
assert this_h.shape == (this_N,)
offset = (N - this_N) // 2
if this_gain == 0:
h[offset:N - offset] -= this_h
else:
h[offset:N - offset] += this_h
prev_gain = this_gain
prev_freq = this_freq
return h
def _construct_fir_filter(sfreq, freq, gain, filter_length, phase, fir_window,
fir_design):
"""Filter signal using gain control points in the frequency domain.
The filter impulse response is constructed from a Hann window (window
used in "firwin2" function) to avoid ripples in the frequency response
(windowing is a smoothing in frequency domain).
If x is multi-dimensional, this operates along the last dimension.
Parameters
----------
sfreq : float
Sampling rate in Hz.
freq : 1d array
Frequency sampling points in Hz.
gain : 1d array
Filter gain at frequency sampling points.
Must be all 0 and 1 for fir_design=="firwin".
filter_length : int
Length of the filter to use. Must be odd length if phase == "zero".
phase : str
If 'zero', the delay for the filter is compensated (and it must be
an odd-length symmetric filter). If 'linear', the response is
uncompensated. If 'zero-double', the filter is applied in the
forward and reverse directions. If 'minimum', a minimum-phase
filter will be used.
fir_window : str
The window to use in FIR design, can be "hamming" (default),
"hann", or "blackman".
fir_design : str
Can be "firwin2" or "firwin".
Returns
-------
h : array
Filter coefficients.
"""
assert freq[0] == 0
if fir_design == 'firwin2':
from scipy.signal import firwin2 as fir_design
else:
assert fir_design == 'firwin'
fir_design = partial(_firwin_design, sfreq=sfreq)
from scipy.signal import minimum_phase
# issue a warning if attenuation is less than this
min_att_db = 12 if phase == 'minimum' else 20
# normalize frequencies
freq = np.array(freq) / (sfreq / 2.)
if freq[0] != 0 or freq[-1] != 1:
        raise ValueError('freq must start at 0 and end at Nyquist (%s), got %s'
% (sfreq / 2., freq))
gain = np.array(gain)
# Use overlap-add filter with a fixed length
N = _check_zero_phase_length(filter_length, phase, gain[-1])
# construct symmetric (linear phase) filter
if phase == 'minimum':
h = fir_design(N * 2 - 1, freq, gain, window=fir_window)
h = minimum_phase(h)
else:
h = fir_design(N, freq, gain, window=fir_window)
assert h.size == N
att_db, att_freq = _filter_attenuation(h, freq, gain)
if phase == 'zero-double':
att_db += 6
if att_db < min_att_db:
att_freq *= sfreq / 2.
warn('Attenuation at stop frequency %0.2f Hz is only %0.2f dB. '
'Increase filter_length for higher attenuation.'
% (att_freq, att_db))
return h
def _check_zero_phase_length(N, phase, gain_nyq=0):
N = int(N)
if N % 2 == 0:
if phase == 'zero':
raise RuntimeError('filter_length must be odd if phase="zero", '
'got %s' % N)
elif phase == 'zero-double' and gain_nyq == 1:
N += 1
return N
def _check_coefficients(system):
"""Check for filter stability."""
if isinstance(system, tuple):
from scipy.signal import tf2zpk
z, p, k = tf2zpk(*system)
else: # sos
from scipy.signal import sos2zpk
z, p, k = sos2zpk(system)
if np.any(np.abs(p) > 1.0):
raise RuntimeError('Filter poles outside unit circle, filter will be '
'unstable. Consider using different filter '
'coefficients.')
def _filtfilt(x, iir_params, picks, n_jobs, copy):
"""Call filtfilt."""
# set up array for filtering, reshape to 2D, operate on last axis
from scipy.signal import filtfilt, sosfiltfilt
padlen = min(iir_params['padlen'], x.shape[-1] - 1)
x, orig_shape, picks = _prep_for_filtering(x, copy, picks)
if 'sos' in iir_params:
fun = partial(sosfiltfilt, sos=iir_params['sos'], padlen=padlen,
axis=-1)
_check_coefficients(iir_params['sos'])
else:
fun = partial(filtfilt, b=iir_params['b'], a=iir_params['a'],
padlen=padlen, axis=-1)
_check_coefficients((iir_params['b'], iir_params['a']))
parallel, p_fun, n_jobs = parallel_func(fun, n_jobs)
if n_jobs == 1:
for p in picks:
x[p] = fun(x=x[p])
else:
data_new = parallel(p_fun(x=x[p]) for p in picks)
for pp, p in enumerate(picks):
x[p] = data_new[pp]
x.shape = orig_shape
return x
def estimate_ringing_samples(system, max_try=100000):
"""Estimate filter ringing.
Parameters
----------
system : tuple | ndarray
A tuple of (b, a) or ndarray of second-order sections coefficients.
max_try : int
Approximate maximum number of samples to try.
This will be changed to a multiple of 1000.
Returns
-------
n : int
The approximate ringing.
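    Examples
    --------
    A sketch with an arbitrary 4th-order Butterworth low-pass; the exact count
    depends on the coefficients, so the output is not checked here:
    >>> from scipy.signal import butter
    >>> estimate_ringing_samples(butter(4, 0.1))  # doctest:+SKIP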
"""
from scipy import signal
if isinstance(system, tuple): # TF
kind = 'ba'
b, a = system
zi = [0.] * (len(a) - 1)
else:
kind = 'sos'
sos = system
zi = [[0.] * 2] * len(sos)
n_per_chunk = 1000
n_chunks_max = int(np.ceil(max_try / float(n_per_chunk)))
x = np.zeros(n_per_chunk)
x[0] = 1
last_good = n_per_chunk
thresh_val = 0
for ii in range(n_chunks_max):
if kind == 'ba':
h, zi = signal.lfilter(b, a, x, zi=zi)
else:
h, zi = signal.sosfilt(sos, x, zi=zi)
x[0] = 0 # for subsequent iterations we want zero input
h = np.abs(h)
thresh_val = max(0.001 * np.max(h), thresh_val)
idx = np.where(np.abs(h) > thresh_val)[0]
if len(idx) > 0:
last_good = idx[-1]
        else:  # this iteration had no sufficiently large values
idx = (ii - 1) * n_per_chunk + last_good
break
else:
warn('Could not properly estimate ringing for the filter')
idx = n_per_chunk * n_chunks_max
return idx
_ftype_dict = {
'butter': 'Butterworth',
'cheby1': 'Chebyshev I',
'cheby2': 'Chebyshev II',
'ellip': 'Cauer/elliptic',
'bessel': 'Bessel/Thomson',
}
@verbose
def construct_iir_filter(iir_params, f_pass=None, f_stop=None, sfreq=None,
btype=None, return_copy=True, verbose=None):
"""Use IIR parameters to get filtering coefficients.
This function works like a wrapper for iirdesign and iirfilter in
scipy.signal to make filter coefficients for IIR filtering. It also
estimates the number of padding samples based on the filter ringing.
It creates a new iir_params dict (or updates the one passed to the
function) with the filter coefficients ('b' and 'a') and an estimate
of the padding necessary ('padlen') so IIR filtering can be performed.
Parameters
----------
iir_params : dict
Dictionary of parameters to use for IIR filtering.
* If ``iir_params['sos']`` exists, it will be used as
second-order sections to perform IIR filtering.
.. versionadded:: 0.13
* Otherwise, if ``iir_params['b']`` and ``iir_params['a']``
exist, these will be used as coefficients to perform IIR
filtering.
* Otherwise, if ``iir_params['order']`` and
``iir_params['ftype']`` exist, these will be used with
`scipy.signal.iirfilter` to make a filter.
You should also supply ``iir_params['rs']`` and
``iir_params['rp']`` if using elliptic or Chebychev filters.
* Otherwise, if ``iir_params['gpass']`` and
``iir_params['gstop']`` exist, these will be used with
`scipy.signal.iirdesign` to design a filter.
* ``iir_params['padlen']`` defines the number of samples to pad
(and an estimate will be calculated if it is not given).
See Notes for more details.
* ``iir_params['output']`` defines the system output kind when
designing filters, either "sos" or "ba". For 0.13 the
default is 'ba' but will change to 'sos' in 0.14.
f_pass : float or list of float
Frequency for the pass-band. Low-pass and high-pass filters should
be a float, band-pass should be a 2-element list of float.
f_stop : float or list of float
Stop-band frequency (same size as f_pass). Not used if 'order' is
specified in iir_params.
sfreq : float | None
The sample rate.
btype : str
Type of filter. Should be 'lowpass', 'highpass', or 'bandpass'
(or analogous string representations known to
:func:`scipy.signal.iirfilter`).
return_copy : bool
If False, the 'sos', 'b', 'a', and 'padlen' entries in
``iir_params`` will be set inplace (if they weren't already).
Otherwise, a new ``iir_params`` instance will be created and
returned with these entries.
%(verbose)s
Returns
-------
iir_params : dict
Updated iir_params dict, with the entries (set only if they didn't
exist before) for 'sos' (or 'b', 'a'), and 'padlen' for
IIR filtering.
See Also
--------
mne.filter.filter_data
mne.io.Raw.filter
Notes
-----
This function triages calls to :func:`scipy.signal.iirfilter` and
:func:`scipy.signal.iirdesign` based on the input arguments (see
linked functions for more details).
.. versionchanged:: 0.14
Second-order sections are used in filter design by default (replacing
``output='ba'`` by ``output='sos'``) to help ensure filter stability
and reduce numerical error.
Examples
--------
iir_params can have several forms. Consider constructing a low-pass
filter at 40 Hz with 1000 Hz sampling rate.
In the most basic (2-parameter) form of iir_params, the order of the
filter 'N' and the type of filtering 'ftype' are specified. To get
coefficients for a 4th-order Butterworth filter, this would be:
>>> iir_params = dict(order=4, ftype='butter', output='sos') # doctest:+SKIP
>>> iir_params = construct_iir_filter(iir_params, 40, None, 1000, 'low', return_copy=False) # doctest:+SKIP
>>> print((2 * len(iir_params['sos']), iir_params['padlen'])) # doctest:+SKIP
(4, 82)
Filters can also be constructed using filter design methods. To get a
40 Hz Chebyshev type 1 lowpass with specific gain characteristics in the
    pass and stop bands (assuming the desired stop band starts at 50 Hz), this
would be a filter with much longer ringing:
>>> iir_params = dict(ftype='cheby1', gpass=3, gstop=20, output='sos') # doctest:+SKIP
>>> iir_params = construct_iir_filter(iir_params, 40, 50, 1000, 'low') # doctest:+SKIP
>>> print((2 * len(iir_params['sos']), iir_params['padlen'])) # doctest:+SKIP
(6, 439)
Padding and/or filter coefficients can also be manually specified. For
a 10-sample moving window with no padding during filtering, for example,
one can just do:
>>> iir_params = dict(b=np.ones((10)), a=[1, 0], padlen=0) # doctest:+SKIP
>>> iir_params = construct_iir_filter(iir_params, return_copy=False) # doctest:+SKIP
>>> print((iir_params['b'], iir_params['a'], iir_params['padlen'])) # doctest:+SKIP
(array([1., 1., 1., 1., 1., 1., 1., 1., 1., 1.]), [1, 0], 0)
For more information, see the tutorials
:ref:`disc-filtering` and :ref:`tut-filter-resample`.
""" # noqa: E501
from scipy.signal import iirfilter, iirdesign, freqz, sosfreqz
known_filters = ('bessel', 'butter', 'butterworth', 'cauer', 'cheby1',
'cheby2', 'chebyshev1', 'chebyshev2', 'chebyshevi',
'chebyshevii', 'ellip', 'elliptic')
if not isinstance(iir_params, dict):
raise TypeError('iir_params must be a dict, got %s' % type(iir_params))
# if the filter has been designed, we're good to go
Wp = None
if 'sos' in iir_params:
system = iir_params['sos']
output = 'sos'
elif 'a' in iir_params and 'b' in iir_params:
system = (iir_params['b'], iir_params['a'])
output = 'ba'
else:
output = iir_params.get('output', 'sos')
_check_option('output', output, ('ba', 'sos'))
# ensure we have a valid ftype
if 'ftype' not in iir_params:
raise RuntimeError('ftype must be an entry in iir_params if ''b'' '
'and ''a'' are not specified')
ftype = iir_params['ftype']
if ftype not in known_filters:
raise RuntimeError('ftype must be in filter_dict from '
'scipy.signal (e.g., butter, cheby1, etc.) not '
'%s' % ftype)
# use order-based design
f_pass = np.atleast_1d(f_pass)
if f_pass.ndim > 1:
raise ValueError('frequencies must be 1D, got %dD' % f_pass.ndim)
edge_freqs = ', '.join('%0.2f' % (f,) for f in f_pass)
Wp = f_pass / (float(sfreq) / 2)
        # The filter will be designed below
ftype_nice = _ftype_dict.get(ftype, ftype)
logger.info('')
logger.info('IIR filter parameters')
logger.info('---------------------')
logger.info('%s %s zero-phase (two-pass forward and reverse) '
'non-causal filter:' % (ftype_nice, btype))
# SciPy designs for -3dB but we do forward-backward, so this is -6dB
if 'order' in iir_params:
kwargs = dict(N=iir_params['order'], Wn=Wp, btype=btype,
ftype=ftype, output=output)
for key in ('rp', 'rs'):
if key in iir_params:
kwargs[key] = iir_params[key]
system = iirfilter(**kwargs)
logger.info('- Filter order %d (effective, after forward-backward)'
% (2 * iir_params['order'] * len(Wp),))
else:
# use gpass / gstop design
Ws = np.asanyarray(f_stop) / (float(sfreq) / 2)
if 'gpass' not in iir_params or 'gstop' not in iir_params:
raise ValueError('iir_params must have at least ''gstop'' and'
' ''gpass'' (or ''N'') entries')
system = iirdesign(Wp, Ws, iir_params['gpass'],
iir_params['gstop'], ftype=ftype, output=output)
if system is None:
raise RuntimeError('coefficients could not be created from iir_params')
# do some sanity checks
_check_coefficients(system)
# get the gains at the cutoff frequencies
if Wp is not None:
if output == 'sos':
cutoffs = sosfreqz(system, worN=Wp * np.pi)[1]
else:
cutoffs = freqz(system[0], system[1], worN=Wp * np.pi)[1]
# 2 * 20 here because we do forward-backward filtering
cutoffs = 40 * np.log10(np.abs(cutoffs))
cutoffs = ', '.join(['%0.2f' % (c,) for c in cutoffs])
logger.info('- Cutoff%s at %s Hz: %s dB'
% (_pl(f_pass), edge_freqs, cutoffs))
# now deal with padding
if 'padlen' not in iir_params:
padlen = estimate_ringing_samples(system)
else:
padlen = iir_params['padlen']
if return_copy:
iir_params = deepcopy(iir_params)
iir_params.update(dict(padlen=padlen))
if output == 'sos':
iir_params.update(sos=system)
else:
iir_params.update(b=system[0], a=system[1])
logger.info('')
return iir_params
def _check_method(method, iir_params, extra_types=()):
"""Parse method arguments."""
allowed_types = ['iir', 'fir', 'fft'] + list(extra_types)
_validate_type(method, 'str', 'method')
_check_option('method', method, allowed_types)
if method == 'fft':
method = 'fir' # use the better name
if method == 'iir':
if iir_params is None:
iir_params = dict()
if len(iir_params) == 0 or (len(iir_params) == 1 and
'output' in iir_params):
iir_params = dict(order=4, ftype='butter',
output=iir_params.get('output', 'sos'))
elif iir_params is not None:
raise ValueError('iir_params must be None if method != "iir"')
return iir_params, method
@verbose
def filter_data(data, sfreq, l_freq, h_freq, picks=None, filter_length='auto',
l_trans_bandwidth='auto', h_trans_bandwidth='auto',
n_jobs=None, method='fir', iir_params=None, copy=True,
phase='zero', fir_window='hamming', fir_design='firwin',
pad='reflect_limited', *, verbose=None):
"""Filter a subset of channels.
Parameters
----------
data : ndarray, shape (..., n_times)
The data to filter.
sfreq : float
The sample frequency in Hz.
%(l_freq)s
%(h_freq)s
%(picks_nostr)s
Currently this is only supported for 2D (n_channels, n_times) and
3D (n_epochs, n_channels, n_times) arrays.
%(filter_length)s
%(l_trans_bandwidth)s
%(h_trans_bandwidth)s
%(n_jobs_fir)s
%(method_fir)s
%(iir_params)s
copy : bool
If True, a copy of x, filtered, is returned. Otherwise, it operates
on x in place.
%(phase)s
%(fir_window)s
%(fir_design)s
%(pad_fir)s
The default is ``'reflect_limited'``.
.. versionadded:: 0.15
%(verbose)s
Returns
-------
data : ndarray, shape (..., n_times)
The filtered data.
See Also
--------
construct_iir_filter
create_filter
mne.io.Raw.filter
notch_filter
resample
Notes
-----
Applies a zero-phase low-pass, high-pass, band-pass, or band-stop
filter to the channels selected by ``picks``.
``l_freq`` and ``h_freq`` are the frequencies below which and above
which, respectively, to filter out of the data. Thus the uses are:
* ``l_freq < h_freq``: band-pass filter
* ``l_freq > h_freq``: band-stop filter
* ``l_freq is not None and h_freq is None``: high-pass filter
* ``l_freq is None and h_freq is not None``: low-pass filter
.. note:: If n_jobs > 1, more memory is required as
``len(picks) * n_times`` additional time points need to
              be temporarily stored in memory.
For more information, see the tutorials
:ref:`disc-filtering` and :ref:`tut-filter-resample` and
:func:`mne.filter.create_filter`.
"""
data = _check_filterable(data)
iir_params, method = _check_method(method, iir_params)
filt = create_filter(
data, sfreq, l_freq, h_freq, filter_length, l_trans_bandwidth,
h_trans_bandwidth, method, iir_params, phase, fir_window, fir_design)
if method in ('fir', 'fft'):
data = _overlap_add_filter(data, filt, None, phase, picks, n_jobs,
copy, pad)
else:
data = _filtfilt(data, filt, picks, n_jobs, copy)
return data
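# Hedged usage sketch (added for illustration; not part of the original
# module): band-pass filtering a small random array with filter_data using
# the FIR defaults. Array sizes and cutoffs are arbitrary assumptions.
def _example_filter_data():
    import numpy as np
    rng = np.random.RandomState(0)
    data = rng.randn(4, 10000)  # 4 channels, 10 s at 1 kHz
    # keep the 1-40 Hz band (l_freq < h_freq -> band-pass)
    return filter_data(data, sfreq=1000., l_freq=1., h_freq=40.)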
@verbose
def create_filter(data, sfreq, l_freq, h_freq, filter_length='auto',
l_trans_bandwidth='auto', h_trans_bandwidth='auto',
method='fir', iir_params=None, phase='zero',
fir_window='hamming', fir_design='firwin', verbose=None):
r"""Create a FIR or IIR filter.
``l_freq`` and ``h_freq`` are the frequencies below which and above
which, respectively, to filter out of the data. Thus the uses are:
* ``l_freq < h_freq``: band-pass filter
* ``l_freq > h_freq``: band-stop filter
* ``l_freq is not None and h_freq is None``: high-pass filter
* ``l_freq is None and h_freq is not None``: low-pass filter
Parameters
----------
data : ndarray, shape (..., n_times) | None
The data that will be filtered. This is used for sanity checking
only. If None, no sanity checking related to the length of the signal
relative to the filter order will be performed.
sfreq : float
The sample frequency in Hz.
%(l_freq)s
%(h_freq)s
%(filter_length)s
%(l_trans_bandwidth)s
%(h_trans_bandwidth)s
%(method_fir)s
%(iir_params)s
%(phase)s
%(fir_window)s
%(fir_design)s
%(verbose)s
Returns
-------
filt : array or dict
Will be an array of FIR coefficients for method='fir', and dict
with IIR parameters for method='iir'.
See Also
--------
filter_data
Notes
-----
.. note:: For FIR filters, the *cutoff frequency*, i.e. the -6 dB point,
is in the middle of the transition band (when using phase='zero'
and fir_design='firwin'). For IIR filters, the cutoff frequency
is given by ``l_freq`` or ``h_freq`` directly, and
``l_trans_bandwidth`` and ``h_trans_bandwidth`` are ignored.
**Band-pass filter**
The frequency response is (approximately) given by::
1-| ----------
| /| | \
|H| | / | | \
| / | | \
| / | | \
0-|---------- | | --------------
| | | | | |
0 Fs1 Fp1 Fp2 Fs2 Nyq
Where:
* Fs1 = Fp1 - l_trans_bandwidth in Hz
* Fs2 = Fp2 + h_trans_bandwidth in Hz
**Band-stop filter**
The frequency response is (approximately) given by::
1-|--------- ----------
| \ /
|H| | \ /
| \ /
| \ /
0-| -----------
| | | | | |
0 Fp1 Fs1 Fs2 Fp2 Nyq
Where ``Fs1 = Fp1 + l_trans_bandwidth`` and
``Fs2 = Fp2 - h_trans_bandwidth``.
Multiple stop bands can be specified using arrays.
**Low-pass filter**
The frequency response is (approximately) given by::
1-|------------------------
| \
|H| | \
| \
| \
0-| ----------------
| | | |
0 Fp Fstop Nyq
Where ``Fstop = Fp + trans_bandwidth``.
**High-pass filter**
The frequency response is (approximately) given by::
1-| -----------------------
| /
|H| | /
| /
| /
0-|---------
| | | |
0 Fstop Fp Nyq
Where ``Fstop = Fp - trans_bandwidth``.
.. versionadded:: 0.14
"""
sfreq = float(sfreq)
    if sfreq <= 0:
raise ValueError('sfreq must be positive')
# If no data specified, sanity checking will be skipped
if data is None:
logger.info('No data specified. Sanity checks related to the length of'
' the signal relative to the filter order will be'
' skipped.')
if h_freq is not None:
h_freq = np.array(h_freq, float).ravel()
if (h_freq > (sfreq / 2.)).any():
raise ValueError('h_freq (%s) must be less than the Nyquist '
'frequency %s' % (h_freq, sfreq / 2.))
if l_freq is not None:
l_freq = np.array(l_freq, float).ravel()
if (l_freq == 0).all():
l_freq = None
iir_params, method = _check_method(method, iir_params)
if l_freq is None and h_freq is None:
data, sfreq, _, _, _, _, filter_length, phase, fir_window, \
fir_design = _triage_filter_params(
data, sfreq, None, None, None, None,
filter_length, method, phase, fir_window, fir_design)
if method == 'iir':
out = dict() if iir_params is None else deepcopy(iir_params)
out.update(b=np.array([1.]), a=np.array([1.]))
else:
freq = [0, sfreq / 2.]
gain = [1., 1.]
if l_freq is None and h_freq is not None:
logger.info('Setting up low-pass filter at %0.2g Hz' % (h_freq,))
data, sfreq, _, f_p, _, f_s, filter_length, phase, fir_window, \
fir_design = _triage_filter_params(
data, sfreq, None, h_freq, None, h_trans_bandwidth,
filter_length, method, phase, fir_window, fir_design)
if method == 'iir':
out = construct_iir_filter(iir_params, f_p, f_s, sfreq, 'lowpass')
else: # 'fir'
freq = [0, f_p, f_s]
gain = [1, 1, 0]
if f_s != sfreq / 2.:
freq += [sfreq / 2.]
gain += [0]
elif l_freq is not None and h_freq is None:
logger.info('Setting up high-pass filter at %0.2g Hz' % (l_freq,))
data, sfreq, pass_, _, stop, _, filter_length, phase, fir_window, \
fir_design = _triage_filter_params(
data, sfreq, l_freq, None, l_trans_bandwidth, None,
filter_length, method, phase, fir_window, fir_design)
if method == 'iir':
out = construct_iir_filter(iir_params, pass_, stop, sfreq,
'highpass')
else: # 'fir'
freq = [stop, pass_, sfreq / 2.]
gain = [0, 1, 1]
if stop != 0:
freq = [0] + freq
gain = [0] + gain
elif l_freq is not None and h_freq is not None:
if (l_freq < h_freq).any():
logger.info('Setting up band-pass filter from %0.2g - %0.2g Hz'
% (l_freq, h_freq))
data, sfreq, f_p1, f_p2, f_s1, f_s2, filter_length, phase, \
fir_window, fir_design = _triage_filter_params(
data, sfreq, l_freq, h_freq, l_trans_bandwidth,
h_trans_bandwidth, filter_length, method, phase,
fir_window, fir_design)
if method == 'iir':
out = construct_iir_filter(iir_params, [f_p1, f_p2],
[f_s1, f_s2], sfreq, 'bandpass')
else: # 'fir'
freq = [f_s1, f_p1, f_p2, f_s2]
gain = [0, 1, 1, 0]
if f_s2 != sfreq / 2.:
freq += [sfreq / 2.]
gain += [0]
if f_s1 != 0:
freq = [0] + freq
gain = [0] + gain
else:
# This could possibly be removed after 0.14 release, but might
# as well leave it in to sanity check notch_filter
if len(l_freq) != len(h_freq):
raise ValueError('l_freq and h_freq must be the same length')
msg = 'Setting up band-stop filter'
if len(l_freq) == 1:
msg += ' from %0.2g - %0.2g Hz' % (h_freq, l_freq)
logger.info(msg)
# Note: order of outputs is intentionally switched here!
data, sfreq, f_s1, f_s2, f_p1, f_p2, filter_length, phase, \
fir_window, fir_design = _triage_filter_params(
data, sfreq, h_freq, l_freq, h_trans_bandwidth,
l_trans_bandwidth, filter_length, method, phase,
fir_window, fir_design, bands='arr', reverse=True)
if method == 'iir':
if len(f_p1) != 1:
raise ValueError('Multiple stop-bands can only be used '
'with FIR filtering')
out = construct_iir_filter(iir_params, [f_p1[0], f_p2[0]],
[f_s1[0], f_s2[0]], sfreq,
'bandstop')
else: # 'fir'
freq = np.r_[f_p1, f_s1, f_s2, f_p2]
gain = np.r_[np.ones_like(f_p1), np.zeros_like(f_s1),
np.zeros_like(f_s2), np.ones_like(f_p2)]
order = np.argsort(freq)
freq = freq[order]
gain = gain[order]
if freq[0] != 0:
freq = np.r_[[0.], freq]
gain = np.r_[[1.], gain]
if freq[-1] != sfreq / 2.:
freq = np.r_[freq, [sfreq / 2.]]
gain = np.r_[gain, [1.]]
if np.any(np.abs(np.diff(gain, 2)) > 1):
raise ValueError('Stop bands are not sufficiently '
'separated.')
if method == 'fir':
out = _construct_fir_filter(sfreq, freq, gain, filter_length, phase,
fir_window, fir_design)
return out
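# Hedged usage sketch (added for illustration; not part of the original
# module): building an 8-12 Hz FIR band-pass with create_filter and
# inspecting the returned taps. Values are arbitrary assumptions.
def _example_create_filter():
    import numpy as np
    rng = np.random.RandomState(0)
    data = rng.randn(2, 5000)  # 2 channels, 10 s at 500 Hz
    h = create_filter(data, sfreq=500., l_freq=8., h_freq=12.)
    return len(h)  # odd FIR length chosen from the transition bandwidths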
@verbose
def notch_filter(x, Fs, freqs, filter_length='auto', notch_widths=None,
trans_bandwidth=1, method='fir', iir_params=None,
mt_bandwidth=None, p_value=0.05, picks=None, n_jobs=None,
copy=True, phase='zero', fir_window='hamming',
fir_design='firwin', pad='reflect_limited', *,
verbose=None):
r"""Notch filter for the signal x.
Applies a zero-phase notch filter to the signal x, operating on the last
dimension.
Parameters
----------
x : array
Signal to filter.
Fs : float
Sampling rate in Hz.
freqs : float | array of float | None
Frequencies to notch filter in Hz, e.g. np.arange(60, 241, 60).
None can only be used with the mode 'spectrum_fit', where an F
test is used to find sinusoidal components.
%(filter_length_notch)s
notch_widths : float | array of float | None
Width of the stop band (centred at each freq in freqs) in Hz.
If None, freqs / 200 is used.
trans_bandwidth : float
Width of the transition band in Hz.
Only used for ``method='fir'``.
%(method_fir)s
'spectrum_fit' will use multi-taper estimation of sinusoidal
components. If freqs=None and method='spectrum_fit', significant
sinusoidal components are detected using an F test, and noted by
logging.
%(iir_params)s
mt_bandwidth : float | None
The bandwidth of the multitaper windowing function in Hz.
Only used in 'spectrum_fit' mode.
p_value : float
P-value to use in F-test thresholding to determine significant
sinusoidal components to remove when method='spectrum_fit' and
freqs=None. Note that this will be Bonferroni corrected for the
number of frequencies, so large p-values may be justified.
%(picks_nostr)s
Only supported for 2D (n_channels, n_times) and 3D
(n_epochs, n_channels, n_times) data.
%(n_jobs_fir)s
copy : bool
If True, a copy of x, filtered, is returned. Otherwise, it operates
on x in place.
%(phase)s
%(fir_window)s
%(fir_design)s
%(pad_fir)s
The default is ``'reflect_limited'``.
%(verbose)s
Returns
-------
xf : array
The x array filtered.
See Also
--------
filter_data
resample
Notes
-----
The frequency response is (approximately) given by::
1-|---------- -----------
| \ /
|H| | \ /
| \ /
| \ /
0-| -
| | | | |
0 Fp1 freq Fp2 Nyq
For each freq in freqs, where ``Fp1 = freq - trans_bandwidth / 2`` and
    ``Fp2 = freq + trans_bandwidth / 2``.
References
----------
Multi-taper removal is inspired by code from the Chronux toolbox, see
www.chronux.org and the book "Observed Brain Dynamics" by Partha Mitra
& Hemant Bokil, Oxford University Press, New York, 2008. Please
cite this in publications if method 'spectrum_fit' is used.
"""
x = _check_filterable(x, 'notch filtered', 'notch_filter')
iir_params, method = _check_method(method, iir_params, ['spectrum_fit'])
if freqs is not None:
freqs = np.atleast_1d(freqs)
elif method != 'spectrum_fit':
raise ValueError('freqs=None can only be used with method '
'spectrum_fit')
# Only have to deal with notch_widths for non-autodetect
if freqs is not None:
if notch_widths is None:
notch_widths = freqs / 200.0
elif np.any(notch_widths < 0):
raise ValueError('notch_widths must be >= 0')
else:
notch_widths = np.atleast_1d(notch_widths)
if len(notch_widths) == 1:
notch_widths = notch_widths[0] * np.ones_like(freqs)
elif len(notch_widths) != len(freqs):
raise ValueError('notch_widths must be None, scalar, or the '
'same length as freqs')
if method in ('fir', 'iir'):
# Speed this up by computing the fourier coefficients once
tb_2 = trans_bandwidth / 2.0
lows = [freq - nw / 2.0 - tb_2
for freq, nw in zip(freqs, notch_widths)]
highs = [freq + nw / 2.0 + tb_2
for freq, nw in zip(freqs, notch_widths)]
xf = filter_data(x, Fs, highs, lows, picks, filter_length, tb_2, tb_2,
n_jobs, method, iir_params, copy, phase, fir_window,
fir_design, pad=pad)
elif method == 'spectrum_fit':
xf = _mt_spectrum_proc(x, Fs, freqs, notch_widths, mt_bandwidth,
p_value, picks, n_jobs, copy, filter_length)
return xf
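# Hedged usage sketch (added for illustration; not part of the original
# module): removing 50 Hz line noise and its harmonics with notch_filter.
# The sampling rate and harmonic range are arbitrary assumptions.
def _example_notch_filter():
    import numpy as np
    rng = np.random.RandomState(0)
    x = rng.randn(3, 20000)  # 3 channels, 20 s at 1 kHz
    return notch_filter(x, Fs=1000., freqs=np.arange(50, 251, 50))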
def _get_window_thresh(n_times, sfreq, mt_bandwidth, p_value):
# max taper size chosen because it has an max error < 1e-3:
# >>> np.max(np.diff(dpss_windows(953, 4, 100)[0]))
# 0.00099972447657578449
# so we use 1000 because it's the first "nice" number bigger than 953.
# but if we have a new enough scipy,
# it's only ~0.175 sec for 8 tapers even with 100000 samples
from scipy import stats
dpss_n_times_max = 100000
# figure out what tapers to use
window_fun, _, _ = _compute_mt_params(
n_times, sfreq, mt_bandwidth, False, False,
interp_from=min(n_times, dpss_n_times_max), verbose=False)
# F-stat of 1-p point
threshold = stats.f.ppf(1 - p_value / n_times, 2, 2 * len(window_fun) - 2)
return window_fun, threshold
def _mt_spectrum_proc(x, sfreq, line_freqs, notch_widths, mt_bandwidth,
p_value, picks, n_jobs, copy, filter_length):
"""Call _mt_spectrum_remove."""
# set up array for filtering, reshape to 2D, operate on last axis
x, orig_shape, picks = _prep_for_filtering(x, copy, picks)
if isinstance(filter_length, str) and filter_length == 'auto':
filter_length = '10s'
if filter_length is None:
filter_length = x.shape[-1]
filter_length = min(_to_samples(filter_length, sfreq, '', ''), x.shape[-1])
get_wt = partial(
_get_window_thresh, sfreq=sfreq, mt_bandwidth=mt_bandwidth,
p_value=p_value)
window_fun, threshold = get_wt(filter_length)
parallel, p_fun, n_jobs = parallel_func(_mt_spectrum_remove_win, n_jobs)
if n_jobs == 1:
freq_list = list()
for ii, x_ in enumerate(x):
if ii in picks:
x[ii], f = _mt_spectrum_remove_win(
x_, sfreq, line_freqs, notch_widths, window_fun, threshold,
get_wt)
freq_list.append(f)
else:
data_new = parallel(p_fun(x_, sfreq, line_freqs, notch_widths,
window_fun, threshold, get_wt)
for xi, x_ in enumerate(x)
if xi in picks)
freq_list = [d[1] for d in data_new]
data_new = np.array([d[0] for d in data_new])
x[picks, :] = data_new
# report found frequencies, but do some sanitizing first by binning into
# 1 Hz bins
counts = Counter(sum((np.unique(np.round(ff)).tolist()
for f in freq_list for ff in f), list()))
kind = 'Detected' if line_freqs is None else 'Removed'
found_freqs = '\n'.join(f' {freq:6.2f} : '
f'{counts[freq]:4d} window{_pl(counts[freq])}'
for freq in sorted(counts)) or ' None'
logger.info(f'{kind} notch frequencies (Hz):\n{found_freqs}')
x.shape = orig_shape
return x
def _mt_spectrum_remove_win(x, sfreq, line_freqs, notch_widths,
window_fun, threshold, get_thresh):
n_times = x.shape[-1]
n_samples = window_fun.shape[1]
n_overlap = (n_samples + 1) // 2
x_out = np.zeros_like(x)
rm_freqs = list()
idx = [0]
# Define how to process a chunk of data
def process(x_):
out = _mt_spectrum_remove(
x_, sfreq, line_freqs, notch_widths, window_fun, threshold,
get_thresh)
rm_freqs.append(out[1])
return (out[0],) # must return a tuple
# Define how to store a chunk of fully processed data (it's trivial)
def store(x_):
stop = idx[0] + x_.shape[-1]
x_out[..., idx[0]:stop] += x_
idx[0] = stop
_COLA(process, store, n_times, n_samples, n_overlap, sfreq,
verbose=False).feed(x)
assert idx[0] == n_times
return x_out, rm_freqs
def _mt_spectrum_remove(x, sfreq, line_freqs, notch_widths,
window_fun, threshold, get_thresh):
"""Use MT-spectrum to remove line frequencies.
Based on Chronux. If line_freqs is specified, all freqs within notch_width
    of each line_freq are set to zero.
"""
assert x.ndim == 1
if x.shape[-1] != window_fun.shape[-1]:
window_fun, threshold = get_thresh(x.shape[-1])
# drop the even tapers
n_tapers = len(window_fun)
tapers_odd = np.arange(0, n_tapers, 2)
tapers_even = np.arange(1, n_tapers, 2)
tapers_use = window_fun[tapers_odd]
# sum tapers for (used) odd prolates across time (n_tapers, 1)
H0 = np.sum(tapers_use, axis=1)
# sum of squares across tapers (1, )
H0_sq = sum_squared(H0)
# make "time" vector
rads = 2 * np.pi * (np.arange(x.size) / float(sfreq))
# compute mt_spectrum (returning n_ch, n_tapers, n_freq)
x_p, freqs = _mt_spectra(x[np.newaxis, :], window_fun, sfreq)
# sum of the product of x_p and H0 across tapers (1, n_freqs)
x_p_H0 = np.sum(x_p[:, tapers_odd, :] *
H0[np.newaxis, :, np.newaxis], axis=1)
# resulting calculated amplitudes for all freqs
A = x_p_H0 / H0_sq
if line_freqs is None:
# figure out which freqs to remove using F stat
# estimated coefficient
x_hat = A * H0[:, np.newaxis]
# numerator for F-statistic
num = (n_tapers - 1) * (A * A.conj()).real * H0_sq
# denominator for F-statistic
den = (np.sum(np.abs(x_p[:, tapers_odd, :] - x_hat) ** 2, 1) +
np.sum(np.abs(x_p[:, tapers_even, :]) ** 2, 1))
den[den == 0] = np.inf
f_stat = num / den
# find frequencies to remove
indices = np.where(f_stat > threshold)[1]
rm_freqs = freqs[indices]
else:
# specify frequencies
indices_1 = np.unique([np.argmin(np.abs(freqs - lf))
for lf in line_freqs])
indices_2 = [np.logical_and(freqs > lf - nw / 2., freqs < lf + nw / 2.)
for lf, nw in zip(line_freqs, notch_widths)]
indices_2 = np.where(np.any(np.array(indices_2), axis=0))[0]
indices = np.unique(np.r_[indices_1, indices_2])
rm_freqs = freqs[indices]
fits = list()
for ind in indices:
c = 2 * A[0, ind]
fit = np.abs(c) * np.cos(freqs[ind] * rads + np.angle(c))
fits.append(fit)
if len(fits) == 0:
datafit = 0.0
else:
# fitted sinusoids are summed, and subtracted from data
datafit = np.sum(fits, axis=0)
return x - datafit, rm_freqs
def _check_filterable(x, kind='filtered', alternative='filter'):
# Let's be fairly strict about this -- users can easily coerce to ndarray
# at their end, and we already should do it internally any time we are
# using these low-level functions. At the same time, let's
# help people who might accidentally use low-level functions that they
# shouldn't use by pushing them in the right direction
from .io.base import BaseRaw
from .epochs import BaseEpochs
from .evoked import Evoked
if isinstance(x, (BaseRaw, BaseEpochs, Evoked)):
try:
name = x.__class__.__name__
except Exception:
pass
else:
raise TypeError(
'This low-level function only operates on np.ndarray '
f'instances. To get a {kind} {name} instance, use a method '
f'like `inst_new = inst.copy().{alternative}(...)` '
'instead.')
_validate_type(x, (np.ndarray, list, tuple), f'Data to be {kind}')
x = np.asanyarray(x)
if x.dtype != np.float64:
raise ValueError('Data to be %s must be real floating, got %s'
% (kind, x.dtype,))
return x
def _resamp_ratio_len(up, down, n):
ratio = float(up) / down
return ratio, max(int(round(ratio * n)), 1)
@verbose
def resample(x, up=1., down=1., npad=100, axis=-1, window='boxcar',
n_jobs=None, pad='reflect_limited', *, verbose=None):
"""Resample an array.
Operates along the last dimension of the array.
Parameters
----------
x : ndarray
Signal to resample.
up : float
Factor to upsample by.
down : float
Factor to downsample by.
%(npad)s
axis : int
Axis along which to resample (default is the last axis).
%(window_resample)s
%(n_jobs_cuda)s
%(pad)s
The default is ``'reflect_limited'``.
.. versionadded:: 0.15
%(verbose)s
Returns
-------
y : array
The x array resampled.
Notes
-----
This uses (hopefully) intelligent edge padding and frequency-domain
    windowing to improve scipy.signal.resample's resampling method, which
we have adapted for our use here. Choices of npad and window have
important consequences, and the default choices should work well
for most natural signals.
Resampling arguments are broken into "up" and "down" components for future
compatibility in case we decide to use an upfirdn implementation. The
current implementation is functionally equivalent to passing
up=up/down and down=1.
"""
from scipy.signal import get_window
from scipy.fft import ifftshift, fftfreq
# check explicitly for backwards compatibility
if not isinstance(axis, int):
err = ("The axis parameter needs to be an integer (got %s). "
"The axis parameter was missing from this function for a "
"period of time, you might be intending to specify the "
"subsequent window parameter." % repr(axis))
raise TypeError(err)
# make sure our arithmetic will work
x = _check_filterable(x, 'resampled', 'resample')
ratio, final_len = _resamp_ratio_len(up, down, x.shape[axis])
del up, down
if axis < 0:
axis = x.ndim + axis
orig_last_axis = x.ndim - 1
if axis != orig_last_axis:
x = x.swapaxes(axis, orig_last_axis)
orig_shape = x.shape
x_len = orig_shape[-1]
if x_len == 0:
warn('x has zero length along last axis, returning a copy of x')
return x.copy()
bad_msg = 'npad must be "auto" or an integer'
if isinstance(npad, str):
if npad != 'auto':
raise ValueError(bad_msg)
# Figure out reasonable pad that gets us to a power of 2
min_add = min(x_len // 8, 100) * 2
npad = 2 ** int(np.ceil(np.log2(x_len + min_add))) - x_len
npad, extra = divmod(npad, 2)
npads = np.array([npad, npad + extra], int)
else:
if npad != int(npad):
raise ValueError(bad_msg)
npads = np.array([npad, npad], int)
del npad
# prep for resampling now
x_flat = x.reshape((-1, x_len))
orig_len = x_len + npads.sum() # length after padding
new_len = max(int(round(ratio * orig_len)), 1) # length after resampling
to_removes = [int(round(ratio * npads[0]))]
to_removes.append(new_len - final_len - to_removes[0])
to_removes = np.array(to_removes)
# This should hold:
# assert np.abs(to_removes[1] - to_removes[0]) <= int(np.ceil(ratio))
# figure out windowing function
if window is not None:
if callable(window):
W = window(fftfreq(orig_len))
elif isinstance(window, np.ndarray) and \
window.shape == (orig_len,):
W = window
else:
W = ifftshift(get_window(window, orig_len))
else:
W = np.ones(orig_len)
W *= (float(new_len) / float(orig_len))
# figure out if we should use CUDA
n_jobs, cuda_dict = _setup_cuda_fft_resample(n_jobs, W, new_len)
# do the resampling using an adaptation of scipy's FFT-based resample()
# use of the 'flat' window is recommended for minimal ringing
parallel, p_fun, n_jobs = parallel_func(_fft_resample, n_jobs)
if n_jobs == 1:
y = np.zeros((len(x_flat), new_len - to_removes.sum()), dtype=x.dtype)
for xi, x_ in enumerate(x_flat):
y[xi] = _fft_resample(x_, new_len, npads, to_removes,
cuda_dict, pad)
else:
y = parallel(p_fun(x_, new_len, npads, to_removes, cuda_dict, pad)
for x_ in x_flat)
y = np.array(y)
# Restore the original array shape (modified for resampling)
y.shape = orig_shape[:-1] + (y.shape[1],)
if axis != orig_last_axis:
y = y.swapaxes(axis, orig_last_axis)
assert y.shape[axis] == final_len
return y
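# Hedged usage sketch (added for illustration; not part of the original
# module): downsampling by an integer factor with resample. The shapes and
# factor are arbitrary assumptions.
def _example_resample():
    import numpy as np
    rng = np.random.RandomState(0)
    x = rng.randn(2, 1000)
    y = resample(x, up=1., down=4.)  # e.g. 1000 Hz -> 250 Hz
    return y.shape  # (2, 250)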
def _resample_stim_channels(stim_data, up, down):
"""Resample stim channels, carefully.
Parameters
----------
stim_data : array, shape (n_samples,) or (n_stim_channels, n_samples)
Stim channels to resample.
up : float
Factor to upsample by.
down : float
Factor to downsample by.
Returns
-------
stim_resampled : array, shape (n_stim_channels, n_samples_resampled)
The resampled stim channels.
Note
----
The approach taken here is equivalent to the approach in the C-code.
See the decimate_stimch function in MNE/mne_browse_raw/save.c
"""
stim_data = np.atleast_2d(stim_data)
n_stim_channels, n_samples = stim_data.shape
ratio = float(up) / down
resampled_n_samples = int(round(n_samples * ratio))
stim_resampled = np.zeros((n_stim_channels, resampled_n_samples))
    # Figure out which points in old data to subsample; protect against
# out-of-bounds, which can happen (having one sample more than
# expected) due to padding
sample_picks = np.minimum(
(np.arange(resampled_n_samples) / ratio).astype(int),
n_samples - 1
)
# Create windows starting from sample_picks[i], ending at sample_picks[i+1]
windows = zip(sample_picks, np.r_[sample_picks[1:], n_samples])
# Use the first non-zero value in each window
for window_i, window in enumerate(windows):
for stim_num, stim in enumerate(stim_data):
nonzero = stim[window[0]:window[1]].nonzero()[0]
if len(nonzero) > 0:
val = stim[window[0] + nonzero[0]]
else:
val = stim[window[0]]
stim_resampled[stim_num, window_i] = val
return stim_resampled
def detrend(x, order=1, axis=-1):
"""Detrend the array x.
Parameters
----------
x : n-d array
Signal to detrend.
order : int
Fit order. Currently must be '0' or '1'.
axis : int
Axis of the array to operate on.
Returns
-------
y : array
The x array detrended.
Examples
--------
As in :func:`scipy.signal.detrend`::
>>> randgen = np.random.RandomState(9)
>>> npoints = int(1e3)
>>> noise = randgen.randn(npoints)
>>> x = 3 + 2*np.linspace(0, 1, npoints) + noise
>>> (detrend(x) - noise).max() < 0.01
True
"""
from scipy.signal import detrend
if axis > len(x.shape):
raise ValueError('x does not have %d axes' % axis)
if order == 0:
fit = 'constant'
elif order == 1:
fit = 'linear'
else:
raise ValueError('order must be 0 or 1')
y = detrend(x, axis=axis, type=fit)
return y
# Taken from Ifeachor and Jervis p. 356.
# Note that here the passband ripple and stopband attenuation are
# redundant. The scalar passband ripple δp is expressed in dB as
# 20 * log10(1+δp), but the scalar stopband ripple δs is expressed in dB as
# -20 * log10(δs). So if we know that our stopband attenuation is 53 dB
# (Hamming) then δs = 10 ** (53 / -20.), which means that the passband
# deviation should be 20 * np.log10(1 + 10 ** (53 / -20.)) == 0.0194.
_fir_window_dict = {
'hann': dict(name='Hann', ripple=0.0546, attenuation=44),
'hamming': dict(name='Hamming', ripple=0.0194, attenuation=53),
'blackman': dict(name='Blackman', ripple=0.0017, attenuation=74),
}
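# Hedged verification sketch (added for illustration; not part of the original
# module): reproduces the 0.0194 passband-ripple figure quoted above from the
# 53 dB Hamming stop-band attenuation.
def _example_ripple_from_attenuation(attenuation_db=53.):
    import numpy as np
    delta_s = 10 ** (attenuation_db / -20.)  # scalar stop-band ripple
    return 20 * np.log10(1 + delta_s)  # ~0.0194 dB passband deviation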
_known_fir_windows = tuple(sorted(_fir_window_dict.keys()))
_known_phases = ('linear', 'zero', 'zero-double', 'minimum')
_known_fir_designs = ('firwin', 'firwin2')
_fir_design_dict = {
'firwin': 'Windowed time-domain',
'firwin2': 'Windowed frequency-domain',
}
def _to_samples(filter_length, sfreq, phase, fir_design):
_validate_type(filter_length, (str, 'int-like'), 'filter_length')
if isinstance(filter_length, str):
filter_length = filter_length.lower()
err_msg = ('filter_length, if a string, must be a '
'human-readable time, e.g. "10s", or "auto", not '
'"%s"' % filter_length)
if filter_length.lower().endswith('ms'):
mult_fact = 1e-3
filter_length = filter_length[:-2]
elif filter_length[-1].lower() == 's':
mult_fact = 1
filter_length = filter_length[:-1]
else:
raise ValueError(err_msg)
# now get the number
try:
filter_length = float(filter_length)
except ValueError:
raise ValueError(err_msg)
filter_length = max(int(np.ceil(filter_length * mult_fact *
sfreq)), 1)
if fir_design == 'firwin':
filter_length += (filter_length - 1) % 2
filter_length = _ensure_int(filter_length, 'filter_length')
return filter_length
def _triage_filter_params(x, sfreq, l_freq, h_freq,
l_trans_bandwidth, h_trans_bandwidth,
filter_length, method, phase, fir_window,
fir_design, bands='scalar', reverse=False):
"""Validate and automate filter parameter selection."""
_validate_type(phase, 'str', 'phase')
_check_option('phase', phase, _known_phases)
_validate_type(fir_window, 'str', 'fir_window')
_check_option('fir_window', fir_window, _known_fir_windows)
_validate_type(fir_design, 'str', 'fir_design')
_check_option('fir_design', fir_design, _known_fir_designs)
# Helpers for reporting
report_phase = 'non-linear phase' if phase == 'minimum' else 'zero-phase'
causality = 'causal' if phase == 'minimum' else 'non-causal'
if phase == 'zero-double':
report_pass = 'two-pass forward and reverse'
else:
report_pass = 'one-pass'
if l_freq is not None:
if h_freq is not None:
kind = 'bandstop' if reverse else 'bandpass'
else:
kind = 'highpass'
assert not reverse
elif h_freq is not None:
kind = 'lowpass'
assert not reverse
else:
kind = 'allpass'
def float_array(c):
return np.array(c, float).ravel()
if bands == 'arr':
cast = float_array
else:
cast = float
sfreq = float(sfreq)
if l_freq is not None:
l_freq = cast(l_freq)
if np.any(l_freq <= 0):
raise ValueError('highpass frequency %s must be greater than zero'
% (l_freq,))
if h_freq is not None:
h_freq = cast(h_freq)
if np.any(h_freq >= sfreq / 2.):
raise ValueError('lowpass frequency %s must be less than Nyquist '
'(%s)' % (h_freq, sfreq / 2.))
dB_cutoff = False # meaning, don't try to compute or report
if bands == 'scalar' or (len(h_freq) == 1 and len(l_freq) == 1):
if phase == 'zero':
dB_cutoff = '-6 dB'
elif phase == 'zero-double':
dB_cutoff = '-12 dB'
# we go to the next power of two when in FIR and zero-double mode
if method == 'iir':
# Ignore these parameters, effectively
l_stop, h_stop = l_freq, h_freq
else: # method == 'fir'
l_stop = h_stop = None
logger.info('')
logger.info('FIR filter parameters')
logger.info('---------------------')
logger.info('Designing a %s, %s, %s %s filter:'
% (report_pass, report_phase, causality, kind))
logger.info('- %s design (%s) method'
% (_fir_design_dict[fir_design], fir_design))
this_dict = _fir_window_dict[fir_window]
if fir_design == 'firwin':
logger.info('- {name:s} window with {ripple:0.4f} passband ripple '
'and {attenuation:d} dB stopband attenuation'
.format(**this_dict))
else:
logger.info('- {name:s} window'.format(**this_dict))
if l_freq is not None: # high-pass component
if isinstance(l_trans_bandwidth, str):
if l_trans_bandwidth != 'auto':
raise ValueError('l_trans_bandwidth must be "auto" if '
'string, got "%s"' % l_trans_bandwidth)
l_trans_bandwidth = np.minimum(np.maximum(0.25 * l_freq, 2.),
l_freq)
msg = ('- Lower transition bandwidth: %0.2f Hz'
% (l_trans_bandwidth))
if dB_cutoff:
logger.info('- Lower passband edge: %0.2f' % (l_freq,))
msg += ' (%s cutoff frequency: %0.2f Hz)' % (
dB_cutoff, l_freq - l_trans_bandwidth / 2.)
logger.info(msg)
l_trans_bandwidth = cast(l_trans_bandwidth)
if np.any(l_trans_bandwidth <= 0):
raise ValueError('l_trans_bandwidth must be positive, got %s'
% (l_trans_bandwidth,))
l_stop = l_freq - l_trans_bandwidth
if reverse: # band-stop style
l_stop += l_trans_bandwidth
l_freq += l_trans_bandwidth
if np.any(l_stop < 0):
raise ValueError('Filter specification invalid: Lower stop '
'frequency negative (%0.2f Hz). Increase pass'
' frequency or reduce the transition '
'bandwidth (l_trans_bandwidth)' % l_stop)
if h_freq is not None: # low-pass component
if isinstance(h_trans_bandwidth, str):
if h_trans_bandwidth != 'auto':
raise ValueError('h_trans_bandwidth must be "auto" if '
'string, got "%s"' % h_trans_bandwidth)
h_trans_bandwidth = np.minimum(np.maximum(0.25 * h_freq, 2.),
sfreq / 2. - h_freq)
msg = ('- Upper transition bandwidth: %0.2f Hz'
% (h_trans_bandwidth))
if dB_cutoff:
logger.info('- Upper passband edge: %0.2f Hz' % (h_freq,))
msg += ' (%s cutoff frequency: %0.2f Hz)' % (
dB_cutoff, h_freq + h_trans_bandwidth / 2.)
logger.info(msg)
h_trans_bandwidth = cast(h_trans_bandwidth)
if np.any(h_trans_bandwidth <= 0):
raise ValueError('h_trans_bandwidth must be positive, got %s'
% (h_trans_bandwidth,))
h_stop = h_freq + h_trans_bandwidth
if reverse: # band-stop style
h_stop -= h_trans_bandwidth
h_freq -= h_trans_bandwidth
if np.any(h_stop > sfreq / 2):
raise ValueError('Effective band-stop frequency (%s) is too '
'high (maximum based on Nyquist is %s)'
% (h_stop, sfreq / 2.))
if isinstance(filter_length, str) and filter_length.lower() == 'auto':
filter_length = filter_length.lower()
h_check = h_trans_bandwidth if h_freq is not None else np.inf
l_check = l_trans_bandwidth if l_freq is not None else np.inf
mult_fact = 2. if fir_design == 'firwin2' else 1.
filter_length = '%ss' % (_length_factors[fir_window] * mult_fact /
float(min(h_check, l_check)),)
next_pow_2 = False # disable old behavior
else:
next_pow_2 = (
isinstance(filter_length, str) and phase == 'zero-double')
filter_length = _to_samples(filter_length, sfreq, phase, fir_design)
# use correct type of filter (must be odd length for firwin and for
# zero phase)
if fir_design == 'firwin' or phase == 'zero':
filter_length += (filter_length - 1) % 2
logger.info('- Filter length: %s samples (%0.3f sec)'
% (filter_length, filter_length / sfreq))
logger.info('')
if filter_length <= 0:
raise ValueError('filter_length must be positive, got %s'
% (filter_length,))
if next_pow_2:
filter_length = 2 ** int(np.ceil(np.log2(filter_length)))
if fir_design == 'firwin':
filter_length += (filter_length - 1) % 2
# If we have data supplied, do a sanity check
if x is not None:
x = _check_filterable(x)
len_x = x.shape[-1]
if method != 'fir':
filter_length = len_x
if filter_length > len_x and not (l_freq is None and h_freq is None):
warn('filter_length (%s) is longer than the signal (%s), '
'distortion is likely. Reduce filter length or filter a '
'longer signal.' % (filter_length, len_x))
logger.debug('Using filter length: %s' % filter_length)
return (x, sfreq, l_freq, h_freq, l_stop, h_stop, filter_length, phase,
fir_window, fir_design)
class FilterMixin(object):
"""Object for Epoch/Evoked filtering."""
@verbose
def savgol_filter(self, h_freq, verbose=None):
"""Filter the data using Savitzky-Golay polynomial method.
Parameters
----------
h_freq : float
Approximate high cut-off frequency in Hz. Note that this
is not an exact cutoff, since Savitzky-Golay filtering
:footcite:`SavitzkyGolay1964` is done using polynomial fits
instead of FIR/IIR filtering. This parameter is thus used to
determine the length of the window over which a 5th-order
polynomial smoothing is used.
%(verbose)s
Returns
-------
inst : instance of Epochs or Evoked
The object with the filtering applied.
See Also
--------
mne.io.Raw.filter
Notes
-----
For Savitzky-Golay low-pass approximation, see:
https://gist.github.com/larsoner/bbac101d50176611136b
.. versionadded:: 0.9.0
References
----------
.. footbibliography::
Examples
--------
>>> import mne
>>> from os import path as op
>>> evoked_fname = op.join(mne.datasets.sample.data_path(), 'MEG', 'sample', 'sample_audvis-ave.fif') # doctest:+SKIP
>>> evoked = mne.read_evokeds(evoked_fname, baseline=(None, 0))[0] # doctest:+SKIP
>>> evoked.savgol_filter(10.) # low-pass at around 10 Hz # doctest:+SKIP
>>> evoked.plot() # doctest:+SKIP
""" # noqa: E501
from scipy.signal import savgol_filter
_check_preload(self, 'inst.savgol_filter')
h_freq = float(h_freq)
if h_freq >= self.info['sfreq'] / 2.:
raise ValueError('h_freq must be less than half the sample rate')
# savitzky-golay filtering
window_length = (int(np.round(self.info['sfreq'] /
h_freq)) // 2) * 2 + 1
logger.info('Using savgol length %d' % window_length)
self._data[:] = savgol_filter(self._data, axis=-1, polyorder=5,
window_length=window_length)
return self
@verbose
def filter(self, l_freq, h_freq, picks=None, filter_length='auto',
l_trans_bandwidth='auto', h_trans_bandwidth='auto', n_jobs=None,
method='fir', iir_params=None, phase='zero',
fir_window='hamming', fir_design='firwin',
skip_by_annotation=('edge', 'bad_acq_skip'), pad='edge', *,
verbose=None):
"""Filter a subset of channels.
Parameters
----------
%(l_freq)s
%(h_freq)s
%(picks_all_data)s
%(filter_length)s
%(l_trans_bandwidth)s
%(h_trans_bandwidth)s
%(n_jobs_fir)s
%(method_fir)s
%(iir_params)s
%(phase)s
%(fir_window)s
%(fir_design)s
skip_by_annotation : str | list of str
If a string (or list of str), any annotation segment that begins
with the given string will not be included in filtering, and
segments on either side of the given excluded annotated segment
will be filtered separately (i.e., as independent signals).
            The default (``('edge', 'bad_acq_skip')``) will separately filter
any segments that were concatenated by :func:`mne.concatenate_raws`
or :meth:`mne.io.Raw.append`, or separated during acquisition.
To disable, provide an empty list. Only used if ``inst`` is raw.
.. versionadded:: 0.16.
%(pad_fir)s
%(verbose)s
Returns
-------
inst : instance of Epochs, Evoked, or Raw
The filtered data.
See Also
--------
mne.filter.create_filter
mne.Evoked.savgol_filter
mne.io.Raw.notch_filter
mne.io.Raw.resample
mne.filter.create_filter
mne.filter.filter_data
mne.filter.construct_iir_filter
Notes
-----
Applies a zero-phase low-pass, high-pass, band-pass, or band-stop
filter to the channels selected by ``picks``.
The data are modified inplace.
The object has to have the data loaded e.g. with ``preload=True``
or ``self.load_data()``.
``l_freq`` and ``h_freq`` are the frequencies below which and above
which, respectively, to filter out of the data. Thus the uses are:
* ``l_freq < h_freq``: band-pass filter
* ``l_freq > h_freq``: band-stop filter
* ``l_freq is not None and h_freq is None``: high-pass filter
* ``l_freq is None and h_freq is not None``: low-pass filter
``self.info['lowpass']`` and ``self.info['highpass']`` are only
updated with picks=None.
.. note:: If n_jobs > 1, more memory is required as
``len(picks) * n_times`` additional time points need to
                  be temporarily stored in memory.
For more information, see the tutorials
:ref:`disc-filtering` and :ref:`tut-filter-resample` and
:func:`mne.filter.create_filter`.
.. versionadded:: 0.15
"""
from .io.base import BaseRaw
_check_preload(self, 'inst.filter')
if pad is None and method != 'iir':
pad = 'edge'
update_info, picks = _filt_check_picks(self.info, picks,
l_freq, h_freq)
if isinstance(self, BaseRaw):
# Deal with annotations
onsets, ends = _annotations_starts_stops(
self, skip_by_annotation, invert=True)
logger.info('Filtering raw data in %d contiguous segment%s'
% (len(onsets), _pl(onsets)))
else:
onsets, ends = np.array([0]), np.array([self._data.shape[1]])
max_idx = (ends - onsets).argmax()
for si, (start, stop) in enumerate(zip(onsets, ends)):
# Only output filter params once (for info level), and only warn
# once about the length criterion (longest segment is too short)
use_verbose = verbose if si == max_idx else 'error'
filter_data(
self._data[:, start:stop], self.info['sfreq'], l_freq, h_freq,
picks, filter_length, l_trans_bandwidth, h_trans_bandwidth,
n_jobs, method, iir_params, copy=False, phase=phase,
fir_window=fir_window, fir_design=fir_design, pad=pad,
verbose=use_verbose)
# update info if filter is applied to all data channels,
# and it's not a band-stop filter
_filt_update_info(self.info, update_info, l_freq, h_freq)
return self
@verbose
def resample(self, sfreq, npad='auto', window='boxcar', n_jobs=None,
pad='edge', *, verbose=None): # lgtm
"""Resample data.
If appropriate, an anti-aliasing filter is applied before resampling.
See :ref:`resampling-and-decimating` for more information.
.. note:: Data must be loaded.
Parameters
----------
sfreq : float
New sample rate to use.
%(npad)s
%(window_resample)s
%(n_jobs_cuda)s
%(pad)s
The default is ``'edge'``, which pads with the edge values of each
vector.
.. versionadded:: 0.15
%(verbose)s
Returns
-------
inst : instance of Epochs or Evoked
The resampled object.
See Also
--------
mne.io.Raw.resample
Notes
-----
For some data, it may be more accurate to use npad=0 to reduce
artifacts. This is dataset dependent -- check your data!
"""
from .epochs import BaseEpochs
from .evoked import Evoked
# Should be guaranteed by our inheritance, and the fact that
# mne.io.base.BaseRaw overrides this method
assert isinstance(self, (BaseEpochs, Evoked))
_check_preload(self, 'inst.resample')
sfreq = float(sfreq)
o_sfreq = self.info['sfreq']
self._data = resample(self._data, sfreq, o_sfreq, npad, window=window,
n_jobs=n_jobs, pad=pad)
lowpass = self.info.get('lowpass')
lowpass = np.inf if lowpass is None else lowpass
with self.info._unlock():
self.info['lowpass'] = min(lowpass, sfreq / 2.)
self.info['sfreq'] = float(sfreq)
new_times = (np.arange(self._data.shape[-1], dtype=np.float64) /
sfreq + self.times[0])
# adjust indirectly affected variables
self._set_times(new_times)
self._raw_times = self.times
self._update_first_last()
return self
@verbose
def apply_hilbert(self, picks=None, envelope=False, n_jobs=None,
n_fft='auto', *, verbose=None):
"""Compute analytic signal or envelope for a subset of channels.
Parameters
----------
%(picks_all_data_noref)s
envelope : bool
Compute the envelope signal of each channel. Default False.
See Notes.
%(n_jobs)s
n_fft : int | None | str
Points to use in the FFT for Hilbert transformation. The signal
will be padded with zeros before computing Hilbert, then cut back
to original length. If None, n == self.n_times. If 'auto',
            the next highest fast FFT length will be used.
%(verbose)s
Returns
-------
self : instance of Raw, Epochs, or Evoked
The raw object with transformed data.
Notes
-----
**Parameters**
If ``envelope=False``, the analytic signal for the channels defined in
``picks`` is computed and the data of the Raw object is converted to
a complex representation (the analytic signal is complex valued).
If ``envelope=True``, the absolute value of the analytic signal for the
channels defined in ``picks`` is computed, resulting in the envelope
signal.
        .. warning:: Do not use ``envelope=True`` if you intend to compute
an inverse solution from the raw data. If you want to
compute the envelope in source space, use
``envelope=False`` and compute the envelope after the
inverse solution has been obtained.
If envelope=False, more memory is required since the original raw data
        as well as the analytic signal have to be stored temporarily in memory.
If n_jobs > 1, more memory is required as ``len(picks) * n_times``
        additional time points need to be temporarily stored in memory.
Also note that the ``n_fft`` parameter will allow you to pad the signal
with zeros before performing the Hilbert transform. This padding
is cut off, but it may result in a slightly different result
(particularly around the edges). Use at your own risk.
**Analytic signal**
The analytic signal "x_a(t)" of "x(t)" is::
x_a = F^{-1}(F(x) 2U) = x + i y
where "F" is the Fourier transform, "U" the unit step function,
and "y" the Hilbert transform of "x". One usage of the analytic
signal is the computation of the envelope signal, which is given by
"e(t) = abs(x_a(t))". Due to the linearity of Hilbert transform and the
MNE inverse solution, the enevlope in source space can be obtained
by computing the analytic signal in sensor space, applying the MNE
inverse, and computing the envelope in source space.
"""
_check_preload(self, 'inst.apply_hilbert')
if n_fft is None:
n_fft = len(self.times)
elif isinstance(n_fft, str):
if n_fft != 'auto':
raise ValueError('n_fft must be an integer, string, or None, '
'got %s' % (type(n_fft),))
n_fft = next_fast_len(len(self.times))
n_fft = int(n_fft)
if n_fft < len(self.times):
raise ValueError("n_fft (%d) must be at least the number of time "
"points (%d)" % (n_fft, len(self.times)))
dtype = None if envelope else np.complex128
picks = _picks_to_idx(self.info, picks, exclude=(), with_ref_meg=False)
args, kwargs = (), dict(n_fft=n_fft, envelope=envelope)
data_in = self._data
if dtype is not None and dtype != self._data.dtype:
self._data = self._data.astype(dtype)
parallel, p_fun, n_jobs = parallel_func(_check_fun, n_jobs)
if n_jobs == 1:
# modify data inplace to save memory
for idx in picks:
self._data[..., idx, :] = _check_fun(
_my_hilbert, data_in[..., idx, :], *args, **kwargs)
else:
# use parallel function
data_picks_new = parallel(
p_fun(_my_hilbert, data_in[..., p, :], *args, **kwargs)
for p in picks)
for pp, p in enumerate(picks):
self._data[..., p, :] = data_picks_new[pp]
return self
def _check_fun(fun, d, *args, **kwargs):
"""Check shapes."""
want_shape = d.shape
d = fun(d, *args, **kwargs)
if not isinstance(d, np.ndarray):
raise TypeError('Return value must be an ndarray')
if d.shape != want_shape:
raise ValueError('Return data must have shape %s not %s'
% (want_shape, d.shape))
return d
def _my_hilbert(x, n_fft=None, envelope=False):
"""Compute Hilbert transform of signals w/ zero padding.
Parameters
----------
x : array, shape (n_times)
The signal to convert
n_fft : int
Size of the FFT to perform, must be at least ``len(x)``.
The signal will be cut back to original length.
envelope : bool
            Whether to compute the amplitude of the Hilbert transform in order
to return the signal envelope.
Returns
-------
out : array, shape (n_times)
The hilbert transform of the signal, or the envelope.
"""
from scipy.signal import hilbert
n_x = x.shape[-1]
out = hilbert(x, N=n_fft, axis=-1)[..., :n_x]
if envelope:
out = np.abs(out)
return out
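# Hedged usage sketch (added for illustration; not part of the original
# module): the envelope of a pure sine computed with _my_hilbert is ~1
# everywhere except near the edges. Signal parameters are arbitrary.
def _example_my_hilbert_envelope():
    import numpy as np
    t = np.arange(0, 1, 1. / 500.)
    x = np.sin(2 * np.pi * 10. * t)
    return _my_hilbert(x, n_fft=len(x), envelope=True)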
@verbose
def design_mne_c_filter(sfreq, l_freq=None, h_freq=40.,
l_trans_bandwidth=None, h_trans_bandwidth=5.,
verbose=None):
"""Create a FIR filter like that used by MNE-C.
Parameters
----------
sfreq : float
The sample frequency.
l_freq : float | None
The low filter frequency in Hz, default None.
Can be None to avoid high-passing.
h_freq : float
The high filter frequency in Hz, default 40.
Can be None to avoid low-passing.
l_trans_bandwidth : float | None
        Low transition bandwidth in Hz. Can be None (default) to use 3 samples.
h_trans_bandwidth : float
High transition bandwidth in Hz.
%(verbose)s
Returns
-------
h : ndarray, shape (8193,)
The linear-phase (symmetric) FIR filter coefficients.
Notes
-----
This function is provided mostly for reference purposes.
MNE-C uses a frequency-domain filter design technique by creating a
linear-phase filter of length 8193. In the frequency domain, the
    4097 frequencies are directly constructed, with zeroes in the stop-band
and ones in the passband, with squared cosine ramps in between.
"""
from scipy.fft import irfft
n_freqs = (4096 + 2 * 2048) // 2 + 1
freq_resp = np.ones(n_freqs)
l_freq = 0 if l_freq is None else float(l_freq)
if l_trans_bandwidth is None:
l_width = 3
else:
l_width = (int(((n_freqs - 1) * l_trans_bandwidth) /
(0.5 * sfreq)) + 1) // 2
l_start = int(((n_freqs - 1) * l_freq) / (0.5 * sfreq))
h_freq = sfreq / 2. if h_freq is None else float(h_freq)
h_width = (int(((n_freqs - 1) * h_trans_bandwidth) /
(0.5 * sfreq)) + 1) // 2
h_start = int(((n_freqs - 1) * h_freq) / (0.5 * sfreq))
logger.info('filter : %7.3f ... %6.1f Hz bins : %d ... %d of %d '
'hpw : %d lpw : %d' % (l_freq, h_freq, l_start, h_start,
n_freqs, l_width, h_width))
if l_freq > 0:
start = l_start - l_width + 1
stop = start + 2 * l_width - 1
if start < 0 or stop >= n_freqs:
raise RuntimeError('l_freq too low or l_trans_bandwidth too large')
freq_resp[:start] = 0.
k = np.arange(-l_width + 1, l_width) / float(l_width) + 3.
freq_resp[start:stop] = np.cos(np.pi / 4. * k) ** 2
if h_freq < sfreq / 2.:
start = h_start - h_width + 1
stop = start + 2 * h_width - 1
if start < 0 or stop >= n_freqs:
raise RuntimeError('h_freq too high or h_trans_bandwidth too '
'large')
k = np.arange(-h_width + 1, h_width) / float(h_width) + 1.
freq_resp[start:stop] *= np.cos(np.pi / 4. * k) ** 2
freq_resp[stop:] = 0.0
# Get the time-domain version of this signal
h = irfft(freq_resp, n=2 * len(freq_resp) - 1)
h = np.roll(h, n_freqs - 1) # center the impulse like a linear-phase filt
return h
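# Hedged usage sketch (added for illustration; not part of the original
# module): a 1-40 Hz band-pass in the MNE-C style for 1 kHz data. The
# frequencies are arbitrary assumptions; the result has length 8193.
def _example_design_mne_c_filter():
    return design_mne_c_filter(sfreq=1000., l_freq=1., h_freq=40.)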
def _filt_check_picks(info, picks, h_freq, l_freq):
from .io.pick import _picks_to_idx
update_info = False
# This will pick *all* data channels
picks = _picks_to_idx(info, picks, 'data_or_ica', exclude=())
if h_freq is not None or l_freq is not None:
data_picks = _picks_to_idx(info, None, 'data_or_ica', exclude=(),
allow_empty=True)
if len(data_picks) == 0:
logger.info('No data channels found. The highpass and '
'lowpass values in the measurement info will not '
'be updated.')
elif np.in1d(data_picks, picks).all():
update_info = True
else:
logger.info('Filtering a subset of channels. The highpass and '
'lowpass values in the measurement info will not '
'be updated.')
return update_info, picks
def _filt_update_info(info, update_info, l_freq, h_freq):
if update_info:
if h_freq is not None and (l_freq is None or l_freq < h_freq) and \
(info["lowpass"] is None or h_freq < info['lowpass']):
with info._unlock():
info['lowpass'] = float(h_freq)
if l_freq is not None and (h_freq is None or l_freq < h_freq) and \
(info["highpass"] is None or l_freq > info['highpass']):
with info._unlock():
info['highpass'] = float(l_freq)
|
{
"content_hash": "a485a3a5f058b6805386540cc3abbf42",
"timestamp": "",
"source": "github",
"line_count": 2345,
"max_line_length": 126,
"avg_line_length": 38.33262260127932,
"alnum_prop": 0.5504505506730448,
"repo_name": "olafhauk/mne-python",
"id": "01c16d3f8aebdf41c3d963ad4890b6da7b203e11",
"size": "89895",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mne/filter.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Csound Document",
"bytes": "24999"
},
{
"name": "JavaScript",
"bytes": "8008"
},
{
"name": "Jinja",
"bytes": "13067"
},
{
"name": "Makefile",
"bytes": "4528"
},
{
"name": "Python",
"bytes": "10131395"
},
{
"name": "Sass",
"bytes": "257"
},
{
"name": "Shell",
"bytes": "19587"
}
],
"symlink_target": ""
}
|
from nose.tools import *
from dateutil.parser import parse as time_parse
import yawhois
class TestWhoisNicBjStatusRegistered(object):
def setUp(self):
fixture_path = "spec/fixtures/responses/whois.nic.bj/status_registered.txt"
host = "whois.nic.bj"
part = yawhois.record.Part(open(fixture_path, "r").read(), host)
self.record = yawhois.record.Record(None, [part])
def test_status(self):
eq_(self.record.status, 'registered')
def test_available(self):
eq_(self.record.available, False)
def test_domain(self):
eq_(self.record.domain, "google.bj")
def test_nameservers(self):
eq_(self.record.nameservers.__class__.__name__, 'list')
eq_(len(self.record.nameservers), 3)
eq_(self.record.nameservers[0].__class__.__name__, 'Nameserver')
eq_(self.record.nameservers[0].name, "ns1.google.com")
eq_(self.record.nameservers[1].__class__.__name__, 'Nameserver')
eq_(self.record.nameservers[1].name, "ns2.google.com")
eq_(self.record.nameservers[2].__class__.__name__, 'Nameserver')
eq_(self.record.nameservers[2].name, "ns3.google.com")
def test_admin_contacts(self):
assert_raises(yawhois.exceptions.AttributeNotSupported, self.record.admin_contacts)
def test_registered(self):
eq_(self.record.registered, True)
def test_created_on(self):
eq_(self.record.created_on.__class__.__name__, 'datetime')
eq_(self.record.created_on, time_parse('2009-03-25 01:57:22 PDT'))
def test_registrar(self):
assert_raises(yawhois.exceptions.AttributeNotSupported, self.record.registrar)
def test_registrant_contacts(self):
eq_(self.record.registrant_contacts.__class__.__name__, 'list')
eq_(len(self.record.registrant_contacts), 1)
eq_(self.record.registrant_contacts[0].__class__.__name__, 'Contact')
eq_(self.record.registrant_contacts[0].type, yawhois.record.Contact.TYPE_REGISTRANT)
eq_(self.record.registrant_contacts[0].id, None)
eq_(self.record.registrant_contacts[0].name, "GOOGLE INC (ED0155)")
eq_(self.record.registrant_contacts[0].organization, None)
eq_(self.record.registrant_contacts[0].address, None)
eq_(self.record.registrant_contacts[0].city, None)
eq_(self.record.registrant_contacts[0].zip, None)
eq_(self.record.registrant_contacts[0].state, None)
eq_(self.record.registrant_contacts[0].country_code, None)
eq_(self.record.registrant_contacts[0].phone, None)
eq_(self.record.registrant_contacts[0].fax, None)
eq_(self.record.registrant_contacts[0].email, None)
def test_technical_contacts(self):
assert_raises(yawhois.exceptions.AttributeNotSupported, self.record.technical_contacts)
def test_updated_on(self):
eq_(self.record.updated_on.__class__.__name__, 'datetime')
eq_(self.record.updated_on, time_parse('2012-08-10 01:57:22 PDT'))
def test_domain_id(self):
assert_raises(yawhois.exceptions.AttributeNotSupported, self.record.domain_id)
def test_expires_on(self):
assert_raises(yawhois.exceptions.AttributeNotSupported, self.record.expires_on)
def test_disclaimer(self):
assert_raises(yawhois.exceptions.AttributeNotSupported, self.record.disclaimer)
|
{
"content_hash": "a661902ac343f04a94c4029331e0f7b1",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 95,
"avg_line_length": 44.36842105263158,
"alnum_prop": 0.6654804270462633,
"repo_name": "huyphan/pyyawhois",
"id": "a54d86f03233d9582d6ead97de0215316710529e",
"size": "3633",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/record/parser/test_response_whois_nic_bj_status_registered.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1859653"
}
],
"symlink_target": ""
}
|
import datetime
import json
import logging
import requests
import dateutil.parser as dp
from django.utils.encoding import smart_text
from django.utils.translation import ugettext_lazy as _
from awx.main.notifications.base import AWXBaseEmailBackend
from awx.main.notifications.custom_notification_base import CustomNotificationBase
DEFAULT_MSG = CustomNotificationBase.DEFAULT_MSG
DEFAULT_APPROVAL_RUNNING_MSG = CustomNotificationBase.DEFAULT_APPROVAL_RUNNING_MSG
DEFAULT_APPROVAL_RUNNING_BODY = CustomNotificationBase.DEFAULT_APPROVAL_RUNNING_BODY
DEFAULT_APPROVAL_APPROVED_MSG = CustomNotificationBase.DEFAULT_APPROVAL_APPROVED_MSG
DEFAULT_APPROVAL_APPROVED_BODY = CustomNotificationBase.DEFAULT_APPROVAL_APPROVED_BODY
DEFAULT_APPROVAL_TIMEOUT_MSG = CustomNotificationBase.DEFAULT_APPROVAL_TIMEOUT_MSG
DEFAULT_APPROVAL_TIMEOUT_BODY = CustomNotificationBase.DEFAULT_APPROVAL_TIMEOUT_BODY
DEFAULT_APPROVAL_DENIED_MSG = CustomNotificationBase.DEFAULT_APPROVAL_DENIED_MSG
DEFAULT_APPROVAL_DENIED_BODY = CustomNotificationBase.DEFAULT_APPROVAL_DENIED_BODY
logger = logging.getLogger('awx.main.notifications.grafana_backend')
class GrafanaBackend(AWXBaseEmailBackend, CustomNotificationBase):
init_parameters = {"grafana_url": {"label": "Grafana URL", "type": "string"},
"grafana_key": {"label": "Grafana API Key", "type": "password"}}
recipient_parameter = "grafana_url"
sender_parameter = None
DEFAULT_BODY = "{{ job_metadata }}"
default_messages = {"started": {"body": DEFAULT_BODY, "message": DEFAULT_MSG},
"success": {"body": DEFAULT_BODY, "message": DEFAULT_MSG},
"error": {"body": DEFAULT_BODY, "message": DEFAULT_MSG},
"workflow_approval": {"running": {"message": DEFAULT_APPROVAL_RUNNING_MSG, "body": DEFAULT_APPROVAL_RUNNING_BODY},
"approved": {"message": DEFAULT_APPROVAL_APPROVED_MSG,"body": DEFAULT_APPROVAL_APPROVED_BODY},
"timed_out": {"message": DEFAULT_APPROVAL_TIMEOUT_MSG, "body": DEFAULT_APPROVAL_TIMEOUT_BODY},
"denied": {"message": DEFAULT_APPROVAL_DENIED_MSG, "body": DEFAULT_APPROVAL_DENIED_BODY}}}
    def __init__(self, grafana_key, dashboardId=None, panelId=None, annotation_tags=None, grafana_no_verify_ssl=False, isRegion=True,
fail_silently=False, **kwargs):
super(GrafanaBackend, self).__init__(fail_silently=fail_silently)
self.grafana_key = grafana_key
self.dashboardId = dashboardId
self.panelId = panelId
self.annotation_tags = annotation_tags if annotation_tags is not None else []
self.grafana_no_verify_ssl = grafana_no_verify_ssl
self.isRegion = isRegion
def format_body(self, body):
# expect body to be a string representing a dict
try:
potential_body = json.loads(body)
if isinstance(potential_body, dict):
body = potential_body
except json.JSONDecodeError:
body = {}
return body
def send_messages(self, messages):
sent_messages = 0
for m in messages:
grafana_data = {}
grafana_headers = {}
if 'started' in m.body:
try:
epoch=datetime.datetime.utcfromtimestamp(0)
grafana_data['time'] = grafana_data['timeEnd'] = int((dp.parse(m.body['started']).replace(tzinfo=None) - epoch).total_seconds() * 1000)
if m.body.get('finished'):
grafana_data['timeEnd'] = int((dp.parse(m.body['finished']).replace(tzinfo=None) - epoch).total_seconds() * 1000)
except ValueError:
logger.error(smart_text(_("Error converting time {} or timeEnd {} to int.").format(m.body['started'],m.body['finished'])))
if not self.fail_silently:
raise Exception(smart_text(_("Error converting time {} and/or timeEnd {} to int.").format(m.body['started'],m.body['finished'])))
grafana_data['isRegion'] = self.isRegion
grafana_data['dashboardId'] = self.dashboardId
grafana_data['panelId'] = self.panelId
if self.annotation_tags:
grafana_data['tags'] = self.annotation_tags
grafana_data['text'] = m.subject
grafana_headers['Authorization'] = "Bearer {}".format(self.grafana_key)
grafana_headers['Content-Type'] = "application/json"
r = requests.post("{}/api/annotations".format(m.recipients()[0]),
json=grafana_data,
headers=grafana_headers,
verify=(not self.grafana_no_verify_ssl))
if r.status_code >= 400:
logger.error(smart_text(_("Error sending notification grafana: {}").format(r.status_code)))
if not self.fail_silently:
raise Exception(smart_text(_("Error sending notification grafana: {}").format(r.status_code)))
sent_messages += 1
return sent_messages
|
{
"content_hash": "9599a0cf6dd2763032dae10fdb5cf90f",
"timestamp": "",
"source": "github",
"line_count": 98,
"max_line_length": 155,
"avg_line_length": 53.173469387755105,
"alnum_prop": 0.6269430051813472,
"repo_name": "GoogleCloudPlatform/sap-deployment-automation",
"id": "8e8b6489520bcc2cbeec1a78a041225c2595e166",
"size": "5270",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "third_party/github.com/ansible/awx/awx/main/notifications/grafana_backend.py",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
}
|
import sys
import hashlib
import importlib
import gc
import logging
import subprocess
import os
# import own modules
sys.path.append('./')
try:
import sensors
except Exception as e:
    print(e)
class MiniProbe(object):
"""
Main class for the Python Mini Probe
"""
def __init__(self):
gc.enable()
logging.basicConfig(
filename="./logs/probe.log",
filemode="a",
level=logging.INFO,
format="%(asctime)s - %(levelname)s - %(message)s",
datefmt='%m/%d/%Y %H:%M:%S'
)
def get_import_sensors(self):
"""
import available sensor modules and return list of sensor objects
"""
sensor_objects = []
for mod in sensors.__all__:
try:
sensor_objects.append(self.load_class("sensors.%s.%s" % (mod.lower(), mod)))
except Exception as import_error:
logging.error("Sensor Import Error! Error message: %s" % import_error)
return sensor_objects
@staticmethod
def load_class(full_class_string):
"""
dynamically load a class from a string
"""
class_data = full_class_string.split(".")
module_path = ".".join(class_data[:-1])
class_str = class_data[-1]
module = importlib.import_module(module_path)
return getattr(module, class_str)
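    # Example (hypothetical sensor module): load_class("sensors.ping.Ping") imports the
    # module "sensors.ping" and returns its Ping class; get_import_sensors() above relies
    # on this when iterating sensors.__all__.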
def read_config(self, path):
"""
read configuration file and write data to dict
"""
config = {}
try:
conf_file = open(path)
for line in conf_file:
if not (line == '\n'):
if not (line.startswith('#')):
config[line.split(':')[0]] = line.split(':')[1].rstrip()
conf_file.close()
return config
except Exception as read_error:
logging.error("No config found! Error Message: %s Exiting!" % read_error)
sys.exit()
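    # Expected config file format (sketch, hypothetical values): one "key:value" pair per
    # line, blank lines and lines starting with '#' are ignored, e.g.
    #   gid:999
    #   server:prtg.example.com
    # would yield {'gid': '999', 'server': 'prtg.example.com'}.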
@staticmethod
def hash_access_key(key):
"""
create hash of probes access key
"""
return hashlib.sha1(key).hexdigest()
def create_parameters(self, config, jsondata, i=None):
"""
create URL parameters for announce, task and data requests
"""
if i == 'announce':
return {'gid': config['gid'], 'key': self.hash_access_key(config['key']), 'protocol': config['protocol'],
'name': config['name'], 'baseinterval': config['baseinterval'], 'sensors': jsondata}
else:
return {'gid': config['gid'], 'key': self.hash_access_key(config['key']), 'protocol': config['protocol']}
def create_url(self, config, i=None, http=False):
"""
creating the actual URL
"""
prefix = "https"
if http:
prefix = "http"
if not (i is None) and (i != "data"):
return "%s://%s:%s/probe/%s" % (
prefix, config['server'], config['port'], i)
elif i == "data":
return "%s://%s:%s/probe/%s?gid=%s&protocol=%s&key=%s" % (prefix, config['server'], config['port'], i,
config['gid'], config['protocol'],
self.hash_access_key(config['key']))
pass
else:
return "No method given"
def build_announce(self, sensor_list):
"""
build json for announce request
"""
sensors_avail = []
for sensor in sensor_list:
if not sensor.get_sensordef() == "":
sensors_avail.append(sensor.get_sensordef())
return sensors_avail
@staticmethod
def clean_mem():
"""Ugly brute force method to clean up Mem"""
subprocess.call("sync", shell=False)
os.popen("sysctl vm.drop_caches=1")
os.popen("sysctl vm.drop_caches=2")
os.popen("sysctl vm.drop_caches=3")
|
{
"content_hash": "26f9c7f2ce494ed44ecbe2653a538ea1",
"timestamp": "",
"source": "github",
"line_count": 124,
"max_line_length": 117,
"avg_line_length": 32.403225806451616,
"alnum_prop": 0.5189148830263813,
"repo_name": "eagle00789/PythonMiniProbe",
"id": "54c756f6d619915b14fd529288bb2e64c8fc5e18",
"size": "5761",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "miniprobe/miniprobe.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "203383"
},
{
"name": "Shell",
"bytes": "3052"
},
{
"name": "Smarty",
"bytes": "105"
}
],
"symlink_target": ""
}
|
from oslo_config import cfg
from oslo_log import log as logging
from oslo_versionedobjects import fields
from cinder import db
from cinder import exception
from cinder.i18n import _
from cinder import objects
from cinder.objects import base
from cinder import utils
CONF = cfg.CONF
# NOTE(thangp): OPTIONAL_FIELDS are fields that would be lazy-loaded. They are
# typically the relationship in the sqlalchemy object.
OPTIONAL_FIELDS = ['volume', 'metadata']
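# For example (sketch): reading snapshot.volume on an instance fetched without
# expected_attrs=['volume'] triggers Snapshot.obj_load_attr('volume') below, which lazily
# loads the related Volume; non-optional fields are populated eagerly in _from_db_object().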
LOG = logging.getLogger(__name__)
@base.CinderObjectRegistry.register
class Snapshot(base.CinderPersistentObject, base.CinderObject,
base.CinderObjectDictCompat):
# Version 1.0: Initial version
VERSION = '1.0'
fields = {
'id': fields.UUIDField(),
'user_id': fields.UUIDField(nullable=True),
'project_id': fields.UUIDField(nullable=True),
'volume_id': fields.UUIDField(),
'cgsnapshot_id': fields.UUIDField(nullable=True),
'status': fields.StringField(nullable=True),
'progress': fields.StringField(nullable=True),
'volume_size': fields.IntegerField(),
'display_name': fields.StringField(nullable=True),
'display_description': fields.StringField(nullable=True),
'encryption_key_id': fields.UUIDField(nullable=True),
'volume_type_id': fields.UUIDField(nullable=True),
'provider_location': fields.StringField(nullable=True),
'provider_id': fields.UUIDField(nullable=True),
'metadata': fields.DictOfStringsField(),
'provider_auth': fields.StringField(nullable=True),
'volume': fields.ObjectField('Volume', nullable=True),
}
# NOTE(thangp): obj_extra_fields is used to hold properties that are not
# usually part of the model
obj_extra_fields = ['name', 'volume_name']
@property
def name(self):
return CONF.snapshot_name_template % self.id
@property
def volume_name(self):
return self.volume.name
def __init__(self, *args, **kwargs):
super(Snapshot, self).__init__(*args, **kwargs)
self._orig_metadata = {}
self._reset_metadata_tracking()
def obj_reset_changes(self, fields=None):
super(Snapshot, self).obj_reset_changes(fields)
self._reset_metadata_tracking(fields=fields)
def _reset_metadata_tracking(self, fields=None):
if fields is None or 'metadata' in fields:
self._orig_metadata = (dict(self.metadata)
if 'metadata' in self else {})
def obj_what_changed(self):
changes = super(Snapshot, self).obj_what_changed()
if 'metadata' in self and self.metadata != self._orig_metadata:
changes.add('metadata')
return changes
def obj_make_compatible(self, primitive, target_version):
"""Make an object representation compatible with a target version."""
target_version = utils.convert_version_to_tuple(target_version)
@staticmethod
def _from_db_object(context, snapshot, db_snapshot, expected_attrs=None):
if expected_attrs is None:
expected_attrs = []
for name, field in snapshot.fields.items():
if name in OPTIONAL_FIELDS:
continue
value = db_snapshot.get(name)
if isinstance(field, fields.IntegerField):
value = value if value is not None else 0
snapshot[name] = value
if 'volume' in expected_attrs:
volume = objects.Volume(context)
volume._from_db_object(context, volume, db_snapshot['volume'])
snapshot.volume = volume
if 'metadata' in expected_attrs:
metadata = db_snapshot.get('snapshot_metadata')
if metadata is None:
raise exception.MetadataAbsent()
snapshot.metadata = {item['key']: item['value']
for item in metadata}
snapshot._context = context
snapshot.obj_reset_changes()
return snapshot
@base.remotable_classmethod
def get_by_id(cls, context, id):
db_snapshot = db.snapshot_get(context, id)
return cls._from_db_object(context, cls(context), db_snapshot,
expected_attrs=['metadata'])
@base.remotable
def create(self):
if self.obj_attr_is_set('id'):
raise exception.ObjectActionError(action='create',
reason=_('already created'))
updates = self.cinder_obj_get_changes()
if 'volume' in updates:
raise exception.ObjectActionError(action='create',
reason=_('volume assigned'))
db_snapshot = db.snapshot_create(self._context, updates)
self._from_db_object(self._context, self, db_snapshot)
@base.remotable
def save(self):
updates = self.cinder_obj_get_changes()
if updates:
if 'volume' in updates:
raise exception.ObjectActionError(action='save',
reason=_('volume changed'))
if 'metadata' in updates:
# Metadata items that are not specified in the
# self.metadata will be deleted
metadata = updates.pop('metadata', None)
self.metadata = db.snapshot_metadata_update(self._context,
self.id, metadata,
True)
db.snapshot_update(self._context, self.id, updates)
self.obj_reset_changes()
@base.remotable
def destroy(self):
db.snapshot_destroy(self._context, self.id)
def obj_load_attr(self, attrname):
if attrname not in OPTIONAL_FIELDS:
raise exception.ObjectActionError(
action='obj_load_attr',
reason=_('attribute %s not lazy-loadable') % attrname)
if not self._context:
raise exception.OrphanedObjectError(method='obj_load_attr',
objtype=self.obj_name())
if attrname == 'volume':
self.volume = objects.Volume.get_by_id(self._context,
self.volume_id)
self.obj_reset_changes(fields=[attrname])
def delete_metadata_key(self, context, key):
db.snapshot_metadata_delete(context, self.id, key)
md_was_changed = 'metadata' in self.obj_what_changed()
del self.metadata[key]
self._orig_metadata.pop(key, None)
if not md_was_changed:
self.obj_reset_changes(['metadata'])
@base.remotable_classmethod
def snapshot_data_get_for_project(cls, context, project_id,
volume_type_id=None):
return db.snapshot_data_get_for_project(context, project_id,
volume_type_id)
@base.CinderObjectRegistry.register
class SnapshotList(base.ObjectListBase, base.CinderObject):
VERSION = '1.0'
fields = {
'objects': fields.ListOfObjectsField('Snapshot'),
}
child_versions = {
'1.0': '1.0'
}
@base.remotable_classmethod
def get_all(cls, context, search_opts):
snapshots = db.snapshot_get_all(context, search_opts)
return base.obj_make_list(context, cls(), objects.Snapshot,
snapshots,
expected_attrs=['metadata'])
@base.remotable_classmethod
def get_by_host(cls, context, host, filters=None):
snapshots = db.snapshot_get_by_host(context, host, filters)
return base.obj_make_list(context, cls(context), objects.Snapshot,
snapshots, expected_attrs=['metadata'])
@base.remotable_classmethod
def get_all_by_project(cls, context, project_id, search_opts):
snapshots = db.snapshot_get_all_by_project(context, project_id,
search_opts)
return base.obj_make_list(context, cls(context), objects.Snapshot,
snapshots, expected_attrs=['metadata'])
@base.remotable_classmethod
def get_all_for_volume(cls, context, volume_id):
snapshots = db.snapshot_get_all_for_volume(context, volume_id)
return base.obj_make_list(context, cls(context), objects.Snapshot,
snapshots, expected_attrs=['metadata'])
@base.remotable_classmethod
def get_active_by_window(cls, context, begin, end):
snapshots = db.snapshot_get_active_by_window(context, begin, end)
return base.obj_make_list(context, cls(context), objects.Snapshot,
snapshots, expected_attrs=['metadata'])
@base.remotable_classmethod
def get_all_for_cgsnapshot(cls, context, cgsnapshot_id):
snapshots = db.snapshot_get_all_for_cgsnapshot(context, cgsnapshot_id)
return base.obj_make_list(context, cls(context), objects.Snapshot,
snapshots, expected_attrs=['metadata'])
|
{
"content_hash": "03101e3272749c855aa3db429ca86c19",
"timestamp": "",
"source": "github",
"line_count": 238,
"max_line_length": 78,
"avg_line_length": 38.68487394957983,
"alnum_prop": 0.5930270446399478,
"repo_name": "tlakshman26/cinder-https-changes",
"id": "c482313894cde4282c39645a08a77c08bd468855",
"size": "9819",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "cinder/objects/snapshot.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "12281139"
},
{
"name": "Shell",
"bytes": "8111"
}
],
"symlink_target": ""
}
|
import shutil, sys, os, stat
from subprocess import *
#idaBatchAnalyze.py "C:\Program Files (x86)\Intuit" "C:\analysis\Intuit" "C:\Program Files (x86)\IDA\idag.exe" C:\analysis\analysis_script.py
#idaBatchAnalyze.py "C:\Users\apridgen\Desktop\ICA_Client" "C:\analysis\ICA_Client" "C:\Program Files (x86)\IDA\idag.exe" C:\analysis\analysis_script.py
SCRIPT = '''
import shutil, sys, os
def batch_analyze_function():
base_output = "%s"
ida_file = GetInputFilePath()
new_base_fname = os.path.split(ida_file)[1]
new_base_fname = new_base_fname.replace(".","_")
new_file = os.path.join(base_output, new_base_fname)
new_file = new_file+".idb"
x = SegStart(BeginEA())
y = SegEnd(BeginEA())
Wait()
AnalyseArea(x,y)
Wait()
SaveBase(new_file)
Wait()
Exit(0)
batch_analyze_function()
'''
def enableWritePerms(path):
os.chmod(path, stat.S_IWRITE)
for i in os.listdir(path):
f = os.path.join(path, i)
if os.path.isfile(f):
os.chmod(f, stat.S_IWRITE)
else:
enableWritePerms(f)
def write_dynamic_script(fname, *args):
global SCRIPT
print "Generating script %s."%fname
a = tuple([i.replace("\\","\\\\") for i in args])
print "\t\t arguments: %s"%str(a)
script = SCRIPT%a
#print "Filename: %s"%fname
#print script
f = open(fname, "w")
f.write(script)
f.close()
def run_cmd(cmd_list):
x = Popen(cmd_list, stdout=PIPE)
return x.stdout.read()
def clone_tree(src, dest):
shutil.copytree(src, dest)
def perform_ida_analysis(ida_path, analysis_script, filename):
cmd_str = ['"%s"'%ida_path,
'-A',
'-OIDAPython:"%s"'%analysis_script,
'"%s"'%filename
]
#print "Calling the following command: %s"%(" ".join(cmd_str))
print "Performing analysis on: %s"%(filename)
return run_cmd(" ".join(cmd_str))
def perform_analysis(ida, analysis_script_name, dir):
basedir = dir
subdirlist = []
print "Entered %s"%(dir)
write_dynamic_script(analysis_script_name, basedir)
#print os.listdir(basedir)
for i in os.listdir(basedir):
if os.path.isfile(os.path.join(basedir, i)):
if i[-4:].lower() == ".cat" or i[-4:].lower() == ".drv" or i[-4:].lower() == ".sys" or i[-4:].lower() == ".dll" or i[-4:].lower() == ".exe":
print "Found: %s"%(os.path.join(basedir, i))
perform_ida_analysis(ida, analysis_script_name, os.path.join(basedir, i))
# remove the idb file after analysis
print "Removing: %s"%os.path.join(basedir, i[:-4]+".idb")
x = os.path.join(basedir, i[:-4]+".idb")
if os.path.exists(x):
os.remove(x)
elif i[-4:].lower() == ".idb":
continue
else:
os.remove(os.path.join(basedir, i))
else:
subdirlist.append(os.path.join(basedir, i))
os.remove(analysis_script_name)
for fq_path in subdirlist:
perform_analysis(ida, analysis_script_name, fq_path)
usage = "idaBatchAnalyze.py <src_dir> <dest_dir> <idapath> <analysis_script>"
if __name__ == "__main__":
    if len(sys.argv) != 5:
        print usage
        sys.exit(1)
src_dir = sys.argv[1]
dest_dir = sys.argv[2]
ida = sys.argv[3]
script = sys.argv[4]
print "Cloning Directory:\n\tFrom: %s\n\tTo: %s"%(src_dir, dest_dir)
print "\n"
print "IDA Executable: %s"%ida
print "Dynamic Script Name Used for Analysis: %s"%script
clone_tree(src_dir, dest_dir)
print "Finished cloning.."
#print "Setting Write Perms in the Directory"
#enableWritePerms(dest_dir)
#print "Done Setting Perms"
print "Beginning Recursive Analysis."
perform_analysis(ida, script, dest_dir)
print "Completed Recursive Batch Analysis"
|
{
"content_hash": "72f698842135b3d08c99c148b0248a77",
"timestamp": "",
"source": "github",
"line_count": 111,
"max_line_length": 152,
"avg_line_length": 32,
"alnum_prop": 0.6427364864864865,
"repo_name": "deeso/python_scrirpts",
"id": "a33d4785079efffbbb578ca0346c63c35a0e8eb7",
"size": "3942",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ida/idaBatchAnalyzer.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "569915"
}
],
"symlink_target": ""
}
|
"""info module tests."""
import mox
import stubout
from google.apputils import app
from google.apputils import basetest
from simian.mac.api import info
class InfoModuleTest(mox.MoxTestBase):
def setUp(self):
mox.MoxTestBase.setUp(self)
self.stubs = stubout.StubOutForTesting()
def tearDown(self):
self.mox.UnsetStubs()
self.stubs.UnsetAll()
# TODO(user): Add tests for info.InfoHandler class.
def main(unused_argv):
basetest.main()
if __name__ == '__main__':
app.run()
|
{
"content_hash": "57398bd5ac71d15309505788c4ab478c",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 51,
"avg_line_length": 16.322580645161292,
"alnum_prop": 0.7035573122529645,
"repo_name": "alexandregz/simian",
"id": "7ce3e291d82cd4c293d87fc763ff4c405d239d8c",
"size": "1131",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/tests/simian/mac/api/info_test.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "37847"
},
{
"name": "HTML",
"bytes": "89696"
},
{
"name": "JavaScript",
"bytes": "28084"
},
{
"name": "Makefile",
"bytes": "8128"
},
{
"name": "Python",
"bytes": "1431095"
},
{
"name": "Shell",
"bytes": "19945"
}
],
"symlink_target": ""
}
|
import speech_recognition as sr
def recognize():
r = sr.Recognizer()
with sr.Microphone(device_index = 1, sample_rate = 44100, chunk_size = 512) as source:
print("Say something!")
audio = r.listen(source)
try:
# for testing purposes, we're just using the default API key
# to use another API key, use `r.recognize_google(audio, key="GOOGLE_SPEECH_RECOGNITION_API_KEY")`
# instead of `r.recognize_google(audio)`
trans = r.recognize_google(audio)
print trans
return trans
except sr.UnknownValueError:
print("Google Speech Recognition could not understand audio")
except sr.RequestError as e:
print("Could not request results from Google Speech Recognition service; {0}".format(e))
# import pyaudio
#
# def recognize():
# p = pyaudio.PyAudio()
#
# stream = p.open(format = pyaudio.paInt16,
# channels = 1,
# rate = 256, #TODO tune this
# input = True,
# frames_per_buffer = 4, #TODO this too
# input_device_index = 2) #TODO hardcode this right
#
# recog = sr.Recognizer("en-GB")
# r = array('h')
#
# ## You will need to loop this next bit until you are finished recording
# data = array('h',stream.read(framesPerBuffer))
# rms = audioop.rms(data,2)
# r.extend(data)
# r.append(data)
# ########
# data = pack('<' + ('h'*len(r)), *r)
# sample_width = p.get_sample_size(pyaudio.paInt16)
#
# wf = wave.open("/home/pi/Desktop/foo.wav", 'wb')
# wf.setnchannels(1)
# wf.setsampwidth(sample_width)
# wf.setframerate(sRate)
# wf.writeframes(data)
# wf.close()
#
# with sr.WavFile("foo.wav") as source:
# return recog.record(source)
|
{
"content_hash": "247e5fb2a698ad2a025238038965ca44",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 102,
"avg_line_length": 33.34615384615385,
"alnum_prop": 0.615916955017301,
"repo_name": "TrevorEdwards/Jauffre",
"id": "90295a185fba57fd7463a5354e3fa1724a3713c1",
"size": "1734",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "jauffre/speech.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "52135"
}
],
"symlink_target": ""
}
|
import time
import json
import urllib
from django.http import HttpResponseRedirect, HttpResponse
from django.contrib import auth
from utils.shortcuts import error_response, error_page
from .models import ADMIN
class SessionSecurityMiddleware(object):
def process_request(self, request):
if request.user.is_authenticated() and request.user.admin_type >= ADMIN:
if "last_activity" in request.session:
                # No activity in the last 24 hours
if time.time() - request.session["last_activity"] >= 24 * 60 * 60:
auth.logout(request)
if request.is_ajax():
return HttpResponse(json.dumps({"code": 1, "data": u"请先登录"}),
content_type="application/json")
else:
return HttpResponseRedirect("/login/?__from=" + urllib.quote(request.build_absolute_uri()))
            # Update the last-activity timestamp
request.session["last_activity"] = time.time()
|
{
"content_hash": "f3b56c148aa1c7ebf55217887b3d16df",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 115,
"avg_line_length": 43.73913043478261,
"alnum_prop": 0.5894632206759444,
"repo_name": "mcmdhr/CSOJ",
"id": "4295598f40d0f76f092d62f5ea985928d1585c75",
"size": "1357",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "account/middleware.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "48167"
},
{
"name": "HTML",
"bytes": "173998"
},
{
"name": "JavaScript",
"bytes": "130711"
},
{
"name": "Python",
"bytes": "247487"
},
{
"name": "Shell",
"bytes": "540"
}
],
"symlink_target": ""
}
|
import os
#os.environ.setdefault("DJANGO_SETTINGS_MODULE", "xos.settings")
import string
import random
import hashlib
from datetime import datetime
from netaddr import IPAddress, IPNetwork
from xos import settings
from django.core import management
from core.models import *
from xos.config import Config
try:
from openstack.client import OpenStackClient
from openstack.driver import OpenStackDriver
has_openstack = True
except:
has_openstack = False
manager_enabled = Config().api_nova_enabled
def random_string(size=6):
return ''.join(random.choice(string.ascii_uppercase + string.digits) for x in range(size))
def require_enabled(callable):
def wrapper(*args, **kwds):
if manager_enabled and has_openstack:
return callable(*args, **kwds)
else:
return None
return wrapper
class OpenStackManager:
def __init__(self, auth={}, caller=None):
self.client = None
self.driver = None
self.caller = None
self.has_openstack = has_openstack
self.enabled = manager_enabled
if has_openstack and manager_enabled:
if auth:
try:
self.init_user(auth, caller)
except:
# if this fails then it meanse the caller doesn't have a
# role at the slice's tenant. if the caller is an admin
# just use the admin client/manager.
if caller and caller.is_admin:
self.init_admin()
else: raise
else:
self.init_admin()
@require_enabled
def init_caller(self, caller, tenant):
auth = {'username': caller.email,
'password': hashlib.md5(caller.password).hexdigest()[:6],
'tenant': tenant}
self.client = OpenStackClient(**auth)
self.driver = OpenStackDriver(client=self.client)
self.caller = caller
@require_enabled
def init_admin(self, tenant=None):
# use the admin credentials
self.client = OpenStackClient(tenant=tenant)
self.driver = OpenStackDriver(client=self.client)
self.caller = self.driver.admin_user
self.caller.kuser_id = self.caller.id
@require_enabled
def save_role(self, role):
if not role.role:
keystone_role = self.driver.create_role(role.role_type)
role.role = keystone_role.id
@require_enabled
def delete_role(self, role):
if role.role:
self.driver.delete_role({'id': role.role})
@require_enabled
def save_key(self, key, name):
key_fields = {'name': name,
'public_key': key}
nova_key = self.driver.create_keypair(**key_fields)
@require_enabled
def delete_key(self, key):
if key.nkey_id:
self.driver.delete_keypair(key.nkey_id)
@require_enabled
def save_user(self, user):
name = user.email[:user.email.find('@')]
user_fields = {'name': name,
'email': user.email,
'password': hashlib.md5(user.password).hexdigest()[:6],
'enabled': True}
if not user.kuser_id:
keystone_user = self.driver.create_user(**user_fields)
user.kuser_id = keystone_user.id
else:
self.driver.update_user(user.kuser_id, user_fields)
if user.site:
self.driver.add_user_role(user.kuser_id, user.site.tenant_id, 'user')
if user.is_admin:
self.driver.add_user_role(user.kuser_id, user.site.tenant_id, 'admin')
else:
# may have admin role so attempt to remove it
self.driver.delete_user_role(user.kuser_id, user.site.tenant_id, 'admin')
if user.public_key:
self.init_caller(user, user.site.login_base)
self.save_key(user.public_key, user.keyname)
self.init_admin()
user.save()
user.enacted = datetime.now()
user.save(update_fields=['enacted'])
@require_enabled
def delete_user(self, user):
if user.kuser_id:
self.driver.delete_user(user.kuser_id)
@require_enabled
def save_site(self, site, add_role=True):
if not site.tenant_id:
tenant = self.driver.create_tenant(tenant_name=site.login_base,
description=site.name,
enabled=site.enabled)
site.tenant_id = tenant.id
# give caller an admin role at the tenant they've created
self.driver.add_user_role(self.caller.kuser_id, tenant.id, 'admin')
# update the record
if site.id and site.tenant_id:
self.driver.update_tenant(site.tenant_id,
description=site.name,
enabled=site.enabled)
# commit the updated record
site.save()
site.enacted = datetime.now()
site.save(update_fields=['enacted']) # enusre enacted > updated
@require_enabled
def delete_site(self, site):
if site.tenant_id:
self.driver.delete_tenant(site.tenant_id)
@require_enabled
def save_site_privilege(self, site_priv):
if site_priv.user.kuser_id and site_priv.site.tenant_id:
self.driver.add_user_role(site_priv.user.kuser_id,
site_priv.site.tenant_id,
site_priv.role.role_type)
site_priv.enacted = datetime.now()
site_priv.save(update_fields=['enacted'])
@require_enabled
def delete_site_privilege(self, site_priv):
self.driver.delete_user_role(site_priv.user.kuser_id,
site_priv.site.tenant_id,
site_priv.role.role_type)
@require_enabled
def save_slice(self, slice):
if not slice.tenant_id:
nova_fields = {'tenant_name': slice.name,
'description': slice.description,
'enabled': slice.enabled}
tenant = self.driver.create_tenant(**nova_fields)
slice.tenant_id = tenant.id
# give caller an admin role at the tenant they've created
self.driver.add_user_role(self.caller.kuser_id, tenant.id, 'admin')
# refresh credentials using this tenant
self.driver.shell.connect(username=self.driver.shell.keystone.username,
password=self.driver.shell.keystone.password,
tenant=tenant.name)
# create network
network = self.driver.create_network(slice.name)
slice.network_id = network['id']
# create router
router = self.driver.create_router(slice.name)
slice.router_id = router['id']
# create subnet
next_subnet = self.get_next_subnet()
cidr = str(next_subnet.cidr)
ip_version = next_subnet.version
start = str(next_subnet[2])
end = str(next_subnet[-2])
subnet = self.driver.create_subnet(name=slice.name,
network_id = network['id'],
cidr_ip = cidr,
ip_version = ip_version,
start = start,
end = end)
slice.subnet_id = subnet['id']
# add subnet as interface to slice's router
self.driver.add_router_interface(router['id'], subnet['id'])
# add external route
self.driver.add_external_route(subnet)
if slice.id and slice.tenant_id:
self.driver.update_tenant(slice.tenant_id,
description=slice.description,
enabled=slice.enabled)
slice.save()
slice.enacted = datetime.now()
slice.save(update_fields=['enacted'])
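    # Resource creation order above, as a sketch: keystone tenant -> quantum network ->
    # router -> /24 subnet taken from get_next_subnet() -> router interface -> external
    # route; the resulting ids are stored back on the slice model.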
@require_enabled
def delete_slice(self, slice):
if slice.tenant_id:
self._delete_slice(slice.tenant_id, slice.network_id,
slice.router_id, slice.subnet_id)
@require_enabled
def _delete_slice(self, tenant_id, network_id, router_id, subnet_id):
        self.driver.delete_router_interface(router_id, subnet_id)
        self.driver.delete_subnet(subnet_id)
        self.driver.delete_router(router_id)
        self.driver.delete_network(network_id)
        self.driver.delete_tenant(tenant_id)
        # delete external route
        subnet = None
        subnets = self.driver.shell.quantum.list_subnets()['subnets']
        for snet in subnets:
            if snet['id'] == subnet_id:
                subnet = snet
        if subnet:
            self.driver.delete_external_route(subnet)
@require_enabled
def save_slice_membership(self, slice_memb):
if slice_memb.user.kuser_id and slice_memb.slice.tenant_id:
self.driver.add_user_role(slice_memb.user.kuser_id,
slice_memb.slice.tenant_id,
slice_memb.role.role_type)
slice_memb.enacted = datetime.now()
slice_memb.save(update_fields=['enacted'])
@require_enabled
def delete_slice_membership(self, slice_memb):
self.driver.delete_user_role(slice_memb.user.kuser_id,
slice_memb.slice.tenant_id,
slice_memb.role.role_type)
@require_enabled
def get_next_subnet(self):
# limit ourself to 10.0.x.x for now
valid_subnet = lambda net: net.startswith('10.0')
subnets = self.driver.shell.quantum.list_subnets()['subnets']
ints = [int(IPNetwork(subnet['cidr']).ip) for subnet in subnets \
if valid_subnet(subnet['cidr'])]
ints.sort()
last_ip = IPAddress(ints[-1])
last_network = IPNetwork(str(last_ip) + "/24")
next_network = IPNetwork(str(IPAddress(last_network) + last_network.size) + "/24")
return next_network
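    # Worked example of the arithmetic above (hypothetical state): if the highest existing
    # 10.0.x.x subnet is 10.0.7.0/24, last_network.size is 256, so the returned
    # next_network is 10.0.8.0/24.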
@require_enabled
def save_subnet(self, subnet):
if not subnet.subnet_id:
quantum_subnet = self.driver.create_subnet(name= subnet.slice.name,
network_id=subnet.slice.network_id,
cidr_ip = subnet.cidr,
ip_version=subnet.ip_version,
start = subnet.start,
end = subnet.end)
subnet.subnet_id = quantum_subnet['id']
# add subnet as interface to slice's router
self.driver.add_router_interface(subnet.slice.router_id, subnet.subnet_id)
#add_route = 'route add -net %s dev br-ex gw 10.100.0.5' % self.cidr
#commands.getstatusoutput(add_route)
@require_enabled
def delete_subnet(self, subnet):
if subnet.subnet_id:
self.driver.delete_router_interface(subnet.slice.router_id, subnet.subnet_id)
self.driver.delete_subnet(subnet.subnet_id)
#del_route = 'route del -net %s' % self.cidr
#commands.getstatusoutput(del_route)
def get_requested_networks(self, slice):
network_ids = [x.network_id for x in slice.networks.all()]
if slice.network_id is not None:
network_ids.append(slice.network_id)
networks = []
for network_id in network_ids:
networks.append({"net-id": network_id})
return networks
@require_enabled
def save_sliver(self, sliver):
metadata_update = {}
if ("numberCores" in sliver.changed_fields):
metadata_update["cpu_cores"] = str(sliver.numberCores)
for tag in sliver.slice.tags.all():
if tag.name.startswith("sysctl-"):
metadata_update[tag.name] = tag.value
if not sliver.instance_id:
nics = self.get_requested_networks(sliver.slice)
for nic in nics:
# If a network hasn't been instantiated yet, then we'll fail
# during slice creation. Defer saving the sliver for now.
if not nic.get("net-id", None):
sliver.save() # in case it hasn't been saved yet
return
slice_memberships = SliceMembership.objects.filter(slice=sliver.slice)
pubkeys = [sm.user.public_key for sm in slice_memberships if sm.user.public_key]
pubkeys.append(sliver.creator.public_key)
instance = self.driver.spawn_instance(name=sliver.name,
key_name = sliver.creator.keyname,
image_id = sliver.image.image_id,
hostname = sliver.node.name,
pubkeys = pubkeys,
nics = nics,
metadata = metadata_update )
sliver.instance_id = instance.id
sliver.instance_name = getattr(instance, 'OS-EXT-SRV-ATTR:instance_name')
else:
if metadata_update:
self.driver.update_instance_metadata(sliver.instance_id, metadata_update)
sliver.save()
sliver.enacted = datetime.now()
sliver.save(update_fields=['enacted'])
@require_enabled
def delete_sliver(self, sliver):
if sliver.instance_id:
self.driver.destroy_instance(sliver.instance_id)
def refresh_nodes(self):
# collect local nodes
nodes = Node.objects.all()
nodes_dict = {}
for node in nodes:
if 'viccidev10' not in node.name:
nodes_dict[node.name] = node
deployment = Deployment.objects.filter(name='VICCI')[0]
login_bases = ['princeton', 'stanford', 'gt', 'uw', 'mpisws']
sites = Site.objects.filter(login_base__in=login_bases)
# collect nova nodes:
compute_nodes = self.client.nova.hypervisors.list()
compute_nodes_dict = {}
for compute_node in compute_nodes:
compute_nodes_dict[compute_node.hypervisor_hostname] = compute_node
# add new nodes:
new_node_names = set(compute_nodes_dict.keys()).difference(nodes_dict.keys())
i = 0
max = len(sites)
for name in new_node_names:
if i == max:
i = 0
site = sites[i]
node = Node(name=compute_nodes_dict[name].hypervisor_hostname,
site=site,
deployment=deployment)
node.save()
i+=1
# remove old nodes
old_node_names = set(nodes_dict.keys()).difference(compute_nodes_dict.keys())
Node.objects.filter(name__in=old_node_names).delete()
def refresh_images(self):
from core.models.image import Image
# collect local images
images = Image.objects.all()
images_dict = {}
for image in images:
images_dict[image.name] = image
# collect glance images
glance_images = self.client.glance.get_images()
glance_images_dict = {}
for glance_image in glance_images:
glance_images_dict[glance_image['name']] = glance_image
# add new images
new_image_names = set(glance_images_dict.keys()).difference(images_dict.keys())
for name in new_image_names:
image = Image(image_id=glance_images_dict[name]['id'],
name=glance_images_dict[name]['name'],
disk_format=glance_images_dict[name]['disk_format'],
container_format=glance_images_dict[name]['container_format'])
image.save()
# remove old images
old_image_names = set(images_dict.keys()).difference(glance_images_dict.keys())
Image.objects.filter(name__in=old_image_names).delete()
@require_enabled
def save_network(self, network):
if not network.network_id:
if network.template.shared_network_name:
network.network_id = network.template.shared_network_id
(network.subnet_id, network.subnet) = self.driver.get_network_subnet(network.network_id)
else:
network_name = network.name
# create network
os_network = self.driver.create_network(network_name, shared=True)
network.network_id = os_network['id']
# create router
router = self.driver.create_router(network_name)
network.router_id = router['id']
# create subnet
next_subnet = self.get_next_subnet()
cidr = str(next_subnet.cidr)
ip_version = next_subnet.version
start = str(next_subnet[2])
end = str(next_subnet[-2])
subnet = self.driver.create_subnet(name=network_name,
network_id = network.network_id,
cidr_ip = cidr,
ip_version = ip_version,
start = start,
end = end)
network.subnet = cidr
network.subnet_id = subnet['id']
# add subnet as interface to slice's router
self.driver.add_router_interface(router['id'], subnet['id'])
# add external route
self.driver.add_external_route(subnet)
network.save()
network.enacted = datetime.now()
network.save(update_fields=['enacted'])
def delete_network(self, network):
if (network.router_id) and (network.subnet_id):
self.driver.delete_router_interface(network.router_id, network.subnet_id)
if network.subnet_id:
self.driver.delete_subnet(network.subnet_id)
if network.router_id:
self.driver.delete_router(network.router_id)
if network.network_id:
self.driver.delete_network(network.network_id)
def save_network_template(self, template):
if (template.shared_network_name) and (not template.shared_network_id):
os_networks = self.driver.shell.quantum.list_networks(name=template.shared_network_name)['networks']
if os_networks:
template.shared_network_id = os_networks[0]["id"]
template.save()
template.enacted = datetime.now()
template.save(update_fields=['enacted'])
def find_or_make_template_for_network(self, name):
""" Given a network name, try to guess the right template for it """
# templates for networks we may encounter
if name=='nat-net':
template_dict = None # {"name": "private-nat", "visibility": "private", "translation": "nat"}
elif name=='sharednet1':
template_dict = {"name": "dedicated-public", "visibility": "public", "translation": "none"}
else:
template_dict = {"name": "private", "visibility": "private", "translation": "none"}
# if we have an existing template return it
templates = NetworkTemplate.objects.filter(name=template_dict["name"])
if templates:
return templates[0]
if template_dict == None:
return None
template = NetworkTemplate(**template_dict)
template.save()
return template
def refresh_network_templates(self):
for template in NetworkTemplate.objects.all():
if (template.shared_network_name) and (not template.shared_network_id):
# this will cause us to try to fill in the shared_network_id
self.save_network_template(template)
def refresh_networks(self):
# get a list of all networks in the model
networks = Network.objects.all()
networks_by_name = {}
networks_by_id = {}
for network in networks:
networks_by_name[network.name] = network
networks_by_id[network.network_id] = network
# Get a list of all shared networks in OS
os_networks = self.driver.shell.quantum.list_networks()['networks']
os_networks_by_name = {}
os_networks_by_id = {}
for os_network in os_networks:
os_networks_by_name[os_network['name']] = os_network
os_networks_by_id[os_network['id']] = os_network
for (uuid, os_network) in os_networks_by_id.items():
#print "checking OS network", os_network['name']
if (os_network['shared']) and (uuid not in networks_by_id):
# Only automatically create shared networks. This is for Andy's
# nat-net and sharednet1.
owner_slice = Slice.objects.get(tenant_id = os_network['tenant_id'])
template = self.find_or_make_template_for_network(os_network['name'])
if (template is None):
# This is our way of saying we don't want to auto-instantiate
# this network type.
continue
(subnet_id, subnet) = self.driver.get_network_subnet(os_network['id'])
if owner_slice:
#print "creating model object for OS network", os_network['name']
new_network = Network(name = os_network['name'],
template = template,
owner = owner_slice,
network_id = uuid,
subnet_id = subnet_id)
new_network.save()
for (network_id, network) in networks_by_id.items():
# If the network disappeared from OS, then reset its network_id to None
if (network.network_id is not None) and (network.network_id not in os_networks_by_id):
network.network_id = None
# If no OS object exists, then saving the network will create one
if (network.network_id is None):
#print "creating OS network for", network.name
self.save_network(network)
else:
pass #print "network", network.name, "has its OS object"
|
{
"content_hash": "4b15e29db412de7491a4eca27ef30d03",
"timestamp": "",
"source": "github",
"line_count": 568,
"max_line_length": 112,
"avg_line_length": 40.41021126760563,
"alnum_prop": 0.5499934649065482,
"repo_name": "wathsalav/xos",
"id": "42b1ef18aa5ef6ca67e24a18a57ec7fabb7c6949",
"size": "22953",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "xos/openstack/manager.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ApacheConf",
"bytes": "370"
},
{
"name": "CSS",
"bytes": "37088"
},
{
"name": "HTML",
"bytes": "636864"
},
{
"name": "JavaScript",
"bytes": "760492"
},
{
"name": "Makefile",
"bytes": "2717"
},
{
"name": "Python",
"bytes": "1160110"
},
{
"name": "Shell",
"bytes": "10483"
}
],
"symlink_target": ""
}
|
"""
urlresolver XBMC Addon
Copyright (C) 2015 tknorris
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import re
import urllib
import json
from urlresolver import common
from urlresolver.common import i18n
from urlresolver.resolver import UrlResolver, ResolverError
logger = common.log_utils.Logger.get_logger(__name__)
logger.disable()
class RPnetResolver(UrlResolver):
name = "RPnet"
domains = ["*"]
def __init__(self):
self.net = common.Net()
self.patterns = None
self.hosts = None
# UrlResolver methods
def get_media_url(self, host, media_id):
username = self.get_setting('username')
password = self.get_setting('password')
url = 'https://premium.rpnet.biz/client_api.php'
query = urllib.urlencode({'username': username, 'password': password, 'action': 'generate', 'links': media_id})
url = url + '?' + query
response = self.net.http_GET(url).content
response = json.loads(response)
if 'links' in response and response['links']:
link = response['links'][0]
if 'generated' in link:
return link['generated']
elif 'error' in link:
raise ResolverError(link['error'])
else:
msg = 'No Link Returned'
if 'error' in response and response['error']:
msg += ': %s' % (response['error'][0])
raise ResolverError(msg)
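    # Response shape assumed above (sketch): {"links": [{"generated": "<direct url>"}]} on
    # success, {"links": [{"error": "..."}]} for a per-link failure; otherwise a
    # "No Link Returned" error is raised, optionally annotated with a top-level "error" entry.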
def get_url(self, host, media_id):
return media_id
def get_host_and_id(self, url):
return 'rpnet.biz', url
@common.cache.cache_method(cache_limit=8)
def get_all_hosters(self):
try:
patterns = []
url = 'http://premium.rpnet.biz/hoster.json'
response = self.net.http_GET(url).content
hosters = json.loads(response)
logger.log_debug('rpnet patterns: %s' % (hosters))
patterns = [re.compile(pattern) for pattern in hosters['supported']]
except Exception as e:
logger.log_error('Error getting RPNet patterns: %s' % (e))
return patterns
@common.cache.cache_method(cache_limit=8)
def get_hosts(self):
try:
hosts = []
url = 'http://premium.rpnet.biz/hoster2.json'
response = self.net.http_GET(url).content
hosts = json.loads(response)['supported']
logger.log_debug('rpnet hosts: %s' % (hosts))
except Exception as e:
logger.log_error('Error getting RPNet hosts: %s' % (e))
return hosts
def valid_url(self, url, host):
if url:
if self.patterns is None:
self.patterns = self.get_all_hosters()
if any(pattern.search(url) for pattern in self.patterns):
return True
elif host:
if self.hosts is None:
self.hosts = self.get_hosts()
if host.startswith('www.'): host = host.replace('www.', '')
if any(host in item for item in self.hosts):
return True
return False
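    # Two matching paths, roughly: valid_url("http://host.example/file", "") tests the URL
    # against the regexes cached from hoster.json, while valid_url("", "host.example")
    # falls back to the plain host list from hoster2.json (example values are hypothetical).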
@classmethod
def get_settings_xml(cls):
xml = super(cls, cls).get_settings_xml(include_login=False)
xml.append('<setting id="%s_login" type="bool" label="%s" default="false"/>' % (cls.__name__, i18n('login')))
xml.append('<setting id="%s_username" enable="eq(-1,true)" type="text" label="%s" default=""/>' % (cls.__name__, i18n('username')))
xml.append('<setting id="%s_password" enable="eq(-2,true)" type="text" label="%s" option="hidden" default=""/>' % (cls.__name__, i18n('password')))
return xml
@classmethod
    def isUniversal(cls):
return True
|
{
"content_hash": "766996bfd9ba65cc794b827b2ec04fff",
"timestamp": "",
"source": "github",
"line_count": 117,
"max_line_length": 155,
"avg_line_length": 37.2991452991453,
"alnum_prop": 0.5955545371219065,
"repo_name": "TheWardoctor/Wardoctors-repo",
"id": "d4397074bfe8fa88520a760949844c76a8b49d97",
"size": "4364",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "script.module.urlresolver/lib/urlresolver/plugins/rpnet.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "3208"
},
{
"name": "JavaScript",
"bytes": "115722"
},
{
"name": "Python",
"bytes": "34405207"
},
{
"name": "Shell",
"bytes": "914"
}
],
"symlink_target": ""
}
|
"""The tests for the Tasmota light platform."""
import copy
import json
from unittest.mock import patch
from hatasmota.const import CONF_MAC
from hatasmota.utils import (
get_topic_stat_result,
get_topic_tele_state,
get_topic_tele_will,
)
from homeassistant.components import light
from homeassistant.components.light import SUPPORT_EFFECT, SUPPORT_TRANSITION
from homeassistant.components.tasmota.const import DEFAULT_PREFIX
from homeassistant.const import ATTR_ASSUMED_STATE, STATE_OFF, STATE_ON
from .test_common import (
DEFAULT_CONFIG,
help_test_availability,
help_test_availability_discovery_update,
help_test_availability_poll_state,
help_test_availability_when_connection_lost,
help_test_discovery_device_remove,
help_test_discovery_removal,
help_test_discovery_update_unchanged,
help_test_entity_id_update_discovery_update,
help_test_entity_id_update_subscriptions,
)
from tests.common import async_fire_mqtt_message
from tests.components.light import common
async def test_attributes_on_off(hass, mqtt_mock, setup_tasmota):
"""Test state update via MQTT."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 1
config["so"]["30"] = 1 # Enforce Home Assistant auto-discovery as light
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON"}')
state = hass.states.get("light.test")
assert state.attributes.get("effect_list") is None
assert state.attributes.get("min_mireds") is None
assert state.attributes.get("max_mireds") is None
assert state.attributes.get("supported_features") == 0
assert state.attributes.get("supported_color_modes") == ["onoff"]
assert state.attributes.get("color_mode") == "onoff"
async def test_attributes_dimmer_tuya(hass, mqtt_mock, setup_tasmota):
"""Test state update via MQTT."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 2
config["lt_st"] = 1 # 1 channel light (dimmer)
config["ty"] = 1 # Tuya device
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON"}')
state = hass.states.get("light.test")
assert state.attributes.get("effect_list") is None
assert state.attributes.get("min_mireds") is None
assert state.attributes.get("max_mireds") is None
assert state.attributes.get("supported_features") == 0
assert state.attributes.get("supported_color_modes") == ["brightness"]
assert state.attributes.get("color_mode") == "brightness"
async def test_attributes_dimmer(hass, mqtt_mock, setup_tasmota):
"""Test state update via MQTT."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 2
config["lt_st"] = 1 # 1 channel light (dimmer)
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON"}')
state = hass.states.get("light.test")
assert state.attributes.get("effect_list") is None
assert state.attributes.get("min_mireds") is None
assert state.attributes.get("max_mireds") is None
assert state.attributes.get("supported_features") == SUPPORT_TRANSITION
assert state.attributes.get("supported_color_modes") == ["brightness"]
assert state.attributes.get("color_mode") == "brightness"
async def test_attributes_ct(hass, mqtt_mock, setup_tasmota):
"""Test state update via MQTT."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 2
config["lt_st"] = 2 # 2 channel light (CW)
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON"}')
state = hass.states.get("light.test")
assert state.attributes.get("effect_list") is None
assert state.attributes.get("min_mireds") == 153
assert state.attributes.get("max_mireds") == 500
assert state.attributes.get("supported_features") == SUPPORT_TRANSITION
assert state.attributes.get("supported_color_modes") == ["color_temp"]
assert state.attributes.get("color_mode") == "color_temp"
async def test_attributes_ct_reduced(hass, mqtt_mock, setup_tasmota):
"""Test state update via MQTT."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 2
config["lt_st"] = 2 # 2 channel light (CW)
config["so"]["82"] = 1 # Reduced CT range
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON"}')
state = hass.states.get("light.test")
assert state.attributes.get("effect_list") is None
assert state.attributes.get("min_mireds") == 200
assert state.attributes.get("max_mireds") == 380
assert state.attributes.get("supported_features") == SUPPORT_TRANSITION
assert state.attributes.get("supported_color_modes") == ["color_temp"]
assert state.attributes.get("color_mode") == "color_temp"
async def test_attributes_rgb(hass, mqtt_mock, setup_tasmota):
"""Test state update via MQTT."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 2
config["lt_st"] = 3 # 3 channel light (RGB)
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON"}')
state = hass.states.get("light.test")
assert state.attributes.get("effect_list") == [
"None",
"Wake up",
"Cycle up",
"Cycle down",
"Random",
]
assert state.attributes.get("min_mireds") is None
assert state.attributes.get("max_mireds") is None
assert (
state.attributes.get("supported_features")
== SUPPORT_EFFECT | SUPPORT_TRANSITION
)
assert state.attributes.get("supported_color_modes") == ["rgb"]
assert state.attributes.get("color_mode") == "rgb"
async def test_attributes_rgbw(hass, mqtt_mock, setup_tasmota):
"""Test state update via MQTT."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 2
config["lt_st"] = 4 # 4 channel light (RGBW)
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON"}')
state = hass.states.get("light.test")
assert state.attributes.get("effect_list") == [
"None",
"Wake up",
"Cycle up",
"Cycle down",
"Random",
]
assert state.attributes.get("min_mireds") is None
assert state.attributes.get("max_mireds") is None
assert (
state.attributes.get("supported_features")
== SUPPORT_EFFECT | SUPPORT_TRANSITION
)
assert state.attributes.get("supported_color_modes") == ["rgb", "rgbw"]
assert state.attributes.get("color_mode") == "rgbw"
async def test_attributes_rgbww(hass, mqtt_mock, setup_tasmota):
"""Test state update via MQTT."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 2
config["lt_st"] = 5 # 5 channel light (RGBCW)
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON"}')
state = hass.states.get("light.test")
assert state.attributes.get("effect_list") == [
"None",
"Wake up",
"Cycle up",
"Cycle down",
"Random",
]
assert state.attributes.get("min_mireds") == 153
assert state.attributes.get("max_mireds") == 500
assert (
state.attributes.get("supported_features")
== SUPPORT_EFFECT | SUPPORT_TRANSITION
)
assert state.attributes.get("supported_color_modes") == ["color_temp", "rgb"]
assert state.attributes.get("color_mode") == "color_temp"
async def test_attributes_rgbww_reduced(hass, mqtt_mock, setup_tasmota):
"""Test state update via MQTT."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 2
config["lt_st"] = 5 # 5 channel light (RGBCW)
config["so"]["82"] = 1 # Reduced CT range
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON"}')
state = hass.states.get("light.test")
assert state.attributes.get("effect_list") == [
"None",
"Wake up",
"Cycle up",
"Cycle down",
"Random",
]
assert state.attributes.get("min_mireds") == 200
assert state.attributes.get("max_mireds") == 380
assert (
state.attributes.get("supported_features")
== SUPPORT_EFFECT | SUPPORT_TRANSITION
)
assert state.attributes.get("supported_color_modes") == ["color_temp", "rgb"]
assert state.attributes.get("color_mode") == "color_temp"
async def test_controlling_state_via_mqtt_on_off(hass, mqtt_mock, setup_tasmota):
"""Test state update via MQTT."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 1
config["so"]["30"] = 1 # Enforce Home Assistant auto-discovery as light
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
state = hass.states.get("light.test")
assert state.state == "unavailable"
assert not state.attributes.get(ATTR_ASSUMED_STATE)
assert "color_mode" not in state.attributes
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
state = hass.states.get("light.test")
assert state.state == STATE_OFF
assert not state.attributes.get(ATTR_ASSUMED_STATE)
assert "color_mode" not in state.attributes
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON"}')
state = hass.states.get("light.test")
assert state.state == STATE_ON
assert state.attributes.get("color_mode") == "onoff"
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"OFF"}')
state = hass.states.get("light.test")
assert state.state == STATE_OFF
assert "color_mode" not in state.attributes
async_fire_mqtt_message(hass, "tasmota_49A3BC/stat/RESULT", '{"POWER":"ON"}')
state = hass.states.get("light.test")
assert state.state == STATE_ON
assert state.attributes.get("color_mode") == "onoff"
async_fire_mqtt_message(hass, "tasmota_49A3BC/stat/RESULT", '{"POWER":"OFF"}')
state = hass.states.get("light.test")
assert state.state == STATE_OFF
assert "color_mode" not in state.attributes
async def test_controlling_state_via_mqtt_ct(hass, mqtt_mock, setup_tasmota):
"""Test state update via MQTT."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 2
config["lt_st"] = 2 # 2 channel light (CT)
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
state = hass.states.get("light.test")
assert state.state == "unavailable"
assert not state.attributes.get(ATTR_ASSUMED_STATE)
assert "color_mode" not in state.attributes
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
state = hass.states.get("light.test")
assert state.state == STATE_OFF
assert not state.attributes.get(ATTR_ASSUMED_STATE)
assert "color_mode" not in state.attributes
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON"}')
state = hass.states.get("light.test")
assert state.state == STATE_ON
assert state.attributes.get("color_mode") == "color_temp"
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"OFF"}')
state = hass.states.get("light.test")
assert state.state == STATE_OFF
assert "color_mode" not in state.attributes
async_fire_mqtt_message(
hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON","Dimmer":50}'
)
state = hass.states.get("light.test")
assert state.state == STATE_ON
assert state.attributes.get("brightness") == 127.5
assert state.attributes.get("color_mode") == "color_temp"
async_fire_mqtt_message(
hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON","CT":300}'
)
state = hass.states.get("light.test")
assert state.state == STATE_ON
assert state.attributes.get("color_temp") == 300
assert state.attributes.get("color_mode") == "color_temp"
# Tasmota will send "Color" also for CT light, this should be ignored
async_fire_mqtt_message(
hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON","Color":"255,128"}'
)
state = hass.states.get("light.test")
assert state.state == STATE_ON
assert state.attributes.get("color_temp") == 300
assert state.attributes.get("brightness") == 127.5
assert state.attributes.get("color_mode") == "color_temp"
async def test_controlling_state_via_mqtt_rgbw(hass, mqtt_mock, setup_tasmota):
"""Test state update via MQTT."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 2
config["lt_st"] = 4 # 4 channel light (RGBW)
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
state = hass.states.get("light.test")
assert state.state == "unavailable"
assert not state.attributes.get(ATTR_ASSUMED_STATE)
assert "color_mode" not in state.attributes
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
state = hass.states.get("light.test")
assert state.state == STATE_OFF
assert not state.attributes.get(ATTR_ASSUMED_STATE)
assert "color_mode" not in state.attributes
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON"}')
state = hass.states.get("light.test")
assert state.state == STATE_ON
assert state.attributes.get("color_mode") == "rgbw"
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"OFF"}')
state = hass.states.get("light.test")
assert state.state == STATE_OFF
assert "color_mode" not in state.attributes
async_fire_mqtt_message(
hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON","Dimmer":50}'
)
state = hass.states.get("light.test")
assert state.state == STATE_ON
assert state.attributes.get("brightness") == 127.5
assert state.attributes.get("color_mode") == "rgbw"
async_fire_mqtt_message(
hass,
"tasmota_49A3BC/tele/STATE",
'{"POWER":"ON","Color":"128,64,0","White":0}',
)
state = hass.states.get("light.test")
assert state.state == STATE_ON
assert state.attributes.get("brightness") == 127.5
assert state.attributes.get("rgb_color") == (255, 128, 0)
assert state.attributes.get("rgbw_color") == (255, 128, 0, 0)
assert state.attributes.get("color_mode") == "rgbw"
async_fire_mqtt_message(
hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON","White":50}'
)
state = hass.states.get("light.test")
assert state.state == STATE_ON
assert state.attributes.get("brightness") == 127.5
assert state.attributes.get("rgb_color") == (255, 192, 128)
assert state.attributes.get("rgbw_color") == (255, 128, 0, 255)
assert state.attributes.get("color_mode") == "rgbw"
async_fire_mqtt_message(
hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON","Dimmer":0}'
)
state = hass.states.get("light.test")
assert state.state == STATE_ON
assert state.attributes.get("brightness") == 0
assert state.attributes.get("rgb_color") == (0, 0, 0)
assert state.attributes.get("rgbw_color") == (0, 0, 0, 0)
assert state.attributes.get("color_mode") == "rgbw"
async_fire_mqtt_message(
hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON","Scheme":3}'
)
state = hass.states.get("light.test")
assert state.state == STATE_ON
assert state.attributes.get("effect") == "Cycle down"
async_fire_mqtt_message(hass, "tasmota_49A3BC/stat/RESULT", '{"POWER":"ON"}')
state = hass.states.get("light.test")
assert state.state == STATE_ON
async_fire_mqtt_message(hass, "tasmota_49A3BC/stat/RESULT", '{"POWER":"OFF"}')
state = hass.states.get("light.test")
assert state.state == STATE_OFF
async def test_controlling_state_via_mqtt_rgbww(hass, mqtt_mock, setup_tasmota):
"""Test state update via MQTT."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 2
config["lt_st"] = 5 # 5 channel light (RGBCW)
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
state = hass.states.get("light.test")
assert state.state == "unavailable"
assert not state.attributes.get(ATTR_ASSUMED_STATE)
assert "color_mode" not in state.attributes
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
state = hass.states.get("light.test")
assert state.state == STATE_OFF
assert not state.attributes.get(ATTR_ASSUMED_STATE)
assert "color_mode" not in state.attributes
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON"}')
state = hass.states.get("light.test")
assert state.state == STATE_ON
assert state.attributes.get("color_mode") == "color_temp"
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"OFF"}')
state = hass.states.get("light.test")
assert state.state == STATE_OFF
assert "color_mode" not in state.attributes
async_fire_mqtt_message(
hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON","Dimmer":50}'
)
state = hass.states.get("light.test")
assert state.state == STATE_ON
assert state.attributes.get("brightness") == 127.5
assert state.attributes.get("color_mode") == "color_temp"
async_fire_mqtt_message(
hass,
"tasmota_49A3BC/tele/STATE",
'{"POWER":"ON","Color":"128,64,0","White":0}',
)
state = hass.states.get("light.test")
assert state.state == STATE_ON
assert state.attributes.get("rgb_color") == (255, 128, 0)
assert state.attributes.get("color_mode") == "rgb"
async_fire_mqtt_message(
hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON","White":50}'
)
state = hass.states.get("light.test")
assert state.state == STATE_ON
assert "white_value" not in state.attributes
# Setting white > 0 should clear the color
assert "rgb_color" not in state.attributes
assert state.attributes.get("color_mode") == "color_temp"
async_fire_mqtt_message(
hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON","CT":300}'
)
state = hass.states.get("light.test")
assert state.state == STATE_ON
assert state.attributes.get("color_temp") == 300
assert state.attributes.get("color_mode") == "color_temp"
async_fire_mqtt_message(
hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON","White":0}'
)
state = hass.states.get("light.test")
assert state.state == STATE_ON
# Setting white to 0 should clear the color_temp
assert "white_value" not in state.attributes
assert "color_temp" not in state.attributes
assert state.attributes.get("rgb_color") == (255, 128, 0)
assert state.attributes.get("color_mode") == "rgb"
async_fire_mqtt_message(
hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON","Scheme":3}'
)
state = hass.states.get("light.test")
assert state.state == STATE_ON
assert state.attributes.get("effect") == "Cycle down"
async_fire_mqtt_message(hass, "tasmota_49A3BC/stat/RESULT", '{"POWER":"ON"}')
state = hass.states.get("light.test")
assert state.state == STATE_ON
async_fire_mqtt_message(hass, "tasmota_49A3BC/stat/RESULT", '{"POWER":"OFF"}')
state = hass.states.get("light.test")
assert state.state == STATE_OFF
async def test_controlling_state_via_mqtt_rgbww_hex(hass, mqtt_mock, setup_tasmota):
"""Test state update via MQTT."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 2
config["lt_st"] = 5 # 5 channel light (RGBCW)
config["so"]["17"] = 0 # Hex color in state updates
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
state = hass.states.get("light.test")
assert state.state == "unavailable"
assert not state.attributes.get(ATTR_ASSUMED_STATE)
assert "color_mode" not in state.attributes
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
state = hass.states.get("light.test")
assert state.state == STATE_OFF
assert not state.attributes.get(ATTR_ASSUMED_STATE)
assert "color_mode" not in state.attributes
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON"}')
state = hass.states.get("light.test")
assert state.state == STATE_ON
assert state.attributes.get("color_mode") == "color_temp"
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"OFF"}')
state = hass.states.get("light.test")
assert state.state == STATE_OFF
assert "color_mode" not in state.attributes
async_fire_mqtt_message(
hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON","Dimmer":50}'
)
state = hass.states.get("light.test")
assert state.state == STATE_ON
assert state.attributes.get("brightness") == 127.5
assert state.attributes.get("color_mode") == "color_temp"
async_fire_mqtt_message(
hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON","Color":"804000","White":0}'
)
state = hass.states.get("light.test")
assert state.state == STATE_ON
assert state.attributes.get("rgb_color") == (255, 128, 0)
assert state.attributes.get("color_mode") == "rgb"
async_fire_mqtt_message(
hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON","Color":"0080400000"}'
)
state = hass.states.get("light.test")
assert state.state == STATE_ON
assert state.attributes.get("rgb_color") == (0, 255, 128)
assert state.attributes.get("color_mode") == "rgb"
async_fire_mqtt_message(
hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON","White":50}'
)
state = hass.states.get("light.test")
assert state.state == STATE_ON
assert "white_value" not in state.attributes
# Setting white > 0 should clear the color
assert "rgb_color" not in state.attributes
assert state.attributes.get("color_mode") == "color_temp"
async_fire_mqtt_message(
hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON","CT":300}'
)
state = hass.states.get("light.test")
assert state.state == STATE_ON
assert state.attributes.get("color_temp") == 300
assert state.attributes.get("color_mode") == "color_temp"
async_fire_mqtt_message(
hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON","White":0}'
)
state = hass.states.get("light.test")
assert state.state == STATE_ON
# Setting white to 0 should clear the white_value and color_temp
assert not state.attributes.get("white_value")
assert not state.attributes.get("color_temp")
assert state.attributes.get("color_mode") == "rgb"
async_fire_mqtt_message(
hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON","Scheme":3}'
)
state = hass.states.get("light.test")
assert state.state == STATE_ON
assert state.attributes.get("effect") == "Cycle down"
async_fire_mqtt_message(hass, "tasmota_49A3BC/stat/RESULT", '{"POWER":"ON"}')
state = hass.states.get("light.test")
assert state.state == STATE_ON
async_fire_mqtt_message(hass, "tasmota_49A3BC/stat/RESULT", '{"POWER":"OFF"}')
state = hass.states.get("light.test")
assert state.state == STATE_OFF
async def test_controlling_state_via_mqtt_rgbww_tuya(hass, mqtt_mock, setup_tasmota):
"""Test state update via MQTT."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 2
config["lt_st"] = 5 # 5 channel light (RGBCW)
config["ty"] = 1 # Tuya device
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
state = hass.states.get("light.test")
assert state.state == "unavailable"
assert not state.attributes.get(ATTR_ASSUMED_STATE)
assert "color_mode" not in state.attributes
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
state = hass.states.get("light.test")
assert state.state == STATE_OFF
assert not state.attributes.get(ATTR_ASSUMED_STATE)
assert "color_mode" not in state.attributes
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON"}')
state = hass.states.get("light.test")
assert state.state == STATE_ON
assert state.attributes.get("color_mode") == "color_temp"
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"OFF"}')
state = hass.states.get("light.test")
assert state.state == STATE_OFF
assert "color_mode" not in state.attributes
async_fire_mqtt_message(
hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON","Dimmer":50}'
)
state = hass.states.get("light.test")
assert state.state == STATE_ON
assert state.attributes.get("brightness") == 127.5
assert state.attributes.get("color_mode") == "color_temp"
async_fire_mqtt_message(
hass,
"tasmota_49A3BC/tele/STATE",
'{"POWER":"ON","Color":"128,64,0","White":0}',
)
state = hass.states.get("light.test")
assert state.state == STATE_ON
assert state.attributes.get("rgb_color") == (255, 128, 0)
assert state.attributes.get("color_mode") == "rgb"
async_fire_mqtt_message(
hass,
"tasmota_49A3BC/tele/STATE",
'{"POWER":"ON","Dimmer":0}',
)
state = hass.states.get("light.test")
assert state.state == STATE_ON
assert state.attributes.get("rgb_color") == (0, 0, 0)
assert state.attributes.get("color_mode") == "rgb"
async_fire_mqtt_message(
hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON","Dimmer":50,"White":50}'
)
state = hass.states.get("light.test")
assert state.state == STATE_ON
assert "white_value" not in state.attributes
# Setting white > 0 should clear the color
assert "rgb_color" not in state.attributes
assert state.attributes.get("color_mode") == "color_temp"
async_fire_mqtt_message(
hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON","CT":300}'
)
state = hass.states.get("light.test")
assert state.state == STATE_ON
assert state.attributes.get("color_temp") == 300
assert state.attributes.get("color_mode") == "color_temp"
async_fire_mqtt_message(
hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON","White":0}'
)
state = hass.states.get("light.test")
assert state.state == STATE_ON
# Setting white to 0 should clear the white_value and color_temp
assert not state.attributes.get("white_value")
assert not state.attributes.get("color_temp")
assert state.attributes.get("color_mode") == "rgb"
async_fire_mqtt_message(
hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON","Scheme":3}'
)
state = hass.states.get("light.test")
assert state.state == STATE_ON
assert state.attributes.get("effect") == "Cycle down"
async_fire_mqtt_message(hass, "tasmota_49A3BC/stat/RESULT", '{"POWER":"ON"}')
state = hass.states.get("light.test")
assert state.state == STATE_ON
async_fire_mqtt_message(hass, "tasmota_49A3BC/stat/RESULT", '{"POWER":"OFF"}')
state = hass.states.get("light.test")
assert state.state == STATE_OFF
async def test_sending_mqtt_commands_on_off(hass, mqtt_mock, setup_tasmota):
"""Test the sending MQTT commands."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 1
config["so"]["30"] = 1 # Enforce Home Assistant auto-discovery as light
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
state = hass.states.get("light.test")
assert state.state == STATE_OFF
await hass.async_block_till_done()
await hass.async_block_till_done()
mqtt_mock.async_publish.reset_mock()
# Turn the light on and verify MQTT message is sent
await common.async_turn_on(hass, "light.test")
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Power1", "ON", 0, False
)
mqtt_mock.async_publish.reset_mock()
# Tasmota is not optimistic, the state should still be off
state = hass.states.get("light.test")
assert state.state == STATE_OFF
# Turn the light off and verify MQTT message is sent
await common.async_turn_off(hass, "light.test")
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Power1", "OFF", 0, False
)
mqtt_mock.async_publish.reset_mock()
async def test_sending_mqtt_commands_rgbww_tuya(hass, mqtt_mock, setup_tasmota):
"""Test the sending MQTT commands."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 2
config["lt_st"] = 5 # 5 channel light (RGBCW)
config["ty"] = 1 # Tuya device
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
state = hass.states.get("light.test")
assert state.state == STATE_OFF
await hass.async_block_till_done()
await hass.async_block_till_done()
mqtt_mock.async_publish.reset_mock()
# Turn the light on and verify MQTT message is sent
await common.async_turn_on(hass, "light.test")
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog", "NoDelay;Power1 ON", 0, False
)
mqtt_mock.async_publish.reset_mock()
# Tasmota is not optimistic, the state should still be off
state = hass.states.get("light.test")
assert state.state == STATE_OFF
# Turn the light off and verify MQTT message is sent
await common.async_turn_off(hass, "light.test")
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog", "NoDelay;Power1 OFF", 0, False
)
mqtt_mock.async_publish.reset_mock()
# Turn the light on and verify MQTT messages are sent
await common.async_turn_on(hass, "light.test", brightness=192)
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog", "NoDelay;Dimmer3 75", 0, False
)
async def test_sending_mqtt_commands_rgbw_legacy(hass, mqtt_mock, setup_tasmota):
"""Test the sending MQTT commands."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["sw"] = "9.4.0.3" # RGBW support was added in 9.4.0.4
config["rl"][0] = 2
config["lt_st"] = 4 # 4 channel light (RGBW)
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
state = hass.states.get("light.test")
assert state.state == STATE_OFF
await hass.async_block_till_done()
await hass.async_block_till_done()
mqtt_mock.async_publish.reset_mock()
# Turn the light on and verify MQTT message is sent
await common.async_turn_on(hass, "light.test")
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog", "NoDelay;Power1 ON", 0, False
)
mqtt_mock.async_publish.reset_mock()
# Tasmota is not optimistic, the state should still be off
state = hass.states.get("light.test")
assert state.state == STATE_OFF
# Turn the light off and verify MQTT message is sent
await common.async_turn_off(hass, "light.test")
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog", "NoDelay;Power1 OFF", 0, False
)
mqtt_mock.async_publish.reset_mock()
# Turn the light on and verify MQTT messages are sent
await common.async_turn_on(hass, "light.test", brightness=192)
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog", "NoDelay;Dimmer 75", 0, False
)
mqtt_mock.async_publish.reset_mock()
# Set color when setting color
await common.async_turn_on(hass, "light.test", rgb_color=[128, 64, 32])
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog",
"NoDelay;Power1 ON;NoDelay;Color2 128,64,32",
0,
False,
)
mqtt_mock.async_publish.reset_mock()
# Set color when setting white is off
await common.async_turn_on(hass, "light.test", rgbw_color=[128, 64, 32, 0])
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog",
"NoDelay;Power1 ON;NoDelay;Color2 128,64,32",
0,
False,
)
mqtt_mock.async_publish.reset_mock()
# Set white when white is on
await common.async_turn_on(hass, "light.test", rgbw_color=[16, 64, 32, 128])
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog",
"NoDelay;Power1 ON;NoDelay;White 50",
0,
False,
)
mqtt_mock.async_publish.reset_mock()
await common.async_turn_on(hass, "light.test", white_value=128)
# white_value should be ignored
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog",
"NoDelay;Power1 ON",
0,
False,
)
mqtt_mock.async_publish.reset_mock()
await common.async_turn_on(hass, "light.test", effect="Random")
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog",
"NoDelay;Power1 ON;NoDelay;Scheme 4",
0,
False,
)
mqtt_mock.async_publish.reset_mock()
async def test_sending_mqtt_commands_rgbw(hass, mqtt_mock, setup_tasmota):
"""Test the sending MQTT commands."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 2
config["lt_st"] = 4 # 4 channel light (RGBW)
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
state = hass.states.get("light.test")
assert state.state == STATE_OFF
await hass.async_block_till_done()
await hass.async_block_till_done()
mqtt_mock.async_publish.reset_mock()
# Turn the light on and verify MQTT message is sent
await common.async_turn_on(hass, "light.test")
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog", "NoDelay;Power1 ON", 0, False
)
mqtt_mock.async_publish.reset_mock()
# Tasmota is not optimistic, the state should still be off
state = hass.states.get("light.test")
assert state.state == STATE_OFF
# Turn the light off and verify MQTT message is sent
await common.async_turn_off(hass, "light.test")
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog", "NoDelay;Power1 OFF", 0, False
)
mqtt_mock.async_publish.reset_mock()
# Turn the light on and verify MQTT messages are sent
await common.async_turn_on(hass, "light.test", brightness=192)
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog", "NoDelay;Dimmer4 75", 0, False
)
mqtt_mock.async_publish.reset_mock()
# Set color when setting color
await common.async_turn_on(hass, "light.test", rgb_color=[128, 64, 32])
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog",
"NoDelay;Power1 ON;NoDelay;Color2 128,64,32",
0,
False,
)
mqtt_mock.async_publish.reset_mock()
# Set color when setting white is off
await common.async_turn_on(hass, "light.test", rgbw_color=[128, 64, 32, 0])
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog",
"NoDelay;Power1 ON;NoDelay;Color2 128,64,32,0",
0,
False,
)
mqtt_mock.async_publish.reset_mock()
# Set white when white is on
await common.async_turn_on(hass, "light.test", rgbw_color=[16, 64, 32, 128])
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog",
"NoDelay;Power1 ON;NoDelay;Color2 16,64,32,128",
0,
False,
)
mqtt_mock.async_publish.reset_mock()
await common.async_turn_on(hass, "light.test", white_value=128)
# white_value should be ignored
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog",
"NoDelay;Power1 ON",
0,
False,
)
mqtt_mock.async_publish.reset_mock()
await common.async_turn_on(hass, "light.test", effect="Random")
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog",
"NoDelay;Power1 ON;NoDelay;Scheme 4",
0,
False,
)
mqtt_mock.async_publish.reset_mock()
async def test_sending_mqtt_commands_rgbww(hass, mqtt_mock, setup_tasmota):
"""Test the sending MQTT commands."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 2
config["lt_st"] = 5 # 5 channel light (RGBCW)
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
state = hass.states.get("light.test")
assert state.state == STATE_OFF
await hass.async_block_till_done()
await hass.async_block_till_done()
mqtt_mock.async_publish.reset_mock()
# Turn the light on and verify MQTT message is sent
await common.async_turn_on(hass, "light.test")
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog", "NoDelay;Power1 ON", 0, False
)
mqtt_mock.async_publish.reset_mock()
# Tasmota is not optimistic, the state should still be off
state = hass.states.get("light.test")
assert state.state == STATE_OFF
# Turn the light off and verify MQTT message is sent
await common.async_turn_off(hass, "light.test")
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog", "NoDelay;Power1 OFF", 0, False
)
mqtt_mock.async_publish.reset_mock()
# Turn the light on and verify MQTT messages are sent
await common.async_turn_on(hass, "light.test", brightness=192)
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog", "NoDelay;Dimmer 75", 0, False
)
mqtt_mock.async_publish.reset_mock()
await common.async_turn_on(hass, "light.test", rgb_color=[128, 64, 32])
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog",
"NoDelay;Power1 ON;NoDelay;Color2 128,64,32",
0,
False,
)
mqtt_mock.async_publish.reset_mock()
await common.async_turn_on(hass, "light.test", color_temp=200)
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog",
"NoDelay;Power1 ON;NoDelay;CT 200",
0,
False,
)
mqtt_mock.async_publish.reset_mock()
await common.async_turn_on(hass, "light.test", white_value=128)
# white_value should be ignored
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog",
"NoDelay;Power1 ON",
0,
False,
)
mqtt_mock.async_publish.reset_mock()
await common.async_turn_on(hass, "light.test", effect="Random")
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog",
"NoDelay;Power1 ON;NoDelay;Scheme 4",
0,
False,
)
mqtt_mock.async_publish.reset_mock()
async def test_sending_mqtt_commands_power_unlinked(hass, mqtt_mock, setup_tasmota):
"""Test the sending MQTT commands to a light with unlinked dimlevel and power."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 2
config["lt_st"] = 1 # 1 channel light (dimmer)
config["so"]["20"] = 1 # Update of Dimmer/Color/CT without turning power on
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
state = hass.states.get("light.test")
assert state.state == STATE_OFF
await hass.async_block_till_done()
await hass.async_block_till_done()
mqtt_mock.async_publish.reset_mock()
# Turn the light on and verify MQTT message is sent
await common.async_turn_on(hass, "light.test")
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog", "NoDelay;Power1 ON", 0, False
)
mqtt_mock.async_publish.reset_mock()
# Tasmota is not optimistic, the state should still be off
state = hass.states.get("light.test")
assert state.state == STATE_OFF
# Turn the light off and verify MQTT message is sent
await common.async_turn_off(hass, "light.test")
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog", "NoDelay;Power1 OFF", 0, False
)
mqtt_mock.async_publish.reset_mock()
# Turn the light on and verify MQTT messages are sent; POWER should be sent
await common.async_turn_on(hass, "light.test", brightness=192)
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog",
"NoDelay;Dimmer 75;NoDelay;Power1 ON",
0,
False,
)
mqtt_mock.async_publish.reset_mock()
async def test_transition(hass, mqtt_mock, setup_tasmota):
"""Test transition commands."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 2
config["lt_st"] = 5 # 5 channel light (RGBCW)
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
state = hass.states.get("light.test")
assert state.state == STATE_OFF
await hass.async_block_till_done()
await hass.async_block_till_done()
mqtt_mock.async_publish.reset_mock()
# Dim the light from 0->100: Speed should be 4*2=8
await common.async_turn_on(hass, "light.test", brightness=255, transition=4)
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog",
"NoDelay;Fade2 1;NoDelay;Speed2 8;NoDelay;Dimmer 100",
0,
False,
)
mqtt_mock.async_publish.reset_mock()
# Dim the light from 0->100: Speed should be capped at 40
await common.async_turn_on(hass, "light.test", brightness=255, transition=100)
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog",
"NoDelay;Fade2 1;NoDelay;Speed2 40;NoDelay;Dimmer 100",
0,
False,
)
mqtt_mock.async_publish.reset_mock()
# Dim the light from 0->0: Speed should be 1
await common.async_turn_on(hass, "light.test", brightness=0, transition=100)
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog",
"NoDelay;Fade2 1;NoDelay;Speed2 1;NoDelay;Power1 OFF",
0,
False,
)
mqtt_mock.async_publish.reset_mock()
# Dim the light from 0->50: Speed should be 4*2*2=16
await common.async_turn_on(hass, "light.test", brightness=128, transition=4)
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog",
"NoDelay;Fade2 1;NoDelay;Speed2 16;NoDelay;Dimmer 50",
0,
False,
)
mqtt_mock.async_publish.reset_mock()
# Fake state update from the light
async_fire_mqtt_message(
hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON","Dimmer":50}'
)
state = hass.states.get("light.test")
assert state.state == STATE_ON
assert state.attributes.get("brightness") == 127.5
# Dim the light from 50->0: Speed should be 6*2*2=24
await common.async_turn_off(hass, "light.test", transition=6)
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog",
"NoDelay;Fade2 1;NoDelay;Speed2 24;NoDelay;Power1 OFF",
0,
False,
)
mqtt_mock.async_publish.reset_mock()
# Fake state update from the light
async_fire_mqtt_message(
hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON","Dimmer":100}'
)
state = hass.states.get("light.test")
assert state.state == STATE_ON
assert state.attributes.get("brightness") == 255
# Dim the light from 100->0: Speed should be 0
await common.async_turn_off(hass, "light.test", transition=0)
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog",
"NoDelay;Fade2 0;NoDelay;Power1 OFF",
0,
False,
)
mqtt_mock.async_publish.reset_mock()
# Fake state update from the light
async_fire_mqtt_message(
hass,
"tasmota_49A3BC/tele/STATE",
'{"POWER":"ON","Dimmer":50, "Color":"0,255,0", "White":0}',
)
state = hass.states.get("light.test")
assert state.state == STATE_ON
assert state.attributes.get("brightness") == 127.5
assert state.attributes.get("rgb_color") == (0, 255, 0)
# Set color of the light from 0,255,0 to 255,0,0 @ 50%: Speed should be 6*2*2=24
await common.async_turn_on(hass, "light.test", rgb_color=[255, 0, 0], transition=6)
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog",
"NoDelay;Fade2 1;NoDelay;Speed2 24;NoDelay;Power1 ON;NoDelay;Color2 255,0,0",
0,
False,
)
mqtt_mock.async_publish.reset_mock()
# Fake state update from the light
async_fire_mqtt_message(
hass,
"tasmota_49A3BC/tele/STATE",
'{"POWER":"ON","Dimmer":100, "Color":"0,255,0"}',
)
state = hass.states.get("light.test")
assert state.state == STATE_ON
assert state.attributes.get("brightness") == 255
assert state.attributes.get("rgb_color") == (0, 255, 0)
# Set color of the light from 0,255,0 to 255,0,0 @ 100%: Speed should be 6*2=12
await common.async_turn_on(hass, "light.test", rgb_color=[255, 0, 0], transition=6)
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog",
"NoDelay;Fade2 1;NoDelay;Speed2 12;NoDelay;Power1 ON;NoDelay;Color2 255,0,0",
0,
False,
)
mqtt_mock.async_publish.reset_mock()
# Fake state update from the light
async_fire_mqtt_message(
hass,
"tasmota_49A3BC/tele/STATE",
'{"POWER":"ON","Dimmer":50, "CT":153, "White":50}',
)
state = hass.states.get("light.test")
assert state.state == STATE_ON
assert state.attributes.get("brightness") == 127.5
assert state.attributes.get("color_temp") == 153
# Set color_temp of the light from 153 to 500 @ 50%: Speed should be 6*2*2=24
await common.async_turn_on(hass, "light.test", color_temp=500, transition=6)
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog",
"NoDelay;Fade2 1;NoDelay;Speed2 24;NoDelay;Power1 ON;NoDelay;CT 500",
0,
False,
)
mqtt_mock.async_publish.reset_mock()
# Fake state update from the light
async_fire_mqtt_message(
hass, "tasmota_49A3BC/tele/STATE", '{"POWER":"ON","Dimmer":50, "CT":500}'
)
state = hass.states.get("light.test")
assert state.state == STATE_ON
assert state.attributes.get("brightness") == 127.5
assert state.attributes.get("color_temp") == 500
# Set color_temp of the light from 500 to 326 @ 50%: Speed should be 6*2*2*2=48->40
await common.async_turn_on(hass, "light.test", color_temp=326, transition=6)
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog",
"NoDelay;Fade2 1;NoDelay;Speed2 40;NoDelay;Power1 ON;NoDelay;CT 326",
0,
False,
)
mqtt_mock.async_publish.reset_mock()
async def test_transition_fixed(hass, mqtt_mock, setup_tasmota):
"""Test transition commands."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 2
config["lt_st"] = 5 # 5 channel light (RGBCW)
config["so"]["117"] = 1 # fading at fixed duration instead of fixed slew rate
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
state = hass.states.get("light.test")
assert state.state == STATE_OFF
await hass.async_block_till_done()
await hass.async_block_till_done()
mqtt_mock.async_publish.reset_mock()
# Dim the light from 0->100: Speed should be 4*2=8
await common.async_turn_on(hass, "light.test", brightness=255, transition=4)
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog",
"NoDelay;Fade2 1;NoDelay;Speed2 8;NoDelay;Dimmer 100",
0,
False,
)
mqtt_mock.async_publish.reset_mock()
# Dim the light from 0->100: Speed should be capped at 40
await common.async_turn_on(hass, "light.test", brightness=255, transition=100)
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog",
"NoDelay;Fade2 1;NoDelay;Speed2 40;NoDelay;Dimmer 100",
0,
False,
)
mqtt_mock.async_publish.reset_mock()
# Dim the light from 0->0: Speed should be 4*2=8
await common.async_turn_on(hass, "light.test", brightness=0, transition=4)
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog",
"NoDelay;Fade2 1;NoDelay;Speed2 8;NoDelay;Power1 OFF",
0,
False,
)
mqtt_mock.async_publish.reset_mock()
# Dim the light from 0->50: Speed should be 4*2=8
await common.async_turn_on(hass, "light.test", brightness=128, transition=4)
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog",
"NoDelay;Fade2 1;NoDelay;Speed2 8;NoDelay;Dimmer 50",
0,
False,
)
mqtt_mock.async_publish.reset_mock()
# Dim the light from 0->50: Speed should be 0
await common.async_turn_on(hass, "light.test", brightness=128, transition=0)
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog",
"NoDelay;Fade2 0;NoDelay;Dimmer 50",
0,
False,
)
mqtt_mock.async_publish.reset_mock()
async def test_relay_as_light(hass, mqtt_mock, setup_tasmota):
"""Test relay show up as light in light mode."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 1
config["so"]["30"] = 1 # Enforce Home Assistant auto-discovery as light
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
state = hass.states.get("switch.test")
assert state is None
state = hass.states.get("light.test")
assert state is not None
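# Shared helper: checks that each split channel is exposed as its own dimmer and that
# turn-on/brightness commands target the matching Power<N>/Channel<N> index.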
async def _test_split_light(hass, mqtt_mock, config, num_lights, num_switches):
"""Test multi-channel light split to single-channel dimmers."""
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
await hass.async_block_till_done()
await hass.async_block_till_done()
assert len(hass.states.async_entity_ids("switch")) == num_switches
assert len(hass.states.async_entity_ids("light")) == num_lights
lights = hass.states.async_entity_ids("light")
for idx, entity in enumerate(lights):
mqtt_mock.async_publish.reset_mock()
# Turn the light on and verify MQTT message is sent
await common.async_turn_on(hass, entity)
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog",
f"NoDelay;Power{idx+num_switches+1} ON",
0,
False,
)
mqtt_mock.async_publish.reset_mock()
# Dim the light and verify MQTT message is sent
await common.async_turn_on(hass, entity, brightness=(idx + 1) * 25.5)
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog",
f"NoDelay;Channel{idx+num_switches+1} {(idx+1)*10}",
0,
False,
)
async def test_split_light(hass, mqtt_mock, setup_tasmota):
"""Test multi-channel light split to single-channel dimmers."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 2
config["rl"][1] = 2
config["rl"][2] = 2
config["rl"][3] = 2
config["rl"][4] = 2
config["so"][68] = 1 # Multi-channel PWM instead of a single light
config["lt_st"] = 5 # 5 channel light (RGBCW)
await _test_split_light(hass, mqtt_mock, config, 5, 0)
async def test_split_light2(hass, mqtt_mock, setup_tasmota):
"""Test multi-channel light split to single-channel dimmers."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 1
config["rl"][1] = 1
config["rl"][2] = 2
config["rl"][3] = 2
config["rl"][4] = 2
config["rl"][5] = 2
config["rl"][6] = 2
config["so"][68] = 1 # Multi-channel PWM instead of a single light
config["lt_st"] = 5 # 5 channel light (RGBCW)
await _test_split_light(hass, mqtt_mock, config, 5, 2)
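# Shared helper: an unlinked RGBWW light is exposed as two lights (RGB + white),
# each dimmed via its own Dimmer1/Dimmer2 command.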
async def _test_unlinked_light(hass, mqtt_mock, config, num_switches):
"""Test rgbww light split to rgb+ww."""
mac = config["mac"]
num_lights = 2
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
await hass.async_block_till_done()
await hass.async_block_till_done()
assert len(hass.states.async_entity_ids("switch")) == num_switches
assert len(hass.states.async_entity_ids("light")) == num_lights
lights = hass.states.async_entity_ids("light")
for idx, entity in enumerate(lights):
mqtt_mock.async_publish.reset_mock()
# Turn the light on and verify MQTT message is sent
await common.async_turn_on(hass, entity)
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog",
f"NoDelay;Power{idx+num_switches+1} ON",
0,
False,
)
mqtt_mock.async_publish.reset_mock()
# Dim the light and verify MQTT message is sent
await common.async_turn_on(hass, entity, brightness=(idx + 1) * 25.5)
mqtt_mock.async_publish.assert_called_once_with(
"tasmota_49A3BC/cmnd/Backlog",
f"NoDelay;Dimmer{idx+1} {(idx+1)*10}",
0,
False,
)
async def test_unlinked_light(hass, mqtt_mock, setup_tasmota):
"""Test rgbww light split to rgb+ww."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 2
config["rl"][1] = 2
config["lk"] = 0 # RGB + white channels unlinked
config["lt_st"] = 5 # 5 channel light (RGBCW)
await _test_unlinked_light(hass, mqtt_mock, config, 0)
async def test_unlinked_light2(hass, mqtt_mock, setup_tasmota):
"""Test rgbww light split to rgb+ww."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 1
config["rl"][1] = 1
config["rl"][2] = 2
config["rl"][3] = 2
config["lk"] = 0 # RGB + white channels unlinked
config["lt_st"] = 5 # 5 channel light (RGBCW)
await _test_unlinked_light(hass, mqtt_mock, config, 2)
async def test_discovery_update_reconfigure_light(
hass, mqtt_mock, caplog, setup_tasmota
):
"""Test reconfigure of discovered light."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 2
config["lt_st"] = 1 # 1 channel light (Dimmer)
config2 = copy.deepcopy(DEFAULT_CONFIG)
config2["rl"][0] = 2
config2["lt_st"] = 3 # 3 channel light (RGB)
data1 = json.dumps(config)
data2 = json.dumps(config2)
# Simple dimmer
async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{config[CONF_MAC]}/config", data1)
await hass.async_block_till_done()
state = hass.states.get("light.test")
assert state.attributes.get("supported_features") == SUPPORT_TRANSITION
assert state.attributes.get("supported_color_modes") == ["brightness"]
# Reconfigure as RGB light
async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{config[CONF_MAC]}/config", data2)
await hass.async_block_till_done()
state = hass.states.get("light.test")
assert (
state.attributes.get("supported_features")
== SUPPORT_EFFECT | SUPPORT_TRANSITION
)
assert state.attributes.get("supported_color_modes") == ["rgb"]
async def test_availability_when_connection_lost(
hass, mqtt_client_mock, mqtt_mock, setup_tasmota
):
"""Test availability after MQTT disconnection."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 2
config["lt_st"] = 1 # 1 channel light (Dimmer)
await help_test_availability_when_connection_lost(
hass, mqtt_client_mock, mqtt_mock, light.DOMAIN, config
)
async def test_availability(hass, mqtt_mock, setup_tasmota):
"""Test availability."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 2
config["lt_st"] = 1 # 1 channel light (Dimmer)
await help_test_availability(hass, mqtt_mock, light.DOMAIN, config)
async def test_availability_discovery_update(hass, mqtt_mock, setup_tasmota):
"""Test availability discovery update."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 2
config["lt_st"] = 1 # 1 channel light (Dimmer)
await help_test_availability_discovery_update(hass, mqtt_mock, light.DOMAIN, config)
async def test_availability_poll_state(
hass, mqtt_client_mock, mqtt_mock, setup_tasmota
):
"""Test polling after MQTT connection (re)established."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 2
config["lt_st"] = 1 # 1 channel light (Dimmer)
poll_topic = "tasmota_49A3BC/cmnd/STATE"
await help_test_availability_poll_state(
hass, mqtt_client_mock, mqtt_mock, light.DOMAIN, config, poll_topic, ""
)
async def test_discovery_removal_light(hass, mqtt_mock, caplog, setup_tasmota):
"""Test removal of discovered light."""
config1 = copy.deepcopy(DEFAULT_CONFIG)
config1["rl"][0] = 2
config1["lt_st"] = 1 # 1 channel light (Dimmer)
config2 = copy.deepcopy(DEFAULT_CONFIG)
config2["rl"][0] = 0
config2["lt_st"] = 0
await help_test_discovery_removal(
hass, mqtt_mock, caplog, light.DOMAIN, config1, config2
)
async def test_discovery_removal_relay_as_light(hass, mqtt_mock, caplog, setup_tasmota):
"""Test removal of discovered relay as light."""
config1 = copy.deepcopy(DEFAULT_CONFIG)
config1["rl"][0] = 1
config1["so"]["30"] = 1 # Enforce Home Assistant auto-discovery as light
config2 = copy.deepcopy(DEFAULT_CONFIG)
config2["rl"][0] = 1
config2["so"]["30"] = 0 # Disable Home Assistant auto-discovery as light
await help_test_discovery_removal(
hass, mqtt_mock, caplog, light.DOMAIN, config1, config2
)
async def test_discovery_removal_relay_as_light2(
hass, mqtt_mock, caplog, setup_tasmota
):
"""Test removal of discovered relay as light."""
config1 = copy.deepcopy(DEFAULT_CONFIG)
config1["rl"][0] = 1
config1["so"]["30"] = 1 # Enforce Home Assistant auto-discovery as light
config2 = copy.deepcopy(DEFAULT_CONFIG)
config2["rl"][0] = 0
config2["so"]["30"] = 0 # Disable Home Assistant auto-discovery as light
await help_test_discovery_removal(
hass, mqtt_mock, caplog, light.DOMAIN, config1, config2
)
async def test_discovery_update_unchanged_light(hass, mqtt_mock, caplog, setup_tasmota):
"""Test update of discovered light."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 2
config["lt_st"] = 1 # 1 channel light (Dimmer)
with patch(
"homeassistant.components.tasmota.light.TasmotaLight.discovery_update"
) as discovery_update:
await help_test_discovery_update_unchanged(
hass, mqtt_mock, caplog, light.DOMAIN, config, discovery_update
)
async def test_discovery_device_remove(hass, mqtt_mock, setup_tasmota):
"""Test device registry remove."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 2
config["lt_st"] = 1 # 1 channel light (Dimmer)
unique_id = f"{DEFAULT_CONFIG['mac']}_light_light_0"
await help_test_discovery_device_remove(
hass, mqtt_mock, light.DOMAIN, unique_id, config
)
async def test_discovery_device_remove_relay_as_light(hass, mqtt_mock, setup_tasmota):
"""Test device registry remove."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 1
config["so"]["30"] = 1 # Enforce Home Assistant auto-discovery as light
unique_id = f"{DEFAULT_CONFIG['mac']}_light_relay_0"
await help_test_discovery_device_remove(
hass, mqtt_mock, light.DOMAIN, unique_id, config
)
async def test_entity_id_update_subscriptions(hass, mqtt_mock, setup_tasmota):
"""Test MQTT subscriptions are managed when entity_id is updated."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 2
config["lt_st"] = 1 # 1 channel light (Dimmer)
topics = [
get_topic_stat_result(config),
get_topic_tele_state(config),
get_topic_tele_will(config),
]
await help_test_entity_id_update_subscriptions(
hass, mqtt_mock, light.DOMAIN, config, topics
)
async def test_entity_id_update_discovery_update(hass, mqtt_mock, setup_tasmota):
"""Test MQTT discovery update when entity_id is updated."""
config = copy.deepcopy(DEFAULT_CONFIG)
config["rl"][0] = 2
config["lt_st"] = 1 # 1 channel light (Dimmer)
await help_test_entity_id_update_discovery_update(
hass, mqtt_mock, light.DOMAIN, config
)
|
{
"content_hash": "e1ce5d7fd1397c46d4c8ee4de9233329",
"timestamp": "",
"source": "github",
"line_count": 1816,
"max_line_length": 88,
"avg_line_length": 35.625,
"alnum_prop": 0.6443310920472989,
"repo_name": "kennedyshead/home-assistant",
"id": "b74799d1d122bd3a16ffc2fdda674e1326636785",
"size": "64695",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "tests/components/tasmota/test_light.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1795"
},
{
"name": "Python",
"bytes": "33970989"
},
{
"name": "Shell",
"bytes": "4900"
}
],
"symlink_target": ""
}
|
import traceback
from urllib.request import urlopen
import json
from time import sleep
import geomath
import math
from datetime import datetime
from configparser import ConfigParser
# Read the configuration file for this application.
parser = ConfigParser()
parser.read('config.ini')
# Assign receiver variables.
receiver_latitude = float(parser.get('receiver', 'latitude'))
receiver_longitude = float(parser.get('receiver', 'longitude'))
class FlightData():
def __init__(self, data_url=None, parser=None):
self.data_url = data_url
self.parser = parser
self.aircraft = None
self.refresh()
def refresh(self):
try:
#open the data url
self.req = urlopen(self.data_url)
#read data from the url
self.raw_data = self.req.read()
#load in the json
self.json_data = json.loads(self.raw_data.decode())
#get time from json
self.time = datetime.fromtimestamp(self.parser.time(self.json_data))
#load all the aircraft
self.aircraft = self.parser.aircraft_data(self.json_data, self.time)
except Exception:
print("exception in FlightData.refresh():")
traceback.print_exc()
class AirCraftData():
def __init__(self,
dhex,
squawk,
flight,
registration,
lat,
lon,
altitude,
vert_rate,
track,
speed,
messages,
seen,
mlat,
nucp,
seen_pos,
rssi,
dist,
az,
el,
time):
self.hex = dhex
self.squawk = squawk
self.flight = flight
self.registration = registration
self.lat = lat
self.lon = lon
self.altitude = altitude
self.vert_rate = vert_rate
self.track = track
self.speed = speed
self.messages = messages
self.seen = seen
self.mlat = mlat
self.nucp = nucp
self.seen_pos = seen_pos
self.rssi = rssi
self.distance = dist
self.az = az
self.el = el
self.time = time
def __str__(self):
return '<{} {} dist={} el={}>'.format(
self.__class__.__name__,
self.ident_desc(),
self.distance,
self.el)
def ident_desc(self):
idents = [self.hex, self.registration]
if self.flight != self.registration:
idents.append(self.flight)
idents = [i for i in idents if i]
return '/'.join(idents)
class AircraftDataParser(object):
def __init__(self):
pass
def aircraft_data(self, json_data, time):
raise NotImplementedError
def time(self, json_data):
raise NotImplementedError
class VRSDataParser(AircraftDataParser):
def _parse_aircraft_data(self, a, time):
alt = a.get('Alt', 0)
dist = -1
az = 0
el = 0
if 'Lat' in a and 'Long' in a:
rec_pos = (receiver_latitude, receiver_longitude)
ac_pos = (a['Lat'], a['Long'])
dist = geomath.distance(rec_pos, ac_pos)
az = geomath.bearing(rec_pos, ac_pos)
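# Elevation angle above the horizon: altitude is in feet, distance in statute miles (1 mile = 5280 ft).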
el = math.degrees(math.atan(alt / (dist * 5280)))
speed = 0
if 'Spd' in a:
speed = geomath.knot2mph(a['Spd'])
if 'PosTime' in a:
last_seen_time = datetime.fromtimestamp(a['PosTime'] / 1000.0)
seen = (time - last_seen_time).total_seconds()
else:
seen = 0
ac_data = AirCraftData(
a.get('Icao', None).upper(),
a.get('Sqk', None),
a.get('Call', None),
a.get('Reg', None),
a.get('Lat', None),
a.get('Long', None),
alt,
a.get('Vsi', 0),
a.get('Trak', None),
speed,
a.get('CMsgs', None),
seen,
a.get('Mlat', False),
None, # NUCP
None, # Seen pos
10.0 * math.log10(a.get('Sig', 0) / 255.0 + 1e-5),
dist,
az,
el,
time)
return ac_data
def aircraft_data(self, json_data, time):
aircraft_list = [self._parse_aircraft_data(d, time) for d in json_data['acList']]
return aircraft_list
def time(self, json_data):
return json_data['stm'] / 1000.0
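# Parser for dump1090's aircraft.json output: "now" is the snapshot timestamp and
# "aircraft" holds one record per tracked plane.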
class Dump1090DataParser(AircraftDataParser):
def aircraft_data(self, json_data, time):
aircraft_list = []
for a in json_data["aircraft"]:
alt = a["altitude"] if "altitude" in a else 0
if alt == "ground":
alt = 0
dist = -1
az = 0
el = 0
if "lat" in a and "lon" in a:
dist = geomath.distance((receiver_latitude, receiver_longitude), (a["lat"], a["lon"]))
az = geomath.bearing((receiver_latitude, receiver_longitude), (a["lat"], a["lon"]))
el = math.degrees(math.atan(alt / (dist*5280)))
speed = 0
if "speed" in a:
speed = geomath.knot2mph(a["speed"])
aircraftdata = AirCraftData(
a["hex"].upper() if "hex" in a else None,
a["squawk"] if "squawk" in a else None,
a["flight"] if "flight" in a else None,
None,
a["lat"] if "lat" in a else None,
a["lon"] if "lon" in a else None,
alt,
a["vert_rate"] if "vert_rate" in a else 0,
a["track"] if "track" in a else None,
speed,
a["messages"] if "messages" in a else None,
a["seen"] if "seen" in a else None,
a["mlat"] if "mlat" in a else None,
a["nucp"] if "nucp" in a else None,
a["seen_pos"] if "seen_pos" in a else None,
a["rssi"] if "rssi" in a else None,
dist,
az,
el,
time)
aircraft_list.append(aircraftdata)
return aircraft_list
def time(self, json_data):
return json_data['now']
if __name__ == "__main__":
import os
# Example usage, assuming a local dump1090 feed; the URL below is a placeholder, adjust it for your receiver.
flightdata = FlightData(data_url="http://localhost:8080/data/aircraft.json", parser=Dump1090DataParser())
while True:
os.system('clear')
print("Now: {}".format(flightdata.time.strftime('%Y-%m-%d %H:%M:%S')))
print("| icao | flight | miles | az | el | alt | mi/h | vert | rssi | mesgs | seen |")
print("|---------+---------+-------+-------+------+-------+-------+-------+-------+-------+------|")
sortedlist = []
for a in flightdata.aircraft:
if a.lat is None or a.lon is None:
continue
sortedlist.append(a)
sortedlist.sort(key=lambda x: x.distance) # actually do the sorting here
for a in sortedlist:
print("| {:<7} | {:^8}| {:>5} | {:>5} | {:>4} | {:>5} | {:>5} | {:>+5} | {:>5} | {:>5} | {:>4} |".format(
a.hex,
a.flight,
"%.1f" % a.distance,
"%.1f" % a.az,
"%.1f" % a.el,
a.altitude,
"%.1f" % a.speed,
a.vert_rate,
"%0.1f" % a.rssi,
a.messages,
"%.1f" % a.seen))
sleep(0.5)
flightdata.refresh()
|
{
"content_hash": "df12f76ce0b55278f82706c5582418a1",
"timestamp": "",
"source": "github",
"line_count": 246,
"max_line_length": 117,
"avg_line_length": 30.963414634146343,
"alnum_prop": 0.4705264539845083,
"repo_name": "kevinabrandon/AboveTustin",
"id": "167644f2dcfe5f6cf6cef9282693395898e34b56",
"size": "8941",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "flightdata.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "29873"
},
{
"name": "Shell",
"bytes": "413"
}
],
"symlink_target": ""
}
|
import unittest
import seed_database
class parseIntoProbabilityHashTests(unittest.TestCase):
def testKeySetting(self):
string2 = "This apple is red."
one = "this apple"
two = "apple is"
three = "is red"
outcome = {one: 1, two: 1, three: 1}
probabilityHash2 = seed_database.parseIntoProbabilityHash(string2)
self.assertEqual(probabilityHash2, outcome)
def testIncrementingValue(self):
string = "How are you doing today, my friend? Are you happy or are you sad? How are your kids doing today?"
probabilityHash = seed_database.parseIntoProbabilityHash(string)
self.assertEqual(probabilityHash["are you"], 3)
def test_formatting_hash_method(self):
string1 = "hello there"
string2 = "goodbye there"
hash = {"hello there": 1}
result1 = seed_database.format_hash(hash, string1)
self.assertEqual(result1, {"hello there": 2})
result2 = seed_database.format_hash(hash, string2)
self.assertEqual(result2, {"hello there": 1, "goodbye there": 1})
def main():
unittest.main()
if __name__ == '__main__':
main()
|
{
"content_hash": "172f75186af176b9db3b19fc9474813c",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 111,
"avg_line_length": 32.81818181818182,
"alnum_prop": 0.6897506925207756,
"repo_name": "hollabaq86/haikuna-matata",
"id": "1ad6af97cfd12c3f21cd00a19fef64a29785c5d9",
"size": "1083",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "seed_database_test.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2628"
},
{
"name": "HTML",
"bytes": "6169"
},
{
"name": "JavaScript",
"bytes": "2841"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "19501"
}
],
"symlink_target": ""
}
|
import flask
from flask import Flask, render_template, request
import json
import time
app = Flask(__name__)
form_post_count = 0
# Ensure application exceptions are raised.
app.debug = True
class AppError(Exception):
pass
@app.route("/")
def hello():
resp = flask.make_response("""Hello world! <a href="with_html">Relative</a>""")
resp.set_cookie("capybara", "root cookie")
return resp
@app.route("/foo")
def foo():
return "Another World"
@app.route("/redirect", methods=["GET", "POST"])
def redirect():
return flask.redirect("/redirect_again")
@app.route("/redirect_again")
def redirect_again():
return flask.redirect("/landed")
@app.route("/redirect_307", methods=["POST"])
def redirect_307():
return flask.redirect("/landed", code=307)
@app.route("/redirect_308", methods=["POST"])
def redirect_308():
return flask.redirect("/landed", code=308)
@app.route("/referrer_base")
def referrer_base():
return """
<a href="/get_referrer">direct link</a>
<a href="/redirect_to_get_referrer">link via redirect</a>
<form action="/get_referrer" method="get"><input type="submit"></form>
"""
@app.route("/redirect_to_get_referrer")
def redirect_to_get_referrer():
return flask.redirect("/get_referrer")
@app.route("/get_referrer")
def get_referrer():
return "No referrer" if request.referrer is None else "Got referrer: {0}".format(request.referrer)
@app.route("/host")
def host():
return "Current host is {0}://{1}".format(request.scheme, request.host)
@app.route("/redirect/<int:times>/times")
def redirect_n_times(times):
if times == 0:
return "redirection complete"
else:
return flask.redirect("/redirect/{0}/times".format(times - 1))
@app.route("/landed", methods=["GET"])
def landed():
return "You landed"
@app.route("/landed", methods=["POST"])
def post_landed():
return "You post landed: {}".format(request.form.get('form[data]'))
@app.route("/with-quotes")
def with_quotes():
return "\"No,\" he said, \"you can't do that.\""
@app.route("/form/get", methods=["GET"])
@app.route("/relative", methods=["POST"])
@app.route("/form", methods=["POST"])
def results():
global form_post_count
form_post_count += 1
data = request.args.copy()
data.update(request.form)
data.update({"post_count": form_post_count})
return """<pre id="results">""" + json.dumps(data.to_dict(flat=False)) + """</pre>"""
@app.route("/favicon.ico")
def favicon():
return ""
@app.route("/delete", methods=["DELETE"])
def deleted():
return "The requested object was deleted"
@app.route("/delete", methods=["GET"])
def not_deleted():
return "Not deleted"
@app.route("/redirect_back")
def redirect_back():
return flask.redirect(request.referrer)
@app.route("/redirect_secure")
def redirect_secure():
return flask.redirect("http://{0}/host".format(request.host))
@app.route("/slow_response")
def slow_response():
time.sleep(2)
return "Finally!"
@app.route("/set_cookie")
def set_cookie():
cookie_value = "test_cookie"
resp = flask.make_response("Cookie set to {0}".format(cookie_value))
resp.set_cookie("capybara", cookie_value)
return resp
@app.route("/get_cookie")
def get_cookie():
return request.cookies.get("capybara", "")
@app.route("/get_header")
def get_header():
return request.headers.get("Foo", "")
@app.route("/get_header_via_redirect")
def get_header_via_redirect():
return flask.redirect("/get_header")
@app.route("/error")
def error():
raise AppError()
@app.route("/import_error")
def import_error():
raise ImportError("Simulated ImportError")
@app.route("/with_html")
def with_html():
return render_template("with_html.html")
@app.route("/with_simple_html")
def with_simple_html():
return render_template("with_simple_html.html")
@app.route("/<name>")
def view(name):
return render_template("{}.html".format(name))
@app.route("/upload_empty", methods=["POST"])
def upload_empty():
f = request.files.get("form[file]")
if not f:
return "Successfully ignored empty file field."
else:
return "Something went wrong."
@app.route("/upload", methods=["POST"])
def upload():
document = request.files.get("form[document]")
if document and document.filename:
buf = []
buf.append("Content-type: {0}".format(document.mimetype))
buf.append("File content: {0}".format(document.read()))
return " | ".join(buf)
else:
return "No file uploaded"
@app.route("/upload_multiple", methods=["POST"])
def upload_multiple():
documents = request.files.getlist("form[multiple_documents][]")
documents = [doc for doc in documents if doc.filename]
if len(documents):
buf = [str(len(documents))]
for document in documents:
buf.append("Content-type: {0}".format(document.mimetype))
buf.append("File content: {0}".format(document.read()))
return " | ".join(buf)
else:
return "No files uploaded"
if __name__ == "__main__":
app.run()
|
{
"content_hash": "83944bab6b85dd30e0c9fd20b66b9d9e",
"timestamp": "",
"source": "github",
"line_count": 229,
"max_line_length": 102,
"avg_line_length": 22.32751091703057,
"alnum_prop": 0.6416976334832779,
"repo_name": "elliterate/capybara.py",
"id": "eaa18cb77706ffa6c124ae5d5c08a69ccc6e36a1",
"size": "5113",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "capybara/tests/app/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "38254"
},
{
"name": "JavaScript",
"bytes": "5225"
},
{
"name": "Python",
"bytes": "573480"
}
],
"symlink_target": ""
}
|
import sys
import os
import time
__author__ = 'andymhan'
# Output example: [======= ] 75%
# width defines bar width
# percent defines current percentage
def progress(width, percent):
linestr = "\r%s %d%%\r"%(('%%-%ds' % width) % (int(width * percent/100) * '='), percent)
os.write(1,bytes(linestr, 'UTF-8'))
sys.stdout.flush()
if percent >= 100:
print()
sys.stdout.flush()
# Simulate doing something ...
# for i in range(100):
# progress(100, (i + 1))
# time.sleep(0.1) # Slow it down for demo
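# Hamming distance between two 64-bit simhash fingerprints: XOR the values, then
# count the set bits with Kernighan's trick (each `x &= x - 1` clears the lowest set bit).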
def hamming_distance(hash1, hash2):
hashbits = 64
x = (int(hash1) ^ int(hash2)) & ((1 << hashbits) - 1)
tot = 0
while x:
tot += 1
x &= x-1
return tot
hash1 = 15907347255682217840
hash2 = 15907347667999076208
t = hamming_distance(hash1,hash2)
print(t)
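# Maps event/message database column names to the dotted field paths of the parsed
# records (e.g. 'Res.Event.uiEventID'), presumably used when writing them to the DB.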
class data2db_proc():
def __init__(self):
self.data_eventdb_map = {"strEventID":'Res.Event.uiEventID',
"strSendTime":'Res.Event.uiSendTime',
"strCity":'Res.Event.strCity',
"strContentTime":'Res.Event.strContentTime',
"strContentCity": 'Res.Event.strContentCity',
"uiHot":'Res.Event.uiHot',
"uiEvilScore": 'Res.Event.uiEvilScore',
"strEventInfo": 'Res.Event.strEventInfo',
"struiGuaidianTime":'Res.Event.uiGuaidianTime',
"strAcceleratedSpeed":'Res.Event.uiAcceleratedSpeed',
"strStep1Time":'Res.Event.uiStep1Time',
"strStep2Time":'Res.Event.uiStep2Time',
"strStep3Time":'Res.Event.uiStep3Time',
"strStep4Time":'Res.Event.uiStep4Time',
"uiMsgNum":'Res.Msg.uiMsgNum'}
self.data_msgdb_map = {"strEventID":'Res.Event.uiEventID',
"uiAppID":'Comm.uiAppID',
"uiSpecialClass":'Comm.uiSpecialClass',
"uiOtherClass":'Comm.uiOtherClass',
"ullUin":'Comm.ullUin',
"uiSendIP":'Comm.uiSendIP',
"strSendTime":'Comm.uiSendTime',
"strContentID":'ID.strContentID',
"strTitle":'Content.strTitle',
"strContent":'Content.strContent',
"uiImgNum":'Content.uiImgNum',
"uiVideoNum":'Content.uiVideoNum',
"strContentUrl":'Content.strContentUrl',
"strBuInfo":'Content.strBuInfo',
"strCountry":'Content.strCountry',
"strProvince":'Content.strProvince',
"strCity":'Content.strCity',
"uiHotLevel":'Content.uiHotLevel',
"uiContentStyle":'Content.uiContentStyle',
"ullTtitleSimHash":'Content.ullTtitleSimHash',
"ullContentSimHash":'Content.ullContentSimHash',
"strSplitWords":'Content.strSplitWords',
"uiEvilScore":'Res.Event.uiEvilScore'}
self.evt_data = []
self.msg_data = []
self.evt_db_item = self.data_eventdb_map.keys()
self.msg_db_item = self.data_msgdb_map.keys()
self.evt_db_item_str = ','.join(self.evt_db_item)
t = data2db_proc()
print(t.evt_db_item_str)
print(type(t.evt_db_item_str))
import xlrd
import datetime
data = xlrd.open_workbook('f://tencent//test.xls')
table = data.sheets()[0]
nrows = table.nrows
ncols = table.ncols
row = table.row_values(0)
print(row[0])
type(row[0])
if isinstance(row[0], str):  # `basestring` does not exist in Python 3; a text cell comes back as str
print(row[0])
commit_date = datetime.datetime.strptime(row[0].strip(),"%Y/%m/%d")
else:
commit_date = xlrd.xldate.xldate_as_datetime(row[0],0)
print(commit_date)
commit_date_str = commit_date.strftime("%Y-%m-%d")
print(commit_date_str)
|
{
"content_hash": "da999803e92c06e59f07f8885ac62e10",
"timestamp": "",
"source": "github",
"line_count": 109,
"max_line_length": 92,
"avg_line_length": 32.87155963302752,
"alnum_prop": 0.6008931063354731,
"repo_name": "andymhan/NLP_TEST",
"id": "3d5096425109dbb358db15914fd858b55443c148",
"size": "3583",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "comm_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "1764"
},
{
"name": "Python",
"bytes": "7305"
}
],
"symlink_target": ""
}
|
"""Image represented by either a URI or byte stream."""
from base64 import b64encode
from google.cloud._helpers import _to_bytes
from google.cloud._helpers import _bytes_to_unicode
from google.cloud.vision.annotations import Annotations
from google.cloud.vision.feature import Feature
from google.cloud.vision.feature import FeatureTypes
class Image(object):
"""Image representation containing information to be annotate.
:type content: bytes
:param content: Byte stream of an image.
:type filename: str
:param filename: Filename to image.
:type source_uri: str
:param source_uri: Google Cloud Storage URI of image.
:type client: :class:`~google.cloud.vision.client.Client`
:param client: Instance of Vision client.
"""
def __init__(self, client, content=None, filename=None, source_uri=None):
sources = [source for source in (content, filename, source_uri)
if source is not None]
if len(sources) != 1:
raise ValueError(
'Specify exactly one of "content", "filename", or '
'"source_uri".')
self.client = client
if filename is not None:
with open(filename, 'rb') as file_obj:
content = file_obj.read()
if content is not None:
content = _bytes_to_unicode(b64encode(_to_bytes(content)))
self._content = content
self._source = source_uri
def as_dict(self):
"""Generate dictionary structure for request.
:rtype: dict
:returns: Dictionary with source information for image.
"""
if self.content:
return {
'content': self.content
}
else:
return {
'source': {
'gcs_image_uri': self.source
}
}
@property
def content(self):
"""Base64 encoded image content.
:rtype: str
:returns: Base64 encoded image bytes.
"""
return self._content
@property
def source(self):
"""Google Cloud Storage URI.
:rtype: str
:returns: String of Google Cloud Storage URI.
"""
return self._source
def _detect_annotation(self, features):
"""Generic method for detecting annotations.
:type features: list
:param features: List of :class:`~google.cloud.vision.feature.Feature`
indicating the type of annotations to perform.
:rtype: list
:returns: List of
:class:`~google.cloud.vision.entity.EntityAnnotation`,
:class:`~google.cloud.vision.face.Face`,
:class:`~google.cloud.vision.color.ImagePropertiesAnnotation`,
:class:`~google.cloud.vision.sage.SafeSearchAnnotation`,
"""
results = self.client.annotate(self, features)
return Annotations.from_api_repr(results)
def detect(self, features):
"""Detect multiple feature types.
:type features: list of :class:`~google.cloud.vision.feature.Feature`
        :param features: List of the ``Feature`` objects indicating the type of
annotation to perform.
:rtype: list
:returns: List of
:class:`~google.cloud.vision.entity.EntityAnnotation`.
"""
return self._detect_annotation(features)
def detect_faces(self, limit=10):
"""Detect faces in image.
:type limit: int
:param limit: The number of faces to try and detect.
:rtype: list
:returns: List of :class:`~google.cloud.vision.face.Face`.
"""
features = [Feature(FeatureTypes.FACE_DETECTION, limit)]
annotations = self._detect_annotation(features)
return annotations.faces
def detect_labels(self, limit=10):
"""Detect labels that describe objects in an image.
:type limit: int
:param limit: The maximum number of labels to try and detect.
:rtype: list
:returns: List of :class:`~google.cloud.vision.entity.EntityAnnotation`
"""
features = [Feature(FeatureTypes.LABEL_DETECTION, limit)]
annotations = self._detect_annotation(features)
return annotations.labels
def detect_landmarks(self, limit=10):
"""Detect landmarks in an image.
:type limit: int
:param limit: The maximum number of landmarks to find.
:rtype: list
:returns: List of
:class:`~google.cloud.vision.entity.EntityAnnotation`.
"""
features = [Feature(FeatureTypes.LANDMARK_DETECTION, limit)]
annotations = self._detect_annotation(features)
return annotations.landmarks
def detect_logos(self, limit=10):
"""Detect logos in an image.
:type limit: int
:param limit: The maximum number of logos to find.
:rtype: list
:returns: List of
:class:`~google.cloud.vision.entity.EntityAnnotation`.
"""
features = [Feature(FeatureTypes.LOGO_DETECTION, limit)]
annotations = self._detect_annotation(features)
return annotations.logos
def detect_properties(self, limit=10):
"""Detect the color properties of an image.
:type limit: int
:param limit: The maximum number of image properties to find.
:rtype: list
:returns: List of
:class:`~google.cloud.vision.color.ImagePropertiesAnnotation`.
"""
features = [Feature(FeatureTypes.IMAGE_PROPERTIES, limit)]
annotations = self._detect_annotation(features)
return annotations.properties
def detect_safe_search(self, limit=10):
"""Retreive safe search properties from an image.
:type limit: int
:param limit: The number of faces to try and detect.
:rtype: list
:returns: List of
:class:`~google.cloud.vision.sage.SafeSearchAnnotation`.
"""
features = [Feature(FeatureTypes.SAFE_SEARCH_DETECTION, limit)]
annotations = self._detect_annotation(features)
return annotations.safe_searches
def detect_text(self, limit=10):
"""Detect text in an image.
:type limit: int
:param limit: The maximum instances of text to find.
:rtype: list
:returns: List of
:class:`~google.cloud.vision.entity.EntityAnnotation`.
"""
features = [Feature(FeatureTypes.TEXT_DETECTION, limit)]
annotations = self._detect_annotation(features)
return annotations.texts
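# --- Illustrative usage sketch (not part of the original module) ---
# A minimal example of driving the Image wrapper defined above, assuming a
# configured Vision ``client`` is passed in; the file path is a placeholder.
def _example_annotate(client, filename='/tmp/example.jpg'):
    """Run label and face detection on a local file and print the results."""
    image = Image(client, filename=filename)
    for label in image.detect_labels(limit=5):
        print(label)
    for face in image.detect_faces(limit=2):
        print(face)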
|
{
"content_hash": "955530714f43f8892ce1c30db44613c3",
"timestamp": "",
"source": "github",
"line_count": 208,
"max_line_length": 80,
"avg_line_length": 32.05769230769231,
"alnum_prop": 0.6045290941811637,
"repo_name": "Fkawala/gcloud-python",
"id": "d094b6702537ff0efc9c9379cfb41c1d60d530f2",
"size": "7244",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "vision/google/cloud/vision/image.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "3366"
},
{
"name": "PowerShell",
"bytes": "7195"
},
{
"name": "Protocol Buffer",
"bytes": "89702"
},
{
"name": "Python",
"bytes": "3403274"
},
{
"name": "Shell",
"bytes": "7548"
}
],
"symlink_target": ""
}
|
"""
This script triggers a bug in pbzip2 (version 0.94).
"""
__author__ = "Baris Kasikci, baris.kasikci@epfl.ch"
from lib import constants
from lib.plugins import create_big_file
from lib.trigger import BaseTrigger
class Trigger(BaseTrigger):
"""
Pbzip2-specific Intel PT tracing
"""
def __init__(self):
super().__init__()
self.benchmark.pre_benchmark_run = self.pre_benchmark_run
@property
def file(self):
"""
The file used for tests
"""
return constants.ROOT_PATH + "/data/pbzip-2094/test.tar"
@property
def program(self) -> str:
"""
The program name
"""
return "pbzip-2094"
@property
def expected_failure(self) -> int:
"""
Expected failure for our bug is 139
"""
return 139
@property
def failure_cmd(self) -> str:
"""
The command to run when we want to trigger the bug
"""
return "{} -k -f -p2 {}".format(self.conf.get_executable(), self.file)
@property
def success_cmd(self) -> str:
"""
The command to run when we don't want to trigger the bug. Here the bug is input independent, hence we send back
self.failure_cmd
"""
return self.failure_cmd
def pre_benchmark_run(self) -> None:
"""
        For benchmarking purposes, we need a much bigger file for this. Let's create one and replace it in the command
"""
path = create_big_file(2048)
self.cmd = self.cmd.replace(self.file, path)
|
{
"content_hash": "f5d3cf9eaf066f1a2dc4e7ffff13682d",
"timestamp": "",
"source": "github",
"line_count": 62,
"max_line_length": 119,
"avg_line_length": 25.35483870967742,
"alnum_prop": 0.5820610687022901,
"repo_name": "dslab-epfl/bugbase",
"id": "015074248b8d81f6919b8c0872c2f42e7edb6dae",
"size": "1611",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "data/pbzip-2094/trigger.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C++",
"bytes": "185"
},
{
"name": "PHP",
"bytes": "241"
},
{
"name": "Python",
"bytes": "211897"
},
{
"name": "Shell",
"bytes": "1792"
}
],
"symlink_target": ""
}
|
from model.contact import Contact
def test_modify_first_contact(app):
if app.contact.count_of_contact() == 0:
app.contact.create_new(Contact(firstname="testContact"))
app.contact.modify_first_contact(Contact(firstname="dupa1"))
|
{
"content_hash": "f4b0d4cadeda37077ead3e60606304de",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 64,
"avg_line_length": 35.142857142857146,
"alnum_prop": 0.7276422764227642,
"repo_name": "Lucas1985/kurs_pythona",
"id": "629a727800e543d645f24927e8ec8db4ba9b4169",
"size": "271",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/test_modify_contact.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "11601"
}
],
"symlink_target": ""
}
|
import fixtures
import mock
from nova import context
from nova.image import glance
from nova import objects
from nova import test
from nova.tests.unit import fake_instance
import nova.tests.unit.image.fake
from nova.tests.unit import utils
from nova.tests.unit.virt.vmwareapi import fake as vmwareapi_fake
from nova.tests.unit.virt.vmwareapi import stubs
from nova.tests import uuidsentinel
from nova.virt import fake
from nova.virt.vmwareapi import driver
from nova.virt.vmwareapi import vm_util
from nova.virt.vmwareapi import vmops
class ConfigDriveTestCase(test.NoDBTestCase):
REQUIRES_LOCKING = True
@mock.patch.object(objects.Service, 'get_by_compute_host')
@mock.patch.object(driver.VMwareVCDriver, '_register_openstack_extension')
def setUp(self, mock_register, mock_service):
super(ConfigDriveTestCase, self).setUp()
vm_util.vm_refs_cache_reset()
self.context = context.RequestContext('fake', 'fake', is_admin=False)
self.flags(cluster_name='test_cluster',
host_ip='testhostname',
host_username='test_username',
host_password='test_pass',
use_linked_clone=False, group='vmware')
self.flags(enabled=False, group='vnc')
vmwareapi_fake.reset()
stubs.set_stubs(self)
nova.tests.unit.image.fake.stub_out_image_service(self)
self.conn = driver.VMwareVCDriver(fake.FakeVirtAPI)
self.network_info = utils.get_test_network_info()
self.node_name = self.conn._nodename
image_ref = nova.tests.unit.image.fake.get_valid_image_id()
instance_values = {
'vm_state': 'building',
'project_id': 'fake',
'user_id': 'fake',
'name': '1',
'kernel_id': '1',
'ramdisk_id': '1',
'mac_addresses': [{'address': 'de:ad:be:ef:be:ef'}],
'memory_mb': 8192,
'flavor': objects.Flavor(vcpus=4, extra_specs={}),
'instance_type_id': 0,
'vcpus': 4,
'root_gb': 80,
'image_ref': image_ref,
'host': 'fake_host',
'task_state': 'scheduling',
'reservation_id': 'r-3t8muvr0',
'id': 1,
'uuid': uuidsentinel.foo,
'node': self.node_name,
'metadata': [],
'expected_attrs': ['system_metadata'],
}
self.test_instance = fake_instance.fake_instance_obj(self.context,
**instance_values)
self.test_instance.flavor = objects.Flavor(vcpus=4, memory_mb=8192,
root_gb=80,
ephemeral_gb=0, swap=0,
extra_specs={})
(image_service, image_id) = glance.get_remote_image_service(context,
image_ref)
metadata = image_service.show(context, image_id)
self.image = objects.ImageMeta.from_dict({
'id': image_ref,
'disk_format': 'vmdk',
'size': int(metadata['size']),
})
class FakeInstanceMetadata(object):
def __init__(self, instance, content=None, extra_md=None,
network_info=None, request_context=None):
pass
def metadata_for_config_drive(self):
return []
self.useFixture(fixtures.MonkeyPatch(
'nova.api.metadata.base.InstanceMetadata',
FakeInstanceMetadata))
def fake_make_drive(_self, _path):
pass
# We can't actually make a config drive v2 because ensure_tree has
# been faked out
self.stub_out('nova.virt.configdrive.ConfigDriveBuilder.make_drive',
fake_make_drive)
def fake_upload_iso_to_datastore(iso_path, instance, **kwargs):
pass
self.stub_out('nova.virt.vmwareapi.images.upload_iso_to_datastore',
fake_upload_iso_to_datastore)
def tearDown(self):
super(ConfigDriveTestCase, self).tearDown()
vmwareapi_fake.cleanup()
nova.tests.unit.image.fake.FakeImageService_reset()
@mock.patch.object(vmops.VMwareVMOps, '_get_instance_metadata',
return_value='fake_metadata')
def _spawn_vm(self, fake_get_instance_meta,
injected_files=None, admin_password=None,
block_device_info=None):
injected_files = injected_files or []
self.conn.spawn(self.context, self.test_instance, self.image,
injected_files=injected_files,
admin_password=admin_password,
network_info=self.network_info,
block_device_info=block_device_info)
@mock.patch.object(vmops.VMwareVMOps, '_create_config_drive',
return_value=('[ds1] fake.iso'))
@mock.patch.object(vmops.VMwareVMOps, '_attach_cdrom_to_vm')
def test_create_vm_with_config_drive_verify_method_invocation(self,
mock_attach_cdrom, mock_create_config_drive):
self.test_instance.config_drive = 'True'
self._spawn_vm()
mock_create_config_drive.assert_called_once_with(mock.ANY,
self.test_instance,
mock.ANY,
mock.ANY,
mock.ANY,
mock.ANY,
mock.ANY,
mock.ANY,
mock.ANY)
mock_attach_cdrom.assert_called_once_with(mock.ANY, mock.ANY,
mock.ANY, mock.ANY)
@mock.patch.object(vmops.VMwareVMOps, '_create_config_drive',
return_value=('[ds1] fake.iso'))
@mock.patch.object(vmops.VMwareVMOps, '_attach_cdrom_to_vm')
def test_create_vm_without_config_drive(self, mock_attach_cdrom,
mock_create_config_drive):
self.test_instance.config_drive = None
self._spawn_vm()
mock_create_config_drive.assert_not_called()
mock_attach_cdrom.assert_not_called()
def test_create_vm_with_config_drive(self):
self.test_instance.config_drive = 'True'
self._spawn_vm()
|
{
"content_hash": "dc127804a8b0fc6fa45fbc6684ca7a57",
"timestamp": "",
"source": "github",
"line_count": 155,
"max_line_length": 79,
"avg_line_length": 43.251612903225805,
"alnum_prop": 0.533263723150358,
"repo_name": "rajalokan/nova",
"id": "b280c9c7e7c9303b5aae12cf1b01a3cdcf342f93",
"size": "7344",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "nova/tests/unit/virt/vmwareapi/test_configdrive.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "601"
},
{
"name": "PHP",
"bytes": "4503"
},
{
"name": "Python",
"bytes": "19100322"
},
{
"name": "Shell",
"bytes": "26793"
},
{
"name": "Smarty",
"bytes": "299237"
}
],
"symlink_target": ""
}
|
import logging
import os
import sys
# pylint: disable=wrong-import-position
sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
from deep_qa import score_dataset_with_ensemble, compute_accuracy
from deep_qa.common.checks import ensure_pythonhashseed_set
logger = logging.getLogger(__name__) # pylint: disable=invalid-name
def main():
usage = 'USAGE: run_ensemble.py [param_file]+ -- [data_file]+'
try:
separator_index = sys.argv.index('--')
except ValueError:
print(usage)
sys.exit(-1)
param_files = sys.argv[1:separator_index]
dataset_files = sys.argv[separator_index + 1:]
predictions, labels = score_dataset_with_ensemble(param_files, dataset_files)
compute_accuracy(predictions, labels)
if __name__ == "__main__":
ensure_pythonhashseed_set()
logging.basicConfig(format='%(asctime)s - %(levelname)s - %(name)s - %(message)s',
level=logging.INFO)
main()
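# --- Illustrative invocation (not part of the original script) ---
# Mirrors the USAGE string above; the parameter and data file names are placeholders:
#   python scripts/run_ensemble.py model_a_params.json model_b_params.json -- test_data.txt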
|
{
"content_hash": "fc2fc2be7cc7f198eeaee3a646ba15e2",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 86,
"avg_line_length": 32.06666666666667,
"alnum_prop": 0.6621621621621622,
"repo_name": "allenai/deep_qa",
"id": "c5b74f6d5ee9b1deb0092f4ecd4bc2b4e266438e",
"size": "962",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "scripts/run_ensemble.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "792559"
},
{
"name": "Shell",
"bytes": "4730"
}
],
"symlink_target": ""
}
|
from __future__ import print_function, division, unicode_literals
import re
import sys
import time
import threading
from math import ceil
import six
if six.PY2:
from backports.shutil_get_terminal_size import get_terminal_size
input = raw_input
else:
from shutil import get_terminal_size
from builtins import input
import colorama
colorama.init()
last_output_lines = 0
overflow_flag = False
is_atty = sys.stdout.isatty()
cursor_up = "\x1b[1A"
widths = [
(126, 1), (159, 0), (687, 1), (710, 0), (711, 1),
(727, 0), (733, 1), (879, 0), (1154, 1), (1161, 0),
(4347, 1), (4447, 2), (7467, 1), (7521, 0), (8369, 1),
(8426, 0), (9000, 1), (9002, 2), (11021, 1), (12350, 2),
(12351, 1), (12438, 2), (12442, 0), (19893, 2), (19967, 1),
(55203, 2), (63743, 1), (64106, 2), (65039, 1), (65059, 0),
(65131, 2), (65279, 1), (65376, 2), (65500, 1), (65510, 2),
(120831, 1), (262141, 2), (1114109, 1),
]
def get_char_width(char):
global widths
o = ord(char)
if o == 0xe or o == 0xf:
return 0
for num, wid in widths:
if o <= num:
return wid
return 1
def width_cal_preprocess(content):
"""
此函数同时删除 ANSI escape code,避免影响行宽计算
    This function also removes ANSI escape codes so they do not affect the line-width calculation
"""
ptn = re.compile(r'(\033|\x1b)\[.*?m', re.I)
_content = re.sub(ptn, '', content) # remove ANSI escape code
return _content
def preprocess(content):
"""
对输出内容进行预处理,转为str类型 (py3),并替换行内\r\t\n等字符为空格
    pre-process the content: turn it into str (for py3) and replace \r\t\n with spaces
"""
if six.PY2:
if not isinstance(content, unicode):
if isinstance(content, str):
_content = unicode(content, encoding=sys.stdin.encoding)
elif isinstance(content, int):
_content = unicode(content)
else:
_content = content
assert isinstance(_content, unicode)
elif six.PY3:
_content = str(content)
_content = re.sub(r'\r|\t|\n', ' ', _content)
return _content
def cut_off_at(content, width):
if line_width(content) > width:
now = content[:width]
while line_width(now) > width:
now = now[:-1]
now += "$" * (width - line_width(now))
return now
else:
return content
def print_line(content, columns, force_single_line):
padding = " " * ((columns - line_width(content)) % columns)
output = "{content}{padding}".format(content=content, padding=padding)
if force_single_line:
output = cut_off_at(output, columns)
    # use an extra space to unify the cursor position when printing a
    # full-width line, which behaves differently in Windows cmd and other terminals
print(output, end=' \b')
sys.stdout.flush()
def line_width(line):
"""
计算本行在输出到命令行后所占的宽度
calculate the width of output in terminal
"""
if six.PY2:
assert isinstance(line, unicode)
_line = width_cal_preprocess(line)
result = sum(map(get_char_width, _line))
return result
def lines_of_content(content, width):
"""
计算内容在特定输出宽度下实际显示的行数
calculate the actual rows with specific terminal width
"""
result = 0
if isinstance(content, list):
for line in content:
_line = preprocess(line)
result += ceil(line_width(_line) / width)
elif isinstance(content, dict):
for k, v in content.items():
# 加2是算上行内冒号和空格的宽度
            # adding 2 for the colon and space ": "
_k, _v = map(preprocess, (k, v))
result += ceil((line_width(_k) + line_width(_v) + 2) / width)
for line in content.sublist:
_line = preprocess(line)
result += ceil(line_width(_line) / width)
return int(result)
def print_multi_line(content, force_single_line, sort_key):
"""
'sort_key' 参数只在 dict 模式时有效
'sort_key' parameter only available in 'dict' mode
"""
global last_output_lines
global overflow_flag
global is_atty
if not is_atty:
if isinstance(content, list):
for line in content:
print(line)
elif isinstance(content, dict):
for k, v in sorted(content.items(), key=sort_key):
print("{}: {}".format(k, v))
else:
raise TypeError("Excepting types: list, dict. Got: {}".format(type(content)))
return
columns, rows = get_terminal_size()
lines = lines_of_content(content, columns)
if force_single_line is False and lines > rows:
overflow_flag = True
elif force_single_line is True and len(content) > rows:
overflow_flag = True
# 确保初始输出位置是位于最左处的
# to make sure the cursor is at the left most
print("\b" * columns, end="")
if isinstance(content, list):
for line in content:
_line = preprocess(line)
print_line(_line, columns, force_single_line)
elif isinstance(content, dict):
for k, v in sorted(content.items(), key=sort_key):
_k, _v = map(preprocess, (k, v))
print_line("{}: {}".format(_k, _v), columns, force_single_line)
for line in content.sublist:
_line = preprocess(line)
print_line(_line, columns, force_single_line)
else:
raise TypeError("Excepting types: list, dict. Got: {}".format(type(content)))
# 输出额外的空行来清除上一次输出的剩余内容
# do extra blank lines to wipe the remaining of last output
print(" " * columns * (last_output_lines - lines), end=" \b")
# 回到初始输出位置
# back to the origin pos
print(cursor_up * (max(last_output_lines, lines)), end="")
sys.stdout.flush()
last_output_lines = lines
class output:
class SignalList(list):
def __init__(self, parent, obj):
super(output.SignalList, self).__init__(obj)
self.parent = parent
self.lock = threading.Lock()
def __setitem__(self, key, value):
global is_atty
with self.lock:
super(output.SignalList, self).__setitem__(key, value)
if not is_atty:
print("{}".format(value))
else:
self.parent.refresh(int(time.time()*1000), forced=False)
def clear(self):
global is_atty
# with self.lock: In all places you call clear, you actually already have the lock
if six.PY2:
self[:] = []
elif six.PY3:
super(output.SignalList, self).clear()
if is_atty:
self.parent.refresh(int(time.time()*1000), forced=False)
def change(self, newlist):
with self.lock:
self.clear()
self.extend(newlist)
if is_atty:
self.parent.refresh(int(time.time()*1000), forced=False)
def append(self, x):
global is_atty
with self.lock:
super(output.SignalList, self).append(x)
if not is_atty:
print("{}".format(x))
else:
self.parent.refresh(int(time.time()*1000), forced=False)
def insert(self, i, x):
global is_atty
with self.lock:
super(output.SignalList, self).insert(i, x)
if not is_atty:
print("{}".format(x))
else:
self.parent.refresh(int(time.time()*1000), forced=False)
def remove(self, x):
global is_atty
with self.lock:
super(output.SignalList, self).remove(x)
if is_atty:
self.parent.refresh(int(time.time()*1000), forced=False)
def pop(self, i=-1):
global is_atty
with self.lock:
rs = super(output.SignalList, self).pop(i)
if is_atty:
self.parent.refresh(int(time.time()*1000), forced=False)
return rs
def sort(self, *args, **kwargs):
global is_atty
with self.lock:
super(output.SignalList, self).sort(*args, **kwargs)
if is_atty:
self.parent.refresh(int(time.time()*1000), forced=False)
class SignalDict(dict):
def __init__(self, parent, obj):
super(output.SignalDict, self).__init__(obj)
self.parent = parent
self.lock = threading.Lock()
self.sublist = []
def change(self, newlist):
with self.lock:
self.clear()
super(output.SignalDict, self).update(newlist)
self.parent.refresh(int(time.time()*1000), forced=False)
def __setitem__(self, key, value):
global is_atty
with self.lock:
super(output.SignalDict, self).__setitem__(key, value)
if not is_atty:
print("{}: {}".format(key, value))
else:
self.parent.refresh(int(time.time()*1000), forced=False)
def clear(self):
global is_atty
# with self.lock: In all places you call clear, you actually already have the lock
super(output.SignalDict, self).clear()
if is_atty:
self.parent.refresh(int(time.time()*1000), forced=False)
def pop(self, *args, **kwargs):
global is_atty
with self.lock:
rs = super(output.SignalDict, self).pop(*args, **kwargs)
if is_atty:
self.parent.refresh(int(time.time()*1000), forced=False)
return rs
def popitem(self, *args, **kwargs):
global is_atty
with self.lock:
rs = super(output.SignalDict, self).popitem(*args, **kwargs)
if is_atty:
self.parent.refresh(int(time.time()*1000), forced=False)
return rs
def setdefault(self, *args, **kwargs):
global is_atty
with self.lock:
rs = super(output.SignalDict, self).setdefault(*args, **kwargs)
if is_atty:
self.parent.refresh(int(time.time()*1000), forced=False)
return rs
def update(self, *args, **kwargs):
global is_atty
with self.lock:
super(output.SignalDict, self).update(*args, **kwargs)
if is_atty:
self.parent.refresh(int(time.time()*1000), forced=False)
def append(self, x):
global is_atty
with self.lock:
self.sublist.append(x)
if is_atty:
self.parent.refresh(int(time.time()*1000), forced=False)
def __init__(self, output_type="list", initial_len=1, interval=0, force_single_line=False, no_warning=False, sort_key=lambda x:x[0]):
self.sort_key = sort_key
self.no_warning = no_warning
no_warning and print("All reprint warning diabled.")
global is_atty
        # reprint does not work in the IDLE terminal, or in any other environment that can't get terminal_size
if is_atty and not all(get_terminal_size()):
if not no_warning:
r = input("Fail to get terminal size, we got {}, continue anyway? (y/N)".format(get_terminal_size()))
if not (r and isinstance(r, str) and r.lower()[0] in ['y','t','1']):
sys.exit(0)
is_atty = False
if output_type == "list":
self.warped_obj = output.SignalList(self, [''] * initial_len)
elif output_type == "dict":
self.warped_obj = output.SignalDict(self, {})
self.interval = interval
self.force_single_line = force_single_line
self._last_update = int(time.time()*1000)
def refresh(self, new_time=0, forced=True):
if new_time - self._last_update >= self.interval or forced:
print_multi_line(self.warped_obj, self.force_single_line, sort_key=self.sort_key)
self._last_update = new_time
def __enter__(self):
global is_atty
if not is_atty:
if not self.no_warning:
print("Not in terminal, reprint now using normal build-in print function.")
return self.warped_obj
def __exit__(self, exc_type, exc_val, exc_tb):
global is_atty
self.refresh(forced=True)
if is_atty:
columns, _ = get_terminal_size()
if self.force_single_line:
print('\n' * len(self.warped_obj), end="")
else:
print('\n' * lines_of_content(self.warped_obj, columns), end="")
global last_output_lines
global overflow_flag
last_output_lines = 0
if overflow_flag:
if not self.no_warning:
print("Detected that the lines of output has been exceeded the height of terminal windows, which \
caused the former output remained and keep adding new lines.")
print("检测到输出过程中, 输出行数曾大于命令行窗口行数, 这会导致输出清除不完整, 而使输出不停增长。请注意控制输出行数。")
|
{
"content_hash": "fef54fd16b8634015f59036f507b7022",
"timestamp": "",
"source": "github",
"line_count": 391,
"max_line_length": 137,
"avg_line_length": 34.19948849104859,
"alnum_prop": 0.5461411905474125,
"repo_name": "Yinzo/reprint",
"id": "746f8c24bcac59b8f7cd9e4fa639d69bfb312c0f",
"size": "13790",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "reprint/reprint.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "13660"
}
],
"symlink_target": ""
}
|
from __future__ import annotations
import collections
from contextlib import contextmanager
from dataclasses import dataclass
import functools as ft
import glob
import json
import itertools as it
import os
import time
import Box2D as B
import IPython.display
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import seaborn as sns
import torch as T
from . import render
State = collections.namedtuple('State', ('x', 'y', 'a', 'dx', 'dy', 'da'))
@dataclass(frozen=True)
class Outcome:
kind: str
success: bool
duration: float
def to_json(self):
return dict(kind=self.kind, success=self.success, duration=self.duration)
class Game:
def __init__(self, seed=None):
# Settings
self.timestep = 0.01
self.thrust = 15
self.hwidth = 0.4
self.hheight = 2
self.max_time = 20
# Transient
self.elapsed_steps = 0
self.elapsed_time = 0.0
self.control = (False, False)
random = np.random.RandomState(seed) # pylint: disable=no-member
# Box2D/hover.render
self.world = B.b2World(gravity=(0, -10))
self.ground = self.world.CreateStaticBody(
position=[0, -10],
shapes=B.b2PolygonShape(box=(50, 10)),
)
self.rocket = self.world.CreateDynamicBody(
position=[0, 15],
angle=1.0 * (random.rand()-0.5)
)
w = self.hwidth
h = self.hheight
t = 2 * self.hwidth
self.rocket.CreatePolygonFixture(
vertices=[
(-2*w, -h),
(2*w, -h),
(w, t-h),
(-w, t-h),
],
density=1,
friction=1,
)
self.rocket.CreatePolygonFixture(
vertices=[
(-w, t-h),
(w, t-h),
(w, h-w),
(0, h),
(-w, h-w),
],
density=1,
friction=1,
)
d = 2 * self.hwidth
self.left_thruster_shape = render.PolygonShape(
color='orange',
vertices=(
(-2*w, -h),
(-w, -h-d),
(0, -h),
))
self.right_thruster_shape = render.PolygonShape(
color='orange',
vertices=(
(0, -h),
(w, -h-d),
(2*w, -h),
))
@staticmethod
def _convert_body(body, color, extra_shapes=()):
return render.Body(
x=body.position.x,
y=body.position.y,
angle=body.angle,
shapes=tuple(
render.PolygonShape(vertices=tuple(fixture.shape.vertices), color=color)
for fixture in body.fixtures
) + tuple(extra_shapes)
)
def draw(self):
"""Render the game to svg.
returns -- string -- SVG
"""
ground = self._convert_body(self.ground, 'black')
rocket = self._convert_body(
self.rocket, 'blue',
((self.left_thruster_shape,) if self.control[0] else ()) +
((self.right_thruster_shape,) if self.control[1] else ()))
return render.draw(
render.Scene(bounds=(-30, 30, -1, 29),
width=800,
bodies=(ground, rocket)))
def _repr_html_(self):
return self.draw()
@property
def state(self):
"""Returns the State tuple, that describes the rocket."""
position = self.rocket.position
angle = self.rocket.angle
dposition = self.rocket.linearVelocity
dangle = self.rocket.angularVelocity
return State(position.x, position.y, angle, dposition.x, dposition.y, dangle)
def _in_bounds(self):
position = self.rocket.position
angle = self.rocket.angle
return abs(position.x) < 20 and 4 <= position.y < 25 and abs(angle) < 1.5
def step(self, control):
"""Take a single step in the game.
control -- (float, float) -- (fire_left, fire_right) -- thrusters
returns -- Outcome|None -- outcome of the game, if finished
"""
self.control = control
thrust_v = self.rocket.GetWorldVector([0, self.rocket.mass * self.thrust])
if control[0]:
self.rocket.ApplyForce(thrust_v, self.rocket.GetWorldPoint([-self.hwidth, -self.hheight]), True)
if control[1]:
self.rocket.ApplyForce(thrust_v, self.rocket.GetWorldPoint([self.hwidth, -self.hheight]), True)
self.world.Step(self.timestep, 5, 5)
self.elapsed_steps += 1
self.elapsed_time += self.timestep
if self.max_time <= self.elapsed_time:
return Outcome('timeout', True, self.elapsed_time)
if not self._in_bounds():
return Outcome('outofbounds', False, self.elapsed_time)
def step_multi(self, control, ticks):
"""Take multiple steps with the same control input.
returns -- Outcome|None
"""
for _ in range(ticks):
outcome = self.step(control)
if outcome:
return outcome
@classmethod
def play(cls, agent, steps_per_control=1):
"""Play a complete game and return the outcome."""
game = cls()
control = agent(game.state)
while True:
outcome = game.step(control)
if outcome:
return outcome
if game.elapsed_steps % steps_per_control == 0:
control = agent(game.state)
@classmethod
def play_and_display(cls, agent, steps_per_render=10, steps_per_control=1):
"""Render a game in IPython, as updating HTML."""
game = cls()
display = IPython.display.display(game, display_id=True)
control = agent(game.state)
while True:
for _ in range(steps_per_render):
outcome = game.step(control)
if outcome:
return outcome
if game.elapsed_steps % steps_per_control == 0:
control = agent(game.state)
display.update(game)
time.sleep(game.timestep * steps_per_render)
class Report:
################################################################################
# Saving
@staticmethod
def _evaluate_one(agent):
return Game.play(agent).to_json()
@staticmethod
@contextmanager
def _mapper(nproc):
if nproc == 1:
yield map
else:
with T.multiprocessing.Pool(nproc) as pool:
yield pool.map
@staticmethod
def _open_write(path, mode='w'):
parent = os.path.dirname(path)
if not os.path.isdir(parent):
os.makedirs(parent)
return open(path, mode)
@classmethod
def about(cls, path, name, kind, **args):
with cls._open_write(os.path.join(path, 'about.json')) as file:
json.dump(dict(name=name, kind=kind, **args), file)
@classmethod
def test(cls, path, agent, ngames, nproc=T.multiprocessing.cpu_count()):
with cls._mapper(nproc) as mapper, \
cls._open_write(os.path.join(path, 'test.jsonl')) as file:
for result in mapper(cls._evaluate_one, it.repeat(agent, ngames)):
json.dump(result, file)
file.write('\n')
@classmethod
def agent(cls, path, agent):
with cls._open_write(os.path.join(path, 'agent.pkl'), 'wb') as file:
T.save(agent, file)
class Training:
def __init__(self, root):
self._root = root
self._logs = {}
self._t0 = time.time()
if not os.path.isdir(root):
os.makedirs(root)
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
self.close()
def close(self):
for file in self._logs.values():
file.close()
def append(self, name, **row):
if name not in self._logs:
self._logs[name] = open(os.path.join(self._root, name + '.jsonl'), 'w')
log = self._logs[name]
json.dump(dict(t=time.time()-self._t0, **row), log)
log.write('\n')
@classmethod
def training(cls, path):
return cls.Training(os.path.join(path, 'training'))
################################################################################
# Loading
@classmethod
def load(cls, root):
parts = []
keys = set([])
for about_path in glob.glob(os.path.join(root, '**/about.json')):
df = pd.read_json(os.path.join(os.path.dirname(about_path), 'test.jsonl'),
lines=True)
with open(about_path) as f:
about = json.load(f)
keys |= about.keys()
for key, value in about.items():
df[key] = value
parts.append(df)
keys = ['kind', 'name'] + list(sorted(keys - {'kind', 'name'}))
return cls(pd.concat(parts), keys)
def __init__(self, data, keys):
self.data = data
self.keys = keys
def _repr_html_(self):
# *1 is a trick to convert booleans to numeric
return (self.data * 1).groupby(list(self.keys)).mean()._repr_html_()
def plot_duration(self):
plt.figure(figsize=(10, 6))
bins = np.logspace(np.floor(np.log10(self.data.duration.min())),
np.ceil(np.log10(self.data.duration.max())),
num=40)
names = sorted(set(self.data.name))
for name in names:
sns.distplot(self.data.duration[self.data.name == name], kde=False, bins=bins)
plt.gca().set_xscale('log')
plt.legend(names)
plt.title('Game duration')
class IntegratorAgent:
"""Turn a continuous agent into a PWM discrete agent (suitable for the game)."""
def __init__(self, agent):
self.agent = agent
self._left = 0
self._right = 0
def __call__(self, state):
ltarget, rtarget = self.agent(state)
self._left += ltarget
self._right += rtarget
left = (1 <= self._left)
right = (1 <= self._right)
self._left -= left
self._right -= right
return left, right
def _constant_agent(left, right, state):
return (left, right)
def constant_agent(left, right):
"""An agent that always returns the same action."""
return ft.partial(_constant_agent, left, right)
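# --- Illustrative usage sketch (not part of the original module) ---
# A minimal way to exercise the Game defined above with the helpers from this
# file: play one complete game with a fixed "both thrusters on" agent and
# print the outcome; it assumes Box2D and the local render module import cleanly.
def _example_play():
    outcome = Game.play(constant_agent(True, True))
    print(outcome.kind, outcome.success, outcome.duration)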
|
{
"content_hash": "c7276333f846c369432df4899f536c32",
"timestamp": "",
"source": "github",
"line_count": 341,
"max_line_length": 108,
"avg_line_length": 31.173020527859236,
"alnum_prop": 0.5269049858889934,
"repo_name": "DouglasOrr/Snippets",
"id": "f6d45a337b71b11d08ea0c0ea9e59aa5d4025812",
"size": "10630",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "hover/hover/core.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "61098"
},
{
"name": "CSS",
"bytes": "2839"
},
{
"name": "Clojure",
"bytes": "42894"
},
{
"name": "Dockerfile",
"bytes": "3894"
},
{
"name": "HTML",
"bytes": "17302"
},
{
"name": "Haml",
"bytes": "2454"
},
{
"name": "Haskell",
"bytes": "277"
},
{
"name": "Java",
"bytes": "127511"
},
{
"name": "JavaScript",
"bytes": "12117"
},
{
"name": "Jupyter Notebook",
"bytes": "33198"
},
{
"name": "Python",
"bytes": "137390"
},
{
"name": "Ruby",
"bytes": "8897"
},
{
"name": "Rust",
"bytes": "32172"
},
{
"name": "Shell",
"bytes": "3263"
}
],
"symlink_target": ""
}
|
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.7.4
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1alpha1RoleBindingList(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self, api_version=None, items=None, kind=None, metadata=None):
"""
V1alpha1RoleBindingList - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'api_version': 'str',
'items': 'list[V1alpha1RoleBinding]',
'kind': 'str',
'metadata': 'V1ListMeta'
}
self.attribute_map = {
'api_version': 'apiVersion',
'items': 'items',
'kind': 'kind',
'metadata': 'metadata'
}
self._api_version = api_version
self._items = items
self._kind = kind
self._metadata = metadata
@property
def api_version(self):
"""
Gets the api_version of this V1alpha1RoleBindingList.
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#resources
:return: The api_version of this V1alpha1RoleBindingList.
:rtype: str
"""
return self._api_version
@api_version.setter
def api_version(self, api_version):
"""
Sets the api_version of this V1alpha1RoleBindingList.
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#resources
:param api_version: The api_version of this V1alpha1RoleBindingList.
:type: str
"""
self._api_version = api_version
@property
def items(self):
"""
Gets the items of this V1alpha1RoleBindingList.
Items is a list of RoleBindings
:return: The items of this V1alpha1RoleBindingList.
:rtype: list[V1alpha1RoleBinding]
"""
return self._items
@items.setter
def items(self, items):
"""
Sets the items of this V1alpha1RoleBindingList.
Items is a list of RoleBindings
:param items: The items of this V1alpha1RoleBindingList.
:type: list[V1alpha1RoleBinding]
"""
if items is None:
raise ValueError("Invalid value for `items`, must not be `None`")
self._items = items
@property
def kind(self):
"""
Gets the kind of this V1alpha1RoleBindingList.
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds
:return: The kind of this V1alpha1RoleBindingList.
:rtype: str
"""
return self._kind
@kind.setter
def kind(self, kind):
"""
Sets the kind of this V1alpha1RoleBindingList.
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds
:param kind: The kind of this V1alpha1RoleBindingList.
:type: str
"""
self._kind = kind
@property
def metadata(self):
"""
Gets the metadata of this V1alpha1RoleBindingList.
Standard object's metadata.
:return: The metadata of this V1alpha1RoleBindingList.
:rtype: V1ListMeta
"""
return self._metadata
@metadata.setter
def metadata(self, metadata):
"""
Sets the metadata of this V1alpha1RoleBindingList.
Standard object's metadata.
:param metadata: The metadata of this V1alpha1RoleBindingList.
:type: V1ListMeta
"""
self._metadata = metadata
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, V1alpha1RoleBindingList):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
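# --- Illustrative usage sketch (not part of the generated module) ---
# Shows how the generated model above serializes itself; the api_version value
# is a placeholder and `items` would normally hold V1alpha1RoleBinding objects.
def _example_to_dict():
    binding_list = V1alpha1RoleBindingList(
        api_version="rbac.authorization.k8s.io/v1alpha1",
        kind="RoleBindingList",
        items=[],
    )
    return binding_list.to_dict()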
|
{
"content_hash": "94e19edcd75c7cff4e50aa2acd96ddf7",
"timestamp": "",
"source": "github",
"line_count": 195,
"max_line_length": 281,
"avg_line_length": 32.07692307692308,
"alnum_prop": 0.5961630695443645,
"repo_name": "djkonro/client-python",
"id": "bd3e4efc0f9ac4f3fc5042a33a690f5d623dd205",
"size": "6272",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "kubernetes/client/models/v1alpha1_role_binding_list.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "6527154"
},
{
"name": "Shell",
"bytes": "16522"
}
],
"symlink_target": ""
}
|
"""
The deprecated lazy command objects
"""
import warnings
from libqtile.command import base, client, graph, interface
from libqtile.lazy import LazyCommandInterface
__all__ = [
"lazy",
"base",
"client",
"graph",
"interface",
]
class _LazyTree(client.InteractiveCommandClient):
def __getattr__(self, name: str) -> client.InteractiveCommandClient:
"""Get the child element of the currently selected object"""
warnings.warn(
"libqtile.command.lazy is deprecated, use libqtile.lazy.lazy", DeprecationWarning
)
return super().__getattr__(name)
lazy = _LazyTree(LazyCommandInterface())
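# --- Illustrative note (not part of the original module) ---
# Importing `lazy` from this module still works but emits a DeprecationWarning
# on attribute access; as the warning above says, the non-deprecated spelling is:
#
#     from libqtile.lazy import lazy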
|
{
"content_hash": "427e80b55dc1210be1d9da4d70987174",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 93,
"avg_line_length": 23.464285714285715,
"alnum_prop": 0.6712328767123288,
"repo_name": "qtile/qtile",
"id": "1618a214afc8ba6c0fbd3b73a0bbb437a6bd3e50",
"size": "1772",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "libqtile/command/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "625"
},
{
"name": "Python",
"bytes": "2202676"
},
{
"name": "Shell",
"bytes": "8090"
}
],
"symlink_target": ""
}
|
"""These are flows designed to discover information about the host."""
from grr.lib import aff4
from grr.lib import artifact
from grr.lib import flow
from grr.lib import rdfvalue
from grr.lib import worker
from grr.proto import flows_pb2
class EnrolmentInterrogateEvent(flow.EventListener):
"""An event handler which will schedule interrogation on client enrollment."""
EVENTS = ["ClientEnrollment"]
well_known_session_id = rdfvalue.SessionID("aff4:/flows/CA:Interrogate")
# We only accept messages that came from the CA flows.
sourcecheck = lambda source: source.Basename().startswith("CA:")
@flow.EventHandler(source_restriction=sourcecheck)
def ProcessMessage(self, message=None, event=None):
_ = message
flow.GRRFlow.StartFlow(client_id=event, flow_name="Interrogate",
queue=worker.DEFAULT_ENROLLER_QUEUE,
token=self.token)
class InterrogateArgs(rdfvalue.RDFProtoStruct):
protobuf = flows_pb2.InterrogateArgs
class Interrogate(flow.GRRFlow):
"""Interrogate various things about the host."""
category = "/Administrative/"
client = None
args_type = InterrogateArgs
behaviours = flow.GRRFlow.behaviours + "BASIC"
@flow.StateHandler(next_state=["Hostname",
"Platform",
"InstallDate",
"EnumerateInterfaces",
"EnumerateFilesystems",
"ClientInfo",
"ClientConfig",
"ClientConfiguration"])
def Start(self):
"""Start off all the tests."""
self.state.Register("summary", rdfvalue.ClientSummary(
client_id=self.client_id))
# Create the objects we need to exist.
self.Load()
fd = aff4.FACTORY.Create(self.client.urn.Add("network"), "Network",
token=self.token)
fd.Close()
self.CallClient("GetPlatformInfo", next_state="Platform")
self.CallClient("GetInstallDate", next_state="InstallDate")
self.CallClient("GetClientInfo", next_state="ClientInfo")
# Support both new and old clients.
self.CallClient("GetConfig", next_state="ClientConfig")
self.CallClient("GetConfiguration", next_state="ClientConfiguration")
self.CallClient("EnumerateInterfaces", next_state="EnumerateInterfaces")
self.CallClient("EnumerateFilesystems", next_state="EnumerateFilesystems")
def Load(self):
# Ensure there is a client object
self.client = aff4.FACTORY.Open(self.client_id,
mode="rw", token=self.token)
def Save(self):
# Make sure the client object is removed and closed
if self.client:
self.client.Close()
self.client = None
@flow.StateHandler(next_state=["ProcessKnowledgeBase"])
def Platform(self, responses):
"""Stores information about the platform."""
if responses.success:
response = responses.First()
self.state.summary.system_info = response
# These need to be in separate attributes because they get searched on in
# the GUI
self.client.Set(self.client.Schema.HOSTNAME(response.node))
self.client.Set(self.client.Schema.SYSTEM(response.system))
self.client.Set(self.client.Schema.OS_RELEASE(response.release))
self.client.Set(self.client.Schema.OS_VERSION(response.version))
self.client.Set(self.client.Schema.FQDN(response.fqdn))
# response.machine is the machine value of platform.uname()
# On Windows this is the value of:
# HKEY_LOCAL_MACHINE\System\CurrentControlSet\Control\Session
# Manager\Environment\PROCESSOR_ARCHITECTURE
# "AMD64", "IA64" or "x86"
self.client.Set(self.client.Schema.ARCH(response.machine))
self.client.Set(self.client.Schema.UNAME("%s-%s-%s" % (
response.system, response.release, response.version)))
self.client.Flush()
else:
self.Log("Could not retrieve Platform info.")
if self.client.Get(self.client.Schema.SYSTEM):
self.CallFlow("KnowledgeBaseInitializationFlow",
next_state="ProcessKnowledgeBase")
else:
self.Log("Unknown system type, skipping KnowledgeBaseInitializationFlow")
@flow.StateHandler()
def InstallDate(self, responses):
if responses.success:
response = responses.First()
install_date = self.client.Schema.INSTALL_DATE(
response.integer * 1000000)
self.client.Set(install_date)
self.state.summary.install_date = install_date
else:
self.Log("Could not get InstallDate")
@flow.StateHandler()
def ProcessKnowledgeBase(self, responses):
"""Update the SUMMARY from the knowledgebase data."""
if not responses.success:
raise flow.FlowError("Error collecting artifacts: %s" % responses.status)
knowledge_base = artifact.GetArtifactKnowledgeBase(self.client)
for kbuser in knowledge_base.users:
self.state.summary.users.Append(
rdfvalue.User().FromKnowledgeBaseUser(kbuser))
@flow.StateHandler()
def EnumerateInterfaces(self, responses):
"""Enumerates the interfaces."""
if responses.success and responses:
net_fd = aff4.FACTORY.Create(self.client.urn.Add("network"), "Network",
token=self.token)
interface_list = net_fd.Schema.INTERFACES()
mac_addresses = []
for response in responses:
interface_list.Append(response)
# Add a hex encoded string for searching
if (response.mac_address and
response.mac_address != "\x00" * len(response.mac_address)):
mac_addresses.append(response.mac_address.human_readable_address)
self.client.Set(self.client.Schema.MAC_ADDRESS(
"\n".join(mac_addresses)))
net_fd.Set(net_fd.Schema.INTERFACES, interface_list)
net_fd.Close()
self.state.summary.interfaces = interface_list
else:
self.Log("Could not enumerate interfaces.")
@flow.StateHandler()
def EnumerateFilesystems(self, responses):
"""Store all the local filesystems in the client."""
if responses.success and len(responses):
filesystems = self.client.Schema.FILESYSTEM()
for response in responses:
filesystems.Append(response)
if response.type == "partition":
(device, offset) = response.device.rsplit(":", 1)
offset = int(offset)
pathspec = rdfvalue.PathSpec(
path=device, pathtype=rdfvalue.PathSpec.PathType.OS,
offset=offset)
pathspec.Append(path="/",
pathtype=rdfvalue.PathSpec.PathType.TSK)
urn = self.client.PathspecToURN(pathspec, self.client.urn)
fd = aff4.FACTORY.Create(urn, "VFSDirectory", token=self.token)
fd.Set(fd.Schema.PATHSPEC(pathspec))
fd.Close()
continue
if response.device:
# Create the raw device
urn = "devices/%s" % response.device
pathspec = rdfvalue.PathSpec(
path=response.device,
pathtype=rdfvalue.PathSpec.PathType.OS)
pathspec.Append(path="/",
pathtype=rdfvalue.PathSpec.PathType.TSK)
fd = aff4.FACTORY.Create(urn, "VFSDirectory", token=self.token)
fd.Set(fd.Schema.PATHSPEC(pathspec))
fd.Close()
# Create the TSK device
urn = self.client.PathspecToURN(pathspec, self.client.urn)
fd = aff4.FACTORY.Create(urn, "VFSDirectory", token=self.token)
fd.Set(fd.Schema.PATHSPEC(pathspec))
fd.Close()
if response.mount_point:
# Create the OS device
pathspec = rdfvalue.PathSpec(
path=response.mount_point,
pathtype=rdfvalue.PathSpec.PathType.OS)
urn = self.client.PathspecToURN(pathspec, self.client.urn)
fd = aff4.FACTORY.Create(urn, "VFSDirectory", token=self.token)
fd.Set(fd.Schema.PATHSPEC(pathspec))
fd.Close()
self.client.Set(self.client.Schema.FILESYSTEM, filesystems)
else:
self.Log("Could not enumerate file systems.")
@flow.StateHandler()
def ClientInfo(self, responses):
"""Obtain some information about the GRR client running."""
if responses.success:
response = responses.First()
self.state.summary.client_info = response
self.client.Set(self.client.Schema.CLIENT_INFO(response))
self.client.AddLabels(response.labels)
self.state.summary.client_info = response
else:
self.Log("Could not get ClientInfo.")
@flow.StateHandler()
def ClientConfig(self, responses):
"""Process client config."""
if responses.success:
response = responses.First()
self.client.Set(self.client.Schema.GRR_CONFIG(response))
@flow.StateHandler()
def ClientConfiguration(self, responses):
"""Process client config."""
if responses.success:
response = responses.First()
self.client.Set(self.client.Schema.GRR_CONFIGURATION(response))
@flow.StateHandler()
def End(self):
"""Finalize client registration."""
self.Notify("Discovery", self.client.urn, "Client Discovery Complete")
# Publish this client to the Discovery queue.
self.state.summary.timestamp = rdfvalue.RDFDatetime().Now()
self.Publish("Discovery", self.state.summary)
self.SendReply(self.state.summary)
self.client.Set(self.client.Schema.SUMMARY, self.state.summary)
# Flush the data to the data store.
self.client.Close()
|
{
"content_hash": "897f154ec63ceca09d15419354aaa999",
"timestamp": "",
"source": "github",
"line_count": 263,
"max_line_length": 80,
"avg_line_length": 36.44106463878327,
"alnum_prop": 0.6535893155258765,
"repo_name": "simsong/grr-insider",
"id": "21b8e798235f139eb8a420af7b09fd592530f051",
"size": "9606",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/flows/general/discovery.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "227"
},
{
"name": "C++",
"bytes": "55149"
},
{
"name": "CSS",
"bytes": "36308"
},
{
"name": "JavaScript",
"bytes": "679269"
},
{
"name": "Python",
"bytes": "3553249"
},
{
"name": "Shell",
"bytes": "30813"
}
],
"symlink_target": ""
}
|
"""A setuptools based setup module.
See:
https://packaging.python.org/en/latest/distributing.html
https://github.com/pypa/sampleproject
"""
# Always prefer setuptools over distutils
from setuptools import setup, find_packages
# To use a consistent encoding
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='gisela',
# Versions should comply with PEP440. For a discussion on single-sourcing
# the version across setup.py and the project code, see
# https://packaging.python.org/en/latest/single_source_version.html
version='0.1',
description='Simple time tracking service',
long_description=long_description,
# The project's main homepage.
url='https://github.com/toirl/gisela',
# Author details
author='Torsten Irländer',
author_email='torsten.irlaender@googlemail.com',
# Choose your license
license='MIT',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 3 - Alpha',
# Indicate who your project is intended for
'Intended Audience :: Developers',
# Pick your license as you wish (should match "license" above)
'License :: OSI Approved :: MIT License',
# Specify the Python versions you support here. In particular, ensure
# that you indicate whether you support Python 2, Python 3 or both.
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
# What does your project relate to?
keywords='time tracking micro service tags tagging',
test_suite="tests",
# You can just specify the packages manually here if your project is
# simple. Or you can use find_packages().
packages=find_packages(exclude=[]),
# Alternatively, if you want to distribute just a my_module.py, uncomment
# this:
# py_modules=["my_module"],
# List run-time dependencies here. These will be installed by pip when
# your project is installed. For an analysis of "install_requires" vs pip's
# requirements files see:
# https://packaging.python.org/en/latest/requirements.html
install_requires=['bottle-sqlalchemy', 'bottle'],
# List additional groups of dependencies here (e.g. development
# dependencies). You can install these using the following syntax,
# for example:
# $ pip install -e .[dev,test]
extras_require={
'dev': [],
'test': ['webtest'],
},
# If there are data files included in your packages that need to be
# installed, specify them here. If using Python 2.6 or less, then these
# have to be included in MANIFEST.in as well.
package_data={
'sample': [],
},
# Although 'package_data' is the preferred approach, in some case you may
# need to place data files outside of your packages. See:
# http://docs.python.org/3.4/distutils/setupscript.html#installing-additional-files # noqa
# In this case, 'data_file' will be installed into '<sys.prefix>/my_data'
data_files=[],
# To provide executable scripts, use entry points in preference to the
# "scripts" keyword. Entry points provide cross-platform support and allow
# pip to create the appropriate form of executable for the target platform.
entry_points={
'console_scripts': [
'gisela=gisela.server:main',
],
},
)
|
{
"content_hash": "201bd85397f06733a126efe74e60763c",
"timestamp": "",
"source": "github",
"line_count": 115,
"max_line_length": 94,
"avg_line_length": 34.40869565217391,
"alnum_prop": 0.6611068991660348,
"repo_name": "toirl/gisela",
"id": "59a43e4dad3122e38b990509c9dffff1dfaff4bb",
"size": "3999",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "API Blueprint",
"bytes": "7469"
},
{
"name": "HTML",
"bytes": "1977"
},
{
"name": "JavaScript",
"bytes": "1083"
},
{
"name": "Python",
"bytes": "27817"
}
],
"symlink_target": ""
}
|
from libs import elastic_libs
import csv
from datetime import datetime
def get_users_tweets(screen_name, time_range="7d"):
query = {
"from": 0,
"size": 1000,
"sort": [
{"@timestamp": {"order": "desc"}}
],
"query": {
"bool": {
"must": [
{"range": {"@timestamp": {"gte": "now-{}".format(time_range), "lte": "now"}}},
]
},
}
}
results = elastic_libs.run_query(query)
return results
def report_to_csv():
results = get_users_tweets("")
tweets = results['hits']['hits']
with open('tweets.csv', 'w') as csvfile:
field_names = ['timestamp', 'screen_name', 'in_reply_to', 'text', 'urls']
writer = csv.DictWriter(csvfile, fieldnames=field_names)
writer.writeheader()
for tweet in tweets:
content = tweet['_source']
if content['user']['screen_name'] == "":
timestamp = datetime.fromtimestamp(int((content['timestamp_ms']))/1000)
screenname = content['user']['screen_name']
in_reply_to = content['in_reply_to_screen_name']
text = content['text']
url_entities = content['entities']['urls']
if len(url_entities) > 0:
real_url = (url_entities[0]['expanded_url'])
else:
real_url = ""
row_dict = {'timestamp': timestamp, 'screen_name': screenname, 'in_reply_to': in_reply_to, 'text': text, 'urls': real_url}
writer.writerow(row_dict)
|
{
"content_hash": "a54f70f4414837d7a72aba2c1dc5d75a",
"timestamp": "",
"source": "github",
"line_count": 51,
"max_line_length": 138,
"avg_line_length": 31.980392156862745,
"alnum_prop": 0.4978540772532189,
"repo_name": "gradiuscypher/internet_illithid",
"id": "9ffab9a86e56f92b2e6c82178333802de05aa5a5",
"size": "1631",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mimic/libs/reporting.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "15979"
},
{
"name": "Dockerfile",
"bytes": "580"
},
{
"name": "HTML",
"bytes": "10342"
},
{
"name": "JavaScript",
"bytes": "115546"
},
{
"name": "Python",
"bytes": "39339"
}
],
"symlink_target": ""
}
|
import datetime
import os
from pathlib import Path
from unittest import mock
import pytest
from cumulusci.core.config import OrgConfig
from metaci.build.models import Build
from metaci.conftest import (
BranchFactory,
BuildFactory,
BuildFlowFactory,
PlanFactory,
PlanRepositoryFactory,
PlanScheduleFactory,
RepositoryFactory,
ScratchOrgInstanceFactory,
)
from metaci.release.models import Release
@pytest.mark.django_db
class TestBuild:
def test_empty_build_init(self):
Build()
def test_scheduled_build_init(self):
repo = RepositoryFactory()
branch = BranchFactory(name="branch", repo=repo)
schedule = PlanScheduleFactory(branch=branch)
plan = PlanFactory()
planrepo = PlanRepositoryFactory(plan=plan, repo=repo)
build = Build(
repo=branch.repo,
plan=plan,
branch=branch,
commit="shashasha",
schedule=schedule,
build_type="scheduled",
)
assert build.planrepo == planrepo
def test_planrepo_find_on_build_init(self):
repo = RepositoryFactory()
plan = PlanFactory()
planrepo = PlanRepositoryFactory(plan=plan, repo=repo)
build = Build(repo=repo, plan=plan)
assert build.planrepo == planrepo
@mock.patch("metaci.repository.models.Repository.get_github_api")
@mock.patch("metaci.cumulusci.keychain.MetaCIProjectKeychain.get_org")
def test_run(self, get_org, get_gh_api):
# mock github zipball
def archive(format, zip_content, ref):
with open(Path(__file__).parent / "testproject.zip", "rb") as f:
zip_content.write(f.read())
mock_api = mock.Mock()
mock_api.archive.side_effect = archive
get_gh_api.return_value = mock_api
# mock org config
org_config = OrgConfig({}, "test")
org_config.refresh_oauth_token = mock.Mock()
get_org.return_value = org_config
build = BuildFactory()
build.plan.flows = "test"
try:
build.run()
finally:
detach_logger(build)
assert build.status == "success", build.log
assert "Build flow test completed successfully" in build.log
assert "running test flow" in build.flows.get().log
def test_delete_org(self):
build = BuildFactory()
build.org_instance = ScratchOrgInstanceFactory(org__repo=build.repo)
build.org_instance.delete_org = mock.Mock()
org_config = OrgConfig({"scratch": True}, "dev")
build.delete_org(org_config)
build.org_instance.delete_org.assert_called_once()
detach_logger(build)
def test_delete_org__not_scratch(self):
build = BuildFactory()
build.org_instance = ScratchOrgInstanceFactory(org__repo=build.repo)
build.org_instance.delete_org = mock.Mock()
org_config = OrgConfig({}, "dev")
build.delete_org(org_config)
build.org_instance.delete_org.assert_not_called()
detach_logger(build)
def test_delete_org__keep_org(self):
build = BuildFactory(keep_org=True)
org = ScratchOrgInstanceFactory()
org.delete_org = mock.Mock()
build.org_instance = org
org_config = OrgConfig({"scratch": True}, "dev")
build.delete_org(org_config)
org.delete_org.assert_not_called()
detach_logger(build)
def test_delete_org__keep_org_on_error(self):
build = BuildFactory(status="error")
build.plan.keep_org_on_error = True
org = ScratchOrgInstanceFactory()
org.delete_org = mock.Mock()
build.org_instance = org
org_config = OrgConfig({"scratch": True}, "dev")
build.delete_org(org_config)
org.delete_org.assert_not_called()
detach_logger(build)
def test_delete_org__keep_org_on_fail(self):
build = BuildFactory(status="fail")
build.plan.keep_org_on_fail = True
org = ScratchOrgInstanceFactory()
org.delete_org = mock.Mock()
build.org_instance = org
org_config = OrgConfig({"scratch": True}, "dev")
build.delete_org(org_config)
org.delete_org.assert_not_called()
detach_logger(build)
@mock.patch.dict(os.environ, clear=True)
def test_no_worker_id(self):
build = BuildFactory()
assert not build.worker_id
@mock.patch.dict(os.environ, {"DYNO": "faker.1"})
def test_dyno_worker_id(self):
build = BuildFactory()
assert build.worker_id == "faker.1"
def test_get_commit(self):
build = BuildFactory()
commit_sha = build.commit
truncated_commit = build.get_commit()
assert f"{commit_sha[:8]}" == truncated_commit
@pytest.mark.django_db
class TestBuildFlow:
def test_set_commit_status(self):
build_flow = BuildFlowFactory()
build_flow.build.plan.commit_status_template = "{{ 2 + 2 }}"
build_flow.flow_instance = mock.Mock()
build_flow.set_commit_status()
assert build_flow.build.commit_status == "4"
def test_get_flow_options(self):
build_flow = BuildFlowFactory()
build_flow.build.plan.role = "release"
build_flow.build.release = Release(repo=RepositoryFactory())
options = build_flow._get_flow_options()
assert options["github_release_notes"]["sandbox_date"] == datetime.date.today()
assert options["github_release_notes"][
"production_date"
] == datetime.date.today() + datetime.timedelta(days=6)
def detach_logger(model):
for handler in model.logger.handlers:
model.logger.removeHandler(handler)
|
{
"content_hash": "9dcb597c9e1d2fb8a06bbb42265ea7bd",
"timestamp": "",
"source": "github",
"line_count": 172,
"max_line_length": 87,
"avg_line_length": 33.174418604651166,
"alnum_prop": 0.6291622853137049,
"repo_name": "SalesforceFoundation/mrbelvedereci",
"id": "b394dff0b92cdd0475664f0060fa9652eaf79079",
"size": "5706",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "metaci/build/tests/test_models.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "2069"
},
{
"name": "HTML",
"bytes": "123214"
},
{
"name": "JavaScript",
"bytes": "3993"
},
{
"name": "Python",
"bytes": "245560"
},
{
"name": "Shell",
"bytes": "4590"
}
],
"symlink_target": ""
}
|
import unittest
import sys
import time
import os
from robot.utils.asserts import *
from robot.errors import *
from robot.running.timeouts import TestTimeout, KeywordTimeout
# thread_resources is here
sys.path.append(os.path.join(os.path.dirname(__file__),'..','utils'))
from thread_resources import passing, failing, sleeping, returning, MyException
class VariableMock(object):
def replace_string(self, string):
return string
class TestInit(unittest.TestCase):
def test_no_params(self):
self._verify_tout(TestTimeout())
def test_timeout_string(self):
for tout_str, exp_str, exp_secs in [ ('1s', '1 second', 1),
('10 sec', '10 seconds', 10),
('2h 1minute', '2 hours 1 minute', 7260),
('42', '42 seconds', 42) ]:
self._verify_tout(TestTimeout(tout_str), exp_str, exp_secs)
def test_invalid_timeout_string(self):
for inv in ['invalid', '1s 1']:
for params in [ [inv], [inv,'whatever'] ]:
tout = TestTimeout(*params)
err = "Setting test timeout failed: Invalid time string '%s'"
self._verify_tout(tout, str=inv, secs=0.000001, err=err % inv)
def _verify_tout(self, tout, str='', secs=-1, err=None):
tout.replace_variables(VariableMock())
assert_equals(tout.string, str)
assert_equals(tout.secs, secs)
assert_equals(tout.error, err)
class TestTimer(unittest.TestCase):
def test_time_left(self):
tout = TestTimeout('1s', variables=VariableMock())
tout.start()
assert_true(tout.time_left() > 0.9)
time.sleep(0.2)
assert_true(tout.time_left() < 0.9)
def test_timed_out_with_no_timeout(self):
tout = TestTimeout(variables=VariableMock())
tout.start()
time.sleep(0.01)
assert_false(tout.timed_out())
def test_timed_out_with_non_exceeded_timeout(self):
tout = TestTimeout('10s', variables=VariableMock())
tout.start()
time.sleep(0.01)
assert_false(tout.timed_out())
def test_timed_out_with_exceeded_timeout(self):
tout = TestTimeout('1ms', variables=VariableMock())
tout.start()
time.sleep(0.02)
assert_true(tout.timed_out())
class TestComparisons(unittest.TestCase):
def test_compare_when_none_timeouted(self):
touts = self._create_timeouts([''] * 10)
assert_equals(min(touts).string, '')
assert_equals(max(touts).string, '')
def test_compare_when_all_timeouted(self):
touts = self._create_timeouts(['1min','42seconds','43','1h1min','99'])
assert_equals(min(touts).string, '42 seconds')
assert_equals(max(touts).string, '1 hour 1 minute')
def test_compare_with_timeouted_and_non_timeouted(self):
touts = self._create_timeouts(['','1min','42sec','','43','1h1m','99',''])
assert_equals(min(touts).string, '42 seconds')
assert_equals(max(touts).string, '')
def test_that_compare_uses_starttime(self):
touts = self._create_timeouts(['1min','42seconds','43','1h1min','99'])
touts[2].starttime -= 2
assert_equals(min(touts).string, '43 seconds')
assert_equals(max(touts).string, '1 hour 1 minute')
def _create_timeouts(self, tout_strs):
touts = []
for tout_str in tout_strs:
touts.append(TestTimeout(tout_str, variables=VariableMock()))
touts[-1].start()
return touts
class TestRun(unittest.TestCase):
def setUp(self):
self.tout = TestTimeout('1s', variables=VariableMock())
self.tout.start()
def test_passing(self):
assert_none(self.tout.run(passing))
def test_returning(self):
for arg in [ 10, 'hello', ['l','i','s','t'], unittest]:
ret = self.tout.run(returning, args=(arg,))
assert_equals(ret, arg)
def test_failing(self):
assert_raises_with_msg(MyException, 'hello world',
self.tout.run, failing, ('hello world',))
if sys.platform.startswith('java'):
def test_java_failing(self):
from java.lang import Error
from thread_resources import java_failing
assert_raises_with_msg(Error, 'java.lang.Error: hi tellus',
self.tout.run, java_failing, ('hi tellus',))
def test_sleeping(self):
assert_equals(self.tout.run(sleeping, args=(0.01,)), 0.01)
def test_method_executed_normally_if_no_timeout(self):
os.environ['ROBOT_THREAD_TESTING'] = 'initial value'
self.tout.run(sleeping, (0.05,))
assert_equals(os.environ['ROBOT_THREAD_TESTING'], '0.05')
def test_method_stopped_if_timeout(self):
os.environ['ROBOT_THREAD_TESTING'] = 'initial value'
self.tout.secs = 0.001
        # Exceptions thrown via PyThreadState_SetAsyncExc are not guaranteed
        # to occur within a specific timeframe, so the actual Timeout exception
        # may be thrown too late on Windows.
        # This is why we need an action that really will take some time (sleep 5 secs)
        # to (almost) ensure that the 'ROBOT_THREAD_TESTING' setting is not executed
        # before the timeout exception occurs.
assert_raises_with_msg(TimeoutError, 'Test timeout 1 second exceeded.',
self.tout.run, sleeping, (5,))
assert_equals(os.environ['ROBOT_THREAD_TESTING'], 'initial value')
def test_zero_and_negative_timeout(self):
for tout in [0, 0.0, -0.01, -1, -1000]:
self.tout.time_left = lambda: tout
assert_raises(TimeoutError, self.tout.run, sleeping, (10,))
def test_customized_message(self):
tout = KeywordTimeout('1s', 'My message', VariableMock())
tout.start()
tout.run(passing)
tout.secs = 0.001
assert_raises_with_msg(TimeoutError, 'My message',
tout.run, sleeping, (10,))
class TestMessage(unittest.TestCase):
def test_non_active(self):
assert_equal(TestTimeout().get_message(), 'Test timeout not active.')
def test_active(self):
tout = KeywordTimeout('42s', variables=VariableMock())
tout.start()
msg = tout.get_message()
assert_true(msg.startswith('Keyword timeout 42 seconds active.'), msg)
assert_true(msg.endswith('seconds left.'), msg)
def test_failed_default(self):
tout = TestTimeout('1s', variables=VariableMock())
tout.starttime = time.time() - 2
assert_equal(tout.get_message(), 'Test timeout 1 second exceeded.')
def test_failed_custom(self):
tout = KeywordTimeout('1s', 'Custom message', VariableMock())
tout.starttime = time.time() - 2
assert_equal(tout.get_message(), 'Custom message')
if __name__ == '__main__':
unittest.main()
|
{
"content_hash": "cc817d705276e443013683091519f871",
"timestamp": "",
"source": "github",
"line_count": 190,
"max_line_length": 94,
"avg_line_length": 36.78947368421053,
"alnum_prop": 0.6034334763948498,
"repo_name": "Senseg/robotframework",
"id": "fd32c32eefea9c7cf0c6ba47916de0f76c7c6c7d",
"size": "6990",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "utest/running/test_timeouts.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "716"
},
{
"name": "Java",
"bytes": "48873"
},
{
"name": "JavaScript",
"bytes": "149654"
},
{
"name": "Python",
"bytes": "1637427"
},
{
"name": "Shell",
"bytes": "1323"
}
],
"symlink_target": ""
}
|
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Bookmark'
db.create_table(u'xadmin_bookmark', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('title', self.gf('django.db.models.fields.CharField')(max_length=128)),
('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'], null=True, blank=True)),
('url_name', self.gf('django.db.models.fields.CharField')(max_length=64)),
('content_type', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['contenttypes.ContentType'])),
('query', self.gf('django.db.models.fields.CharField')(max_length=1000, blank=True)),
('is_share', self.gf('django.db.models.fields.BooleanField')(default=False)),
))
db.send_create_signal(u'xadmin', ['Bookmark'])
# Adding model 'UserSettings'
db.create_table(u'xadmin_usersettings', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])),
('key', self.gf('django.db.models.fields.CharField')(max_length=256)),
('value', self.gf('django.db.models.fields.TextField')()),
))
db.send_create_signal(u'xadmin', ['UserSettings'])
# Adding model 'UserWidget'
db.create_table(u'xadmin_userwidget', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])),
('page_id', self.gf('django.db.models.fields.CharField')(max_length=256)),
('widget_type', self.gf('django.db.models.fields.CharField')(max_length=50)),
('value', self.gf('django.db.models.fields.TextField')()),
))
db.send_create_signal(u'xadmin', ['UserWidget'])
def backwards(self, orm):
# Deleting model 'Bookmark'
db.delete_table(u'xadmin_bookmark')
# Deleting model 'UserSettings'
db.delete_table(u'xadmin_usersettings')
# Deleting model 'UserWidget'
db.delete_table(u'xadmin_userwidget')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'xadmin.bookmark': {
'Meta': {'object_name': 'Bookmark'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_share': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'query': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'url_name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
u'xadmin.usersettings': {
'Meta': {'object_name': 'UserSettings'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
'value': ('django.db.models.fields.TextField', [], {})
},
u'xadmin.userwidget': {
'Meta': {'object_name': 'UserWidget'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'page_id': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
'value': ('django.db.models.fields.TextField', [], {}),
'widget_type': ('django.db.models.fields.CharField', [], {'max_length': '50'})
}
}
complete_apps = ['xadmin']
|
{
"content_hash": "51b5be6988dbadedd42b0ea4c3d2a829",
"timestamp": "",
"source": "github",
"line_count": 117,
"max_line_length": 195,
"avg_line_length": 63.27350427350427,
"alnum_prop": 0.566392003241929,
"repo_name": "cgcgbcbc/django-xadmin",
"id": "bacc01b5360338cba27050fc610456eec5d95e0d",
"size": "7427",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "xadmin/migrations/0001_initial.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "23710"
},
{
"name": "HTML",
"bytes": "95406"
},
{
"name": "JavaScript",
"bytes": "66145"
},
{
"name": "Python",
"bytes": "417242"
}
],
"symlink_target": ""
}
|
import os
import json
import sys
def main():
if len(sys.argv) != 3:
print('Usage: write_labels.py labels_json out_dir', file=sys.stderr)
sys.exit(1)
labels_by_target = json.loads(sys.argv[1])
out = sys.argv[2]
for target_name, labels in labels_by_target.items():
with open(os.path.join(out, target_name + '.labels'), 'w') as f:
f.write('\n'.join(labels))
if __name__ == '__main__':
main()
|
{
"content_hash": "c02647f07ba5960f7eafe1c7575a71de",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 72,
"avg_line_length": 22.210526315789473,
"alnum_prop": 0.6184834123222749,
"repo_name": "FeliciaLim/oss-fuzz",
"id": "338ac716f3bfd4a549dc5a23ddc2cf3a18d3be9a",
"size": "442",
"binary": false,
"copies": "1",
"ref": "refs/heads/opus",
"path": "infra/base-images/base-builder/write_labels.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "7113"
},
{
"name": "C++",
"bytes": "29021"
},
{
"name": "Groovy",
"bytes": "8689"
},
{
"name": "HTML",
"bytes": "603"
},
{
"name": "Python",
"bytes": "25585"
},
{
"name": "Shell",
"bytes": "70002"
}
],
"symlink_target": ""
}
|
import logging
import datetime
import time
from modularodm import Q
from oauthlib.oauth2 import OAuth2Error
from dateutil.relativedelta import relativedelta
from framework.celery_tasks import app as celery_app
from scripts import utils as scripts_utils
from website.app import init_app
from website.addons.box.model import Box
from website.addons.googledrive.model import GoogleDriveProvider
from website.addons.mendeley.model import Mendeley
from website.oauth.models import ExternalAccount
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
PROVIDER_CLASSES = (Box, GoogleDriveProvider, Mendeley, )
def look_up_provider(addon_short_name):
for Provider in PROVIDER_CLASSES:
if Provider.short_name == addon_short_name:
return Provider
return None
def get_targets(delta, addon_short_name):
# NOTE: expires_at is the access_token's expiration date,
# NOT the refresh token's
return ExternalAccount.find(
Q('expires_at', 'lt', datetime.datetime.utcnow() - delta) &
Q('provider', 'eq', addon_short_name)
)
def main(delta, Provider, rate_limit, dry_run):
allowance = rate_limit[0]
last_call = time.time()
for record in get_targets(delta, Provider.short_name):
if Provider(record).has_expired_credentials:
logger.info(
'Found expired record {}, skipping'.format(record.__repr__())
)
continue
logger.info(
'Refreshing tokens on record {0}; expires at {1}'.format(
record.__repr__(),
record.expires_at.strftime('%c')
)
)
if not dry_run:
if allowance < 1:
try:
time.sleep(rate_limit[1] - (time.time() - last_call))
except ValueError:
pass # ValueError indicates a negative sleep time
allowance = rate_limit[0]
allowance -= 1
last_call = time.time()
success = False
try:
success = Provider(record).refresh_oauth_key(force=True)
except OAuth2Error as e:
logger.error(e)
else:
logger.info(
'Status of record {}: {}'.format(
record.__repr__(),
'SUCCESS' if success else 'FAILURE')
)
@celery_app.task(name='scripts.refresh_addon_tokens')
def run_main(addons=None, rate_limit=(5, 1), dry_run=True):
"""
:param dict addons: of form {'<addon_short_name>': int(<refresh_token validity duration in days>)}
:param tuple rate_limit: of form (<requests>, <seconds>). Default is five per second
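    Example (illustrative values only; the addon name and validity period are
    assumptions, not definitive):
        run_main(addons={'box': 60}, rate_limit=(5, 1), dry_run=False)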
"""
init_app(set_backends=True, routes=False)
if not dry_run:
scripts_utils.add_file_logger(logger, __file__)
for addon in addons:
try:
            days = int(addons[addon]) - 3 # refresh tokens that expire in the next three days
except (ValueError, TypeError):
days = 11 # OAuth2 spec's default refresh token expiry time is 14 days
delta = relativedelta(days=days)
Provider = look_up_provider(addon)
if not Provider:
            logger.error('Unable to find Provider class for addon {}'.format(addon))
else:
main(delta, Provider, rate_limit, dry_run=dry_run)
|
{
"content_hash": "e88aa484bcee6285fcb9fec5bbd2ab74",
"timestamp": "",
"source": "github",
"line_count": 97,
"max_line_length": 102,
"avg_line_length": 35.21649484536083,
"alnum_prop": 0.6086065573770492,
"repo_name": "mluke93/osf.io",
"id": "5df1b3e605e0b4600b5e7f793b6536538c47f4f3",
"size": "3457",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "scripts/refresh_addon_tokens.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "157414"
},
{
"name": "HTML",
"bytes": "110361"
},
{
"name": "JavaScript",
"bytes": "1625329"
},
{
"name": "Mako",
"bytes": "619022"
},
{
"name": "Perl",
"bytes": "13885"
},
{
"name": "Python",
"bytes": "5649842"
}
],
"symlink_target": ""
}
|
class Handler(object):
"""
"""
def __init__(self):
pass
    def return_service_details(self):
pass
|
{
"content_hash": "4f849ce6e0ad25851ccedcc55876b93a",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 33,
"avg_line_length": 12.6,
"alnum_prop": 0.47619047619047616,
"repo_name": "epheo/shaddock",
"id": "350e446a189cbbdbc169a76d8db1a92f591aa1f3",
"size": "827",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "shaddock/handler.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "588"
},
{
"name": "Jinja",
"bytes": "2283"
},
{
"name": "Python",
"bytes": "58659"
},
{
"name": "Shell",
"bytes": "4129"
}
],
"symlink_target": ""
}
|
"""Tests for the default Windows Registry plugin."""
import unittest
# pylint: disable=unused-import
from plaso.formatters import winreg as winreg_formatter
from plaso.parsers.winreg_plugins import default
from plaso.parsers.winreg_plugins import test_lib
from plaso.winreg import test_lib as winreg_test_lib
class TestDefaultRegistry(test_lib.RegistryPluginTestCase):
"""Tests for the default Windows Registry plugin."""
def setUp(self):
"""Sets up the needed objects used throughout the test."""
self._plugin = default.DefaultPlugin()
def testProcess(self):
"""Tests the Process function."""
key_path = u'\\Microsoft\\Some Windows\\InterestingApp\\MRU'
values = []
values.append(winreg_test_lib.TestRegValue(
'MRUList', 'acb'.encode('utf_16_le'), 1, 123))
values.append(winreg_test_lib.TestRegValue(
'a', 'Some random text here'.encode('utf_16_le'), 1, 1892))
values.append(winreg_test_lib.TestRegValue(
'b', 'c:/evil.exe'.encode('utf_16_le'), 3, 612))
values.append(winreg_test_lib.TestRegValue(
'c', 'C:/looks_legit.exe'.encode('utf_16_le'), 1, 1001))
winreg_key = winreg_test_lib.TestRegKey(
key_path, 1346145829002031, values, 1456)
event_generator = self._ParseKeyWithPlugin(self._plugin, winreg_key)
event_objects = self._GetEventObjects(event_generator)
self.assertEquals(len(event_objects), 1)
event_object = event_objects[0]
self.assertEquals(event_object.timestamp, 1346145829002031)
expected_msg = (
u'[{0:s}] '
u'MRUList: [REG_SZ] acb '
u'a: [REG_SZ] Some random text here '
u'b: [REG_BINARY] '
u'c: [REG_SZ] C:/looks_legit.exe').format(key_path)
expected_msg_short = (
u'[{0:s}] MRUList: [REG_SZ] acb a: [REG_SZ...').format(key_path)
self._TestGetMessageStrings(event_object, expected_msg, expected_msg_short)
if __name__ == '__main__':
unittest.main()
|
{
"content_hash": "b1183452af829464db6bfd20a4d2b0ab",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 79,
"avg_line_length": 33.6551724137931,
"alnum_prop": 0.6680327868852459,
"repo_name": "iwm911/plaso",
"id": "9989058a221bafc6d07395d08fe9b58ad9a76cc1",
"size": "2650",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "plaso/parsers/winreg_plugins/default_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "2431825"
},
{
"name": "Shell",
"bytes": "21885"
},
{
"name": "VHDL",
"bytes": "2100224"
}
],
"symlink_target": ""
}
|
from sklearn.ensemble import RandomForestClassifier
def get_estimator():
clf = RandomForestClassifier(n_estimators=1, max_leaf_nodes=2,
random_state=61)
return clf
|
{
"content_hash": "d14fb5756ac931b7164d218a6b89db7d",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 66,
"avg_line_length": 29.571428571428573,
"alnum_prop": 0.6473429951690821,
"repo_name": "paris-saclay-cds/ramp-workflow",
"id": "661d88c2b720a0567c7e407453898f451f6c0577",
"size": "207",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "rampwf/tests/kits/iris_data_label/submissions/starting_kit/estimator.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "115957"
},
{
"name": "Makefile",
"bytes": "369"
},
{
"name": "Python",
"bytes": "354774"
},
{
"name": "Shell",
"bytes": "3960"
}
],
"symlink_target": ""
}
|
"""\
=========================
Abstract SMTP Mailer Core
=========================
This component effectively forms the skeleton of an SMTP server. It expects
an SMTP client to connect and send various SMTP requests to it. This basic
SMTP Mailer Core, however, does not actually do anything in response to any
of the SMTP commands it expects.
Each SMTP command is actually given a dummy callback which more customised
SMTP protocol handlers are expected to override. Beyond this, this component
is expected to be used as a protocol handler for ServerCore.
Fundamentally, this component handles the command/response structure of SMTP
fairly directly, but expects the brains of the protocol to be implemented by
a more intelligent subclass.
Example Usage
-------------
Whilst this will work to a minimal extent::
ServerCore(protocol=MailHandler, port=1025)
This will not actually form a very interesting SMTP server, nor an SMTP
compliant one, since whilst it will tell you about commands it doesn't
understand, it will not do anything interesting.
You are as noted expected to subclass MailHandler. For a better example
of how to subclass MailHandler, you are advised to look at
Kamaelia.Apps.ConcreteMailHandler.ConcreteMailHandler
Note
----
This component is not complete - you are expected to subclass it to finish
it off as you need. Specifically it does not implement the following:
- It does not enforce "this command followed by that command"
- It does not actually do anything with any DATA a client sends you
- It neither performs local mail delivery nor proxying - you'd need to implement this yourself.
How does it work?
-----------------
The component is expected to be connected to a client TCP connection by
ServerCore, such that messages from the network arrive on inbox "inbox", and
outgoing messages get sent to outbox "outbox"
The component will terminate if any of these is true:
- The client breaks the connection
- One of the methods sets self.breakConnection to True.
- If a "socketShutdown" message arrives on inbox "control"
The main() method divides the connection into effectively two main states:
- accepting random commands prior to getting a DATA command
- accepting the email during a DATA command
SMTP commands are specifically dispatched to a particular handler for that
command. In this component none of the handlers do anything interesting.
Configuration
-------------
The abstract mailer supports some basic config settings:
- logfile - path/filename where requests should get logged
- debuglogfile - path/filename where the debug log should be written.
Methods you are expected to override
------------------------------------
Whilst you are probably better off subclassing ConcreteMailHandler, you will
probably need to override the following methods in a subclass if you
subclass MailHandler directly.
- handleConnect(self)
- handleHelo(self,command)
- handleEhlo(self,command)
- handleMail(self,command)
- handleRcpt(self,command)
- handleData(self,command)
- handleQuit(self,command)
- handleRset(self,command)
- handleNoop(self,command)
- handleVrfy(self,command)
- handleHelp(self,command)
- logResult(self)
- handleDisconnect(self)
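Example subclass (sketch)
-------------------------
As a purely illustrative sketch (the class name, greeting strings and log
file names below are hypothetical, not part of this module), a minimal
subclass might look like::
    class NoopMailHandler(MailHandler):
        logfile = "noop.log"
        debuglogfile = "noop-debug.log"
        def handleConnect(self):
            self.netPrint("220 mail.example.com ESMTP")
        def handleHelo(self, command):
            self.netPrint("250 mail.example.com")
        def handleEhlo(self, command):
            self.netPrint("250 mail.example.com")
        def handleQuit(self, command):
            self.netPrint("221 mail.example.com closing connection")
            self.breakConnection = True
which you would then serve in the same way as above::
    ServerCore(protocol=NoopMailHandler, port=1025)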
"""
import Axon
from Axon.Ipc import producerFinished, WaitComplete
from Kamaelia.IPC import socketShutdown
class MailHandler(Axon.Component.component):
logfile = "greylist.log"
debuglogfile = "greylist-debug.log"
def __init__(self,**argd):
super(MailHandler, self).__init__(**argd)
self.inbox_log = []
self.line = None
def logging_recv_connection(self):
self.line = self.recv("inbox")
self.inbox_log.append(self.line)
def getline(self):
control_message = ""
while 1:
while not self.anyReady():
self.pause(); # print "PAUSING", repr(self.inbox_log), repr(self.line)
yield 1
while self.dataReady("control"):
control_message = self.recv("control")
if isinstance(control_message, socketShutdown):
self.client_connected = False
if self.dataReady("inbox"):
self.logging_recv_connection()
return
else:
if not self.client_connected :
self.breakConnection = True
return
yield 1
def handleCommand(self,command):
if len(command) < 1:
self.netPrint("500 Sorry we don't like broken mailers")
self.breakConnection = True
return
if command[0] == "HELO": return self.handleHelo(command) # RFC 2821 4.5.1 required
if command[0] == "EHLO": return self.handleEhlo(command) # RFC 2821 4.5.1 required
if command[0] == "MAIL": return self.handleMail(command) # RFC 2821 4.5.1 required
if command[0] == "RCPT": return self.handleRcpt(command) # RFC 2821 4.5.1 required
if command[0] == "DATA": return self.handleData(command) # RFC 2821 4.5.1 required
if command[0] == "QUIT": return self.handleQuit(command) # RFC 2821 4.5.1 required
if command[0] == "RSET": return self.handleRset(command) # RFC 2821 4.5.1 required
if command[0] == "NOOP": return self.handleNoop(command) # RFC 2821 4.5.1 required
if command[0] == "VRFY": return self.handleVrfy(command) # RFC 2821 4.5.1 required
if command[0] == "HELP": return self.handleHelp(command)
self.netPrint("500 Sorry we don't like broken mailers")
self.breakConnection = True
def noteToLog(self, line):
try:
x = open(self.logfile,"a")
except IOError:
x = open(self.logfile,"w")
x.write(line+"\n")
x.flush()
x.close()
def noteToDebugLog(self, line):
try:
x = open(self.debuglogfile,"a")
except IOError:
x = open(self.debuglogfile,"w")
x.write(line+"\n")
x.flush()
x.close()
def netPrint(self, *args):
for i in args:
self.noteToDebugLog(i)
self.send(i+"\r\n", "outbox")
def handleConnect(self): pass
def handleHelo(self,command): pass
def handleEhlo(self,command): pass
def handleMail(self,command): pass
def handleRcpt(self,command): pass
def handleData(self,command): pass
def handleQuit(self,command): pass
def handleRset(self,command): pass
def handleNoop(self,command): pass
def handleVrfy(self,command): pass
def handleHelp(self,command): pass
def logResult(self): pass
def handleDisconnect(self): yield 1
def lastline(self):
if self.line == ".\r\n":
return True
        if len(self.line) >=5:
            if self.line[-5:] == "\r\n.\r\n":
                return True
if len(self.line) >=4:
if self.line[-4:] == "\n.\r\n":
return True
return False
def main(self):
brokenClient = False
self.handleConnect()
self.gettingdata = False
self.client_connected = True
self.breakConnection = False
while (not self.gettingdata) and (not self.breakConnection):
yield WaitComplete(self.getline(), tag="_getline1")
try:
command = self.line.split()
except AttributeError:
brokenClient = True
break
self.handleCommand(command)
if not brokenClient:
if (not self.breakConnection):
EndOfMessage = False
self.netPrint('354 Enter message, ending with "." on a line by itself')
while not EndOfMessage:
yield WaitComplete(self.getline(), tag="getline2")
if self.lastline():
EndOfMessage = True
self.netPrint("250 OK id-deferred")
self.send(producerFinished(),"signal")
if not brokenClient:
yield WaitComplete(self.handleDisconnect(),tag="_handleDisconnect")
self.logResult()
__kamaelia_components__ = ( MailHandler, )
|
{
"content_hash": "b94ccdf8893d05b1ac1e3f0f8db14404",
"timestamp": "",
"source": "github",
"line_count": 240,
"max_line_length": 96,
"avg_line_length": 34.36666666666667,
"alnum_prop": 0.6418525703200776,
"repo_name": "sparkslabs/kamaelia",
"id": "423fe97d2916817ee994ccd788fb7fac68b34fc9",
"size": "9204",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "Code/Python/Kamaelia/Kamaelia/Apps/Grey/MailHandler.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "3814"
},
{
"name": "C",
"bytes": "212854"
},
{
"name": "C++",
"bytes": "327546"
},
{
"name": "CSS",
"bytes": "114434"
},
{
"name": "ChucK",
"bytes": "422"
},
{
"name": "HTML",
"bytes": "1288960"
},
{
"name": "Java",
"bytes": "31832"
},
{
"name": "JavaScript",
"bytes": "829491"
},
{
"name": "M4",
"bytes": "12224"
},
{
"name": "Makefile",
"bytes": "150947"
},
{
"name": "NSIS",
"bytes": "18867"
},
{
"name": "OCaml",
"bytes": "643"
},
{
"name": "PHP",
"bytes": "49059"
},
{
"name": "Perl",
"bytes": "504"
},
{
"name": "Processing",
"bytes": "2885"
},
{
"name": "Python",
"bytes": "18900785"
},
{
"name": "Ruby",
"bytes": "4165"
},
{
"name": "Shell",
"bytes": "707588"
}
],
"symlink_target": ""
}
|
from django import test
from django.contrib.contenttypes.fields import (
GenericForeignKey, GenericRelation,
)
from django.db import models
from django.db.models.fields.related import (
ForeignKey, ForeignObject, ForeignObjectRel, ManyToManyField, ManyToOneRel,
OneToOneField,
)
from .models import AllFieldsModel
NON_CONCRETE_FIELDS = (
ForeignObject,
GenericForeignKey,
GenericRelation,
)
NON_EDITABLE_FIELDS = (
models.BinaryField,
GenericForeignKey,
GenericRelation,
)
RELATION_FIELDS = (
ForeignKey,
ForeignObject,
ManyToManyField,
OneToOneField,
GenericForeignKey,
GenericRelation,
)
MANY_TO_MANY_CLASSES = {
ManyToManyField,
}
MANY_TO_ONE_CLASSES = {
ForeignObject,
ForeignKey,
GenericForeignKey,
}
ONE_TO_MANY_CLASSES = {
ForeignObjectRel,
ManyToOneRel,
GenericRelation,
}
ONE_TO_ONE_CLASSES = {
OneToOneField,
}
FLAG_PROPERTIES = (
'concrete',
'editable',
'is_relation',
'model',
'hidden',
'one_to_many',
'many_to_one',
'many_to_many',
'one_to_one',
'related_model',
)
FLAG_PROPERTIES_FOR_RELATIONS = (
'one_to_many',
'many_to_one',
'many_to_many',
'one_to_one',
)
class FieldFlagsTests(test.SimpleTestCase):
@classmethod
def setUpClass(cls):
super(FieldFlagsTests, cls).setUpClass()
cls.fields = (
list(AllFieldsModel._meta.fields) +
list(AllFieldsModel._meta.virtual_fields)
)
cls.all_fields = (
cls.fields +
list(AllFieldsModel._meta.many_to_many) +
list(AllFieldsModel._meta.virtual_fields)
)
cls.fields_and_reverse_objects = (
cls.all_fields +
list(AllFieldsModel._meta.related_objects)
)
def test_each_field_should_have_a_concrete_attribute(self):
self.assertTrue(all(f.concrete.__class__ == bool for f in self.fields))
def test_each_field_should_have_an_editable_attribute(self):
self.assertTrue(all(f.editable.__class__ == bool for f in self.all_fields))
def test_each_field_should_have_a_has_rel_attribute(self):
self.assertTrue(all(f.is_relation.__class__ == bool for f in self.all_fields))
def test_each_object_should_have_auto_created(self):
self.assertTrue(
all(f.auto_created.__class__ == bool for f in self.fields_and_reverse_objects)
)
def test_non_concrete_fields(self):
for field in self.fields:
if type(field) in NON_CONCRETE_FIELDS:
self.assertFalse(field.concrete)
else:
self.assertTrue(field.concrete)
def test_non_editable_fields(self):
for field in self.all_fields:
if type(field) in NON_EDITABLE_FIELDS:
self.assertFalse(field.editable)
else:
self.assertTrue(field.editable)
def test_related_fields(self):
for field in self.all_fields:
if type(field) in RELATION_FIELDS:
self.assertTrue(field.is_relation)
else:
self.assertFalse(field.is_relation)
def test_field_names_should_always_be_available(self):
for field in self.fields_and_reverse_objects:
self.assertTrue(field.name)
def test_all_field_types_should_have_flags(self):
for field in self.fields_and_reverse_objects:
for flag in FLAG_PROPERTIES:
self.assertTrue(hasattr(field, flag), "Field %s does not have flag %s" % (field, flag))
if field.is_relation:
true_cardinality_flags = sum(
getattr(field, flag) is True
for flag in FLAG_PROPERTIES_FOR_RELATIONS
)
# If the field has a relation, there should be only one of the
# 4 cardinality flags available.
self.assertEqual(1, true_cardinality_flags)
def test_cardinality_m2m(self):
m2m_type_fields = [
f for f in self.all_fields
if f.is_relation and f.many_to_many
]
# Test classes are what we expect
self.assertEqual(MANY_TO_MANY_CLASSES, {f.__class__ for f in m2m_type_fields})
# Ensure all m2m reverses are m2m
for field in m2m_type_fields:
reverse_field = field.remote_field
self.assertTrue(reverse_field.is_relation)
self.assertTrue(reverse_field.many_to_many)
self.assertTrue(reverse_field.related_model)
def test_cardinality_o2m(self):
o2m_type_fields = [
f for f in self.fields_and_reverse_objects
if f.is_relation and f.one_to_many
]
# Test classes are what we expect
self.assertEqual(ONE_TO_MANY_CLASSES, {f.__class__ for f in o2m_type_fields})
# Ensure all o2m reverses are m2o
for field in o2m_type_fields:
if field.concrete:
reverse_field = field.remote_field
self.assertTrue(reverse_field.is_relation and reverse_field.many_to_one)
def test_cardinality_m2o(self):
m2o_type_fields = [
f for f in self.fields_and_reverse_objects
if f.is_relation and f.many_to_one
]
# Test classes are what we expect
self.assertEqual(MANY_TO_ONE_CLASSES, {f.__class__ for f in m2o_type_fields})
# Ensure all m2o reverses are o2m
for obj in m2o_type_fields:
if hasattr(obj, 'field'):
reverse_field = obj.field
self.assertTrue(reverse_field.is_relation and reverse_field.one_to_many)
def test_cardinality_o2o(self):
o2o_type_fields = [
f for f in self.all_fields
if f.is_relation and f.one_to_one
]
# Test classes are what we expect
self.assertEqual(ONE_TO_ONE_CLASSES, {f.__class__ for f in o2o_type_fields})
# Ensure all o2o reverses are o2o
for obj in o2o_type_fields:
if hasattr(obj, 'field'):
reverse_field = obj.field
self.assertTrue(reverse_field.is_relation and reverse_field.one_to_one)
def test_hidden_flag(self):
incl_hidden = set(AllFieldsModel._meta.get_fields(include_hidden=True))
no_hidden = set(AllFieldsModel._meta.get_fields())
fields_that_should_be_hidden = (incl_hidden - no_hidden)
for f in incl_hidden:
self.assertEqual(f in fields_that_should_be_hidden, f.hidden)
def test_model_and_reverse_model_should_equal_on_relations(self):
for field in AllFieldsModel._meta.get_fields():
is_concrete_forward_field = field.concrete and field.related_model
if is_concrete_forward_field:
reverse_field = field.remote_field
self.assertEqual(field.model, reverse_field.related_model)
self.assertEqual(field.related_model, reverse_field.model)
def test_null(self):
# null isn't well defined for a ManyToManyField, but changing it to
# True causes backwards compatibility problems (#25320).
self.assertFalse(AllFieldsModel._meta.get_field('m2m').null)
self.assertTrue(AllFieldsModel._meta.get_field('reverse2').null)
|
{
"content_hash": "22b76509e97aaef47de54d062d29f5f8",
"timestamp": "",
"source": "github",
"line_count": 223,
"max_line_length": 103,
"avg_line_length": 32.762331838565025,
"alnum_prop": 0.6167533534081577,
"repo_name": "indevgr/django",
"id": "33f49a139dee117c6615bbb64cc81b847c7c5904",
"size": "7306",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/model_fields/test_field_flags.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "52294"
},
{
"name": "HTML",
"bytes": "174530"
},
{
"name": "JavaScript",
"bytes": "248130"
},
{
"name": "Makefile",
"bytes": "125"
},
{
"name": "Python",
"bytes": "11350632"
},
{
"name": "Shell",
"bytes": "809"
},
{
"name": "Smarty",
"bytes": "130"
}
],
"symlink_target": ""
}
|
from mongoengine import ValidationError
import six
from st2api.controllers import resource
from st2common.exceptions.db import StackStormDBObjectNotFoundError
from st2common import log as logging
from st2common.content import utils
from st2common.models.api.action import ActionAPI
from st2common.models.utils import action_param_utils
from st2common.persistence.action import Action
from st2common.persistence.runner import RunnerType
from st2common.rbac.types import PermissionType
from st2common.rbac import utils as rbac_utils
from st2common.router import abort
__all__ = [
'OverviewController',
'ParametersViewController',
'EntryPointController'
]
http_client = six.moves.http_client
LOG = logging.getLogger(__name__)
class LookupUtils(object):
@staticmethod
def _get_action_by_id(id):
try:
return Action.get_by_id(id)
except Exception as e:
msg = 'Database lookup for id="%s" resulted in exception. %s' % (id, e)
LOG.exception(msg)
abort(http_client.NOT_FOUND, msg)
@staticmethod
def _get_runner_by_id(id):
try:
return RunnerType.get_by_id(id)
except (ValueError, ValidationError) as e:
msg = 'Database lookup for id="%s" resulted in exception. %s' % (id, e)
LOG.exception(msg)
abort(http_client.NOT_FOUND, msg)
@staticmethod
def _get_runner_by_name(name):
try:
return RunnerType.get_by_name(name)
except (ValueError, ValidationError) as e:
            msg = 'Database lookup for name="%s" resulted in exception. %s' % (name, e)
LOG.exception(msg)
abort(http_client.NOT_FOUND, msg)
class ParametersViewController(object):
def get_one(self, action_id, requester_user):
return self._get_one(action_id, requester_user=requester_user)
@staticmethod
def _get_one(action_id, requester_user):
"""
List merged action & runner parameters by action id.
Handle:
GET /actions/views/parameters/1
"""
action_db = LookupUtils._get_action_by_id(action_id)
LOG.info('Found action: %s, runner: %s', action_db, action_db.runner_type['name'])
permission_type = PermissionType.ACTION_VIEW
rbac_utils.assert_user_has_resource_db_permission(user_db=requester_user,
resource_db=action_db,
permission_type=permission_type)
runner_db = LookupUtils._get_runner_by_name(action_db.runner_type['name'])
all_params = action_param_utils.get_params_view(
action_db=action_db, runner_db=runner_db, merged_only=True)
return {'parameters': all_params}
class OverviewController(resource.ContentPackResourceController):
model = ActionAPI
access = Action
supported_filters = {}
query_options = {
'sort': ['pack', 'name']
}
include_reference = True
def get_one(self, ref_or_id, requester_user):
"""
List action by id.
Handle:
GET /actions/views/overview/1
"""
resp = super(OverviewController, self)._get_one(ref_or_id,
requester_user=requester_user,
permission_type=PermissionType.ACTION_VIEW)
action_api = ActionAPI(**resp.json)
result = self._transform_action_api(action_api=action_api, requester_user=requester_user)
resp.json = result
return resp
def get_all(self, sort=None, offset=0, limit=None, requester_user=None, **raw_filters):
"""
List all actions.
Handles requests:
GET /actions/views/overview
"""
resp = super(OverviewController, self)._get_all(sort=sort,
offset=offset,
limit=limit,
raw_filters=raw_filters)
result = []
for item in resp.json:
action_api = ActionAPI(**item)
result.append(self._transform_action_api(action_api=action_api,
requester_user=requester_user))
resp.json = result
return resp
@staticmethod
def _transform_action_api(action_api, requester_user):
action_id = action_api.id
result = ParametersViewController._get_one(action_id=action_id,
requester_user=requester_user)
action_api.parameters = result.get('parameters', {})
return action_api
class EntryPointController(resource.ContentPackResourceController):
model = ActionAPI
access = Action
supported_filters = {}
def get_all(self):
return abort(404)
def get_one(self, ref_or_id, requester_user):
"""
Outputs the file associated with action entry_point
Handles requests:
GET /actions/views/entry_point/1
"""
LOG.info('GET /actions/views/entry_point with ref_or_id=%s', ref_or_id)
action_db = self._get_by_ref_or_id(ref_or_id=ref_or_id)
permission_type = PermissionType.ACTION_VIEW
rbac_utils.assert_user_has_resource_db_permission(user_db=requester_user,
resource_db=action_db,
permission_type=permission_type)
pack = getattr(action_db, 'pack', None)
entry_point = getattr(action_db, 'entry_point', None)
abs_path = utils.get_entry_point_abs_path(pack, entry_point)
if not abs_path:
raise StackStormDBObjectNotFoundError('Action ref_or_id=%s has no entry_point to output'
% ref_or_id)
with open(abs_path) as file:
content = file.read()
return content
class ActionViewsController(object):
parameters = ParametersViewController()
overview = OverviewController()
entry_point = EntryPointController()
parameters_view_controller = ParametersViewController()
overview_controller = OverviewController()
entry_point_controller = EntryPointController()
|
{
"content_hash": "55cc651d9e515d3e101972534f768a4e",
"timestamp": "",
"source": "github",
"line_count": 186,
"max_line_length": 100,
"avg_line_length": 34.73118279569893,
"alnum_prop": 0.5863777089783282,
"repo_name": "tonybaloney/st2",
"id": "dc89925827272a276b2fb5bc7ab4eea268217677",
"size": "7240",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "st2api/st2api/controllers/v1/actionviews.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "198"
},
{
"name": "Makefile",
"bytes": "46066"
},
{
"name": "PowerShell",
"bytes": "299"
},
{
"name": "Python",
"bytes": "4278891"
},
{
"name": "Shell",
"bytes": "47687"
},
{
"name": "Slash",
"bytes": "677"
}
],
"symlink_target": ""
}
|
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
from cmsplugin_filer_utils.migration import rename_tables_new_to_old
class Migration(DataMigration):
cms_plugin_table_mapping = (
# (old_name, new_name),
('cmsplugin_filerfolder', 'cmsplugin_filer_folder_filerfolder'),
)
needed_by = (
("cms", "0069_static_placeholder_permissions"),
)
def forwards(self, orm):
rename_tables_new_to_old(db, self.cms_plugin_table_mapping)
# rename_tables_new_to_old(db, self.cms_plugin_table_mapping)
for obj in orm['cmsplugin_filer_folder.filerfolder'].objects.all():
obj.style = obj.view_option
obj.save()
def backwards(self, orm):
rename_tables_new_to_old(db, self.cms_plugin_table_mapping)
for obj in orm['cmsplugin_filer_folder.filerfolder'].objects.all():
obj.view_option = obj.style
obj.save()
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'cms.cmsplugin': {
'Meta': {'object_name': 'CMSPlugin'},
'changed_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.CMSPlugin']", 'null': 'True', 'blank': 'True'}),
'placeholder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'}),
'plugin_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
'position': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
'cms.placeholder': {
'Meta': {'object_name': 'Placeholder'},
'default_width': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slot': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'})
},
u'cmsplugin_filer_folder.filerfolder': {
'Meta': {'object_name': 'FilerFolder', 'db_table': "'cmsplugin_filerfolder'", '_ormbases': ['cms.CMSPlugin']},
u'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['cms.CMSPlugin']", 'unique': 'True', 'primary_key': 'True'}),
'folder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['filer.Folder']"}),
'style': ('django.db.models.fields.CharField', [], {'default': "'list'", 'max_length': '50'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'view_option': ('django.db.models.fields.CharField', [], {'default': "'x'", 'max_length': '10'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'filer.folder': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('parent', 'name'),)", 'object_name': 'Folder'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'filer_owned_folders'", 'null': 'True', 'to': u"orm['auth.User']"}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['filer.Folder']"}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'uploaded_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
}
}
complete_apps = ['cmsplugin_filer_folder']
symmetrical = True
|
{
"content_hash": "cb4a9ebeec2cd2d7426b0b9ce80a5aa5",
"timestamp": "",
"source": "github",
"line_count": 114,
"max_line_length": 187,
"avg_line_length": 71.48245614035088,
"alnum_prop": 0.5625230089581543,
"repo_name": "Venturi/oldcms",
"id": "2f26ffda3af2e527f5d97fae2ac0b99d940bbd80",
"size": "8173",
"binary": false,
"copies": "10",
"ref": "refs/heads/master",
"path": "env/lib/python2.7/site-packages/cmsplugin_filer_folder/migrations/0003_move_view_option_to_style.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "40171"
},
{
"name": "CSS",
"bytes": "418090"
},
{
"name": "HTML",
"bytes": "467117"
},
{
"name": "JavaScript",
"bytes": "916100"
},
{
"name": "PHP",
"bytes": "2231"
},
{
"name": "Python",
"bytes": "15786894"
},
{
"name": "Ruby",
"bytes": "990"
},
{
"name": "Shell",
"bytes": "3743"
},
{
"name": "XSLT",
"bytes": "157892"
}
],
"symlink_target": ""
}
|
"""Tests for reconstruction_ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib.signal.python.ops import reconstruction_ops
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gradients_impl
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import test
class ReconstructionOpsTest(test.TestCase):
def __init__(self, *args, **kwargs):
super(ReconstructionOpsTest, self).__init__(*args, **kwargs)
self.batch_size = 3
self.frames = 3
self.samples = 5
self.bases = np.array(range(2, 5))
exponents = np.array(range(self.frames * self.samples))
powers = np.power(self.bases[:, np.newaxis], exponents[np.newaxis, :])
self.powers = np.reshape(powers, [self.batch_size, self.frames,
self.samples])
self.frame_hop = 2
# Hand computed example using powers of unique numbers: this is easily
# verified.
self.expected_string = ["1", "10", "100100", "1001000", "10010010000",
"100100000000", "1001000000000", "10000000000000",
"100000000000000"]
def test_all_ones(self):
signal = constant_op.constant(np.ones((3, 5)), dtype=dtypes.int64)
reconstruction = reconstruction_ops.overlap_and_add(signal, 2)
with self.session(use_gpu=True) as sess:
output = sess.run(reconstruction)
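    # Positions covered by one, two, or three of the all-ones frames sum to
    # 1, 2, or 3 respectively (frame length 5, hop 2), hence the triangular
    # shape of the expected output.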
expected_output = np.array([1, 1, 2, 2, 3, 2, 2, 1, 1])
self.assertAllClose(output, expected_output)
def test_simple(self):
def make_input(frame_length, num_frames=3):
"""Generate a tensor of num_frames frames of frame_length."""
return np.reshape(np.arange(1, num_frames * frame_length + 1),
(-1, frame_length))
# List of (signal, expected_result, frame_hop).
configurations = [
# All hop lengths on a frame length of 2.
(make_input(2), [1, 5, 9, 6], 1),
(make_input(2), [1, 2, 3, 4, 5, 6], 2),
# All hop lengths on a frame length of 3.
(make_input(3), [1, 6, 15, 14, 9], 1),
(make_input(3), [1, 2, 7, 5, 13, 8, 9], 2),
(make_input(3), [1, 2, 3, 4, 5, 6, 7, 8, 9], 3),
# All hop lengths on a frame length of 4.
(make_input(4), [1, 7, 18, 21, 19, 12], 1),
(make_input(4), [1, 2, 8, 10, 16, 18, 11, 12], 2),
(make_input(4), [1, 2, 3, 9, 6, 7, 17, 10, 11, 12], 3),
(make_input(4), [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12], 4),
]
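    # For instance, frames [[1, 2], [3, 4], [5, 6]] with a hop of 1 overlap as
    # [1, 2 + 3, 4 + 5, 6] = [1, 5, 9, 6], the first configuration above.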
with self.session(use_gpu=True):
for signal, expected, frame_hop in configurations:
reconstruction = reconstruction_ops.overlap_and_add(
np.array(signal), frame_hop).eval()
expected_output = np.array(expected)
self.assertAllClose(reconstruction, expected_output)
def test_powers(self):
signal = constant_op.constant(np.squeeze(self.powers[0, :, :]),
dtype=dtypes.int64)
reconstruction = reconstruction_ops.overlap_and_add(signal, self.frame_hop)
with self.session(use_gpu=True) as sess:
output = sess.run(reconstruction)
string_output = [np.base_repr(x, self.bases[0]) for x in output]
self.assertEqual(string_output, self.expected_string)
def test_batch(self):
signal = constant_op.constant(self.powers, dtype=dtypes.int64)
reconstruction = reconstruction_ops.overlap_and_add(signal, self.frame_hop)
with self.session(use_gpu=True) as sess:
output = sess.run(reconstruction)
accumulator = True
for i in range(self.batch_size):
string_output = [np.base_repr(x, self.bases[i]) for x in output[i, :]]
accumulator = accumulator and (string_output == self.expected_string)
self.assertTrue(accumulator)
def test_one_element_batch(self):
input_matrix = np.squeeze(self.powers[0, :, :])
input_matrix = input_matrix[np.newaxis, :, :].astype(float)
signal = constant_op.constant(input_matrix, dtype=dtypes.float32)
reconstruction = reconstruction_ops.overlap_and_add(signal, self.frame_hop)
with self.session(use_gpu=True) as sess:
output = sess.run(reconstruction)
string_output = [np.base_repr(int(x), self.bases[0]) for x in
np.squeeze(output)]
self.assertEqual(output.shape, (1, 9))
self.assertEqual(string_output, self.expected_string)
def test_gradient(self):
configurations = [
((1, 128), 1),
((5, 35), 17),
((10, 128), 128),
((2, 10, 128), 127),
((2, 2, 10, 128), 126),
((2, 2, 2, 10, 128), 125),
]
with self.session(use_gpu=True) as sess:
for shape, frame_hop in configurations:
signal = array_ops.zeros(shape)
reconstruction = reconstruction_ops.overlap_and_add(signal, frame_hop)
loss = math_ops.reduce_sum(reconstruction)
# Increasing any sample in the input frames by one will increase the sum
# of all the samples in the reconstruction by 1, so the gradient should
# be all ones, no matter the shape or hop.
gradient = sess.run(gradients_impl.gradients([loss], [signal])[0])
self.assertTrue((gradient == 1.0).all())
def test_gradient_batch(self):
with self.session(use_gpu=True) as sess:
signal = array_ops.zeros((2, 10, 10))
frame_hop = 10
reconstruction = reconstruction_ops.overlap_and_add(signal, frame_hop)
# Multiply the first batch-item's reconstruction by zeros. This will block
# gradient from flowing into the first batch item from the loss. Multiply
# the second batch item by the integers from 0 to 99. Since there is zero
# overlap, the gradient for this batch item will be 0-99 shaped as (10,
# 10).
reconstruction *= array_ops.stack(
[array_ops.zeros((100,)), math_ops.to_float(math_ops.range(100))])
loss = math_ops.reduce_sum(reconstruction)
# Verify that only the second batch item receives gradient.
gradient = sess.run(gradients_impl.gradients([loss], [signal])[0])
expected_gradient = np.stack([
np.zeros((10, 10)),
np.reshape(np.arange(100).astype(np.float32), (10, 10))])
self.assertAllEqual(expected_gradient, gradient)
def test_gradient_numerical(self):
with self.session(use_gpu=True):
shape = (2, 10, 10)
framed_signal = array_ops.zeros(shape)
frame_hop = 10
reconstruction = reconstruction_ops.overlap_and_add(
framed_signal, frame_hop)
error = test.compute_gradient_error(
framed_signal, shape, reconstruction, [2, 100])
self.assertLess(error, 2e-5)
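# A minimal NumPy sketch of the overlap-add behavior exercised above: each
# frame is shifted by frame_hop samples and summed into an output buffer.
# This is only a reference for reading the tests, not the library kernel.
def _overlap_and_add_reference(frames, frame_hop):
  """Overlap-adds 2-D `frames` of shape (num_frames, frame_length)."""
  num_frames, frame_length = frames.shape
  output_length = frame_length + frame_hop * (num_frames - 1)
  output = np.zeros(output_length, dtype=frames.dtype)
  for i in range(num_frames):
    output[i * frame_hop:i * frame_hop + frame_length] += frames[i]
  return output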
if __name__ == "__main__":
test.main()
|
{
"content_hash": "040f35dc604602ad79efda49849027dd",
"timestamp": "",
"source": "github",
"line_count": 178,
"max_line_length": 80,
"avg_line_length": 38.668539325842694,
"alnum_prop": 0.6280691558913265,
"repo_name": "dongjoon-hyun/tensorflow",
"id": "c476cd4e00d621bbf694b27236d82f18e8699c8c",
"size": "7572",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "tensorflow/contrib/signal/python/kernel_tests/reconstruction_ops_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "3301"
},
{
"name": "Batchfile",
"bytes": "10132"
},
{
"name": "C",
"bytes": "446293"
},
{
"name": "C#",
"bytes": "8446"
},
{
"name": "C++",
"bytes": "50950243"
},
{
"name": "CMake",
"bytes": "198845"
},
{
"name": "Dockerfile",
"bytes": "36908"
},
{
"name": "Go",
"bytes": "1285854"
},
{
"name": "HTML",
"bytes": "4681865"
},
{
"name": "Java",
"bytes": "869263"
},
{
"name": "Jupyter Notebook",
"bytes": "2611125"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "Makefile",
"bytes": "62216"
},
{
"name": "Objective-C",
"bytes": "15634"
},
{
"name": "Objective-C++",
"bytes": "101475"
},
{
"name": "PHP",
"bytes": "5191"
},
{
"name": "Perl",
"bytes": "7536"
},
{
"name": "PureBasic",
"bytes": "25356"
},
{
"name": "Python",
"bytes": "40335927"
},
{
"name": "RobotFramework",
"bytes": "891"
},
{
"name": "Ruby",
"bytes": "553"
},
{
"name": "Shell",
"bytes": "487251"
},
{
"name": "Smarty",
"bytes": "6976"
}
],
"symlink_target": ""
}
|
"""
Various helper functions for plotting.
"""
import numpy as np
from replot import adaptive_sampling
from replot import exceptions as exc
def plot_function(data, *args, **kwargs):
"""
Helper function to handle plotting of unevaluated functions (trying \
to evaluate it nicely and rendering the plot).
:param data: The function to plot.
:returns: A tuple of ``(args, kwargs)`` representing the plot.
.. seealso:: The documentation of the ``replot.Figure.plot`` method.
.. note:: ``args`` is used to handle the interval or point series on \
which the function should be evaluated. ``kwargs`` are passed \
        directly to ``matplotlib.pyplot.plot``.
"""
if len(args) == 0:
        # If no interval is specified, raise an error
raise exc.InvalidParameterError(
"You should pass a plotting interval to the plot command.")
elif isinstance(args[0], tuple):
# Interval specified, use it and adaptive plotting
x_values, y_values = adaptive_sampling.sample_function(
data,
args[0],
tol=1e-3)
elif isinstance(args[0], (list, np.ndarray)):
# List of points specified, use them and compute values of the
# function
x_values = args[0]
y_values = [data(i) for i in x_values]
else:
raise exc.InvalidParameterError(
"Second parameter in plot command should be a tuple " +
"specifying plotting interval.")
return ((x_values, y_values) + args[1:], kwargs)
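# Illustrative calls to this helper (a sketch only; ``np.sin`` stands in for
# any unevaluated function passed through ``replot.Figure.plot``):
#     plot_function(np.sin, (0, 6))                  # adaptive sampling over an interval
#     plot_function(np.sin, np.linspace(0, 6, 50))   # evaluation at explicit points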
|
{
"content_hash": "201dafe331592dca3ab6ec3ca63a3a80",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 75,
"avg_line_length": 36.2093023255814,
"alnum_prop": 0.630057803468208,
"repo_name": "Phyks/replot",
"id": "ecb686ac81457f0f4d315e56150dd9cfed849c7a",
"size": "1557",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "replot/helpers/plot.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "3642630"
},
{
"name": "Python",
"bytes": "47620"
}
],
"symlink_target": ""
}
|