text stringlengths 4 1.02M | meta dict |
|---|---|
"""
Tests for database migrations. This test case reads the configuration
file /tests/unit/test_migrations.conf for database connection settings
to use in the tests. For each connection found in the config file,
the test case runs a series of test cases to ensure that migrations work
properly both upgrading and downgrading, and that no data loss occurs
if possible.
"""
from __future__ import print_function
import ConfigParser
import datetime
import exceptions
import os
import pickle
import subprocess
import uuid
from migrate.versioning import api as migration_api
from migrate.versioning.repository import Repository
from oslo.config import cfg
import six.moves.urllib.parse as urlparse
from six.moves import xrange
from six import text_type
import sqlalchemy
from glance.common import crypt
from glance.common import exception
from glance.common import utils
import glance.db.migration as migration
import glance.db.sqlalchemy.migrate_repo
from glance.db.sqlalchemy.migrate_repo.schema import from_migration_import
from glance.db.sqlalchemy import models
from glance.openstack.common import jsonutils
from glance.openstack.common import log as logging
from glance.openstack.common import timeutils
from glance.tests import utils as test_utils
CONF = cfg.CONF
CONF.import_opt('metadata_encryption_key', 'glance.common.config')
LOG = logging.getLogger(__name__)
def _get_connect_string(backend,
user="openstack_citest",
passwd="openstack_citest",
database="openstack_citest"):
"""
Try to get a connection with a very specific set of values, if we get
these then we'll run the tests, otherwise they are skipped
"""
if backend == "mysql":
backend = "mysql+mysqldb"
elif backend == "postgres":
backend = "postgresql+psycopg2"
return ("%(backend)s://%(user)s:%(passwd)s@localhost/%(database)s"
% {'backend': backend, 'user': user, 'passwd': passwd,
'database': database})
def _is_backend_avail(backend,
                      user="openstack_citest",
                      passwd="openstack_citest",
                      database="openstack_citest"):
    """Return True if a test database for `backend` accepts connections.

    Bug fix: previously only the literal strings 'mysql' and 'postgres'
    built a connection URI; any other backend name (e.g. 'postgresql',
    as passed by test_postgresql_connect_fail) left `connect_uri`
    unbound, and the resulting NameError was swallowed by the broad
    except clause -- reporting "unavailable" without ever attempting a
    connection. _get_connect_string() already handles the name mapping,
    so delegate to it unconditionally.
    """
    try:
        connect_uri = _get_connect_string(backend, user=user,
                                          passwd=passwd, database=database)
        engine = sqlalchemy.create_engine(connect_uri)
        connection = engine.connect()
    except Exception:
        # intentionally catch all to handle exceptions even if we don't
        # have any backend code loaded.
        return False
    else:
        connection.close()
        engine.dispose()
        return True
def _have_mysql():
present = os.environ.get('GLANCE_TEST_MYSQL_PRESENT')
if present is None:
return _is_backend_avail('mysql')
return present.lower() in ('', 'true')
def get_table(engine, name):
    """Reflect table `name` from the live database bound to `engine`.

    Needed because the models don't work for us in migrations
    as models will be far out of sync with the current data.
    """
    meta = sqlalchemy.schema.MetaData(bind=engine)
    return sqlalchemy.Table(name, meta, autoload=True)
class TestMigrations(test_utils.BaseTestCase):
    """Test sqlalchemy-migrate migrations."""
    # Default location of the DB-connection config file, which lives
    # next to this module.
    DEFAULT_CONFIG_FILE = os.path.join(os.path.dirname(__file__),
                                       'test_migrations.conf')
    # Test machines can set the GLANCE_TEST_MIGRATIONS_CONF variable
    # to override the location of the config file for migration testing
    CONFIG_FILE_PATH = os.environ.get('GLANCE_TEST_MIGRATIONS_CONF',
                                      DEFAULT_CONFIG_FILE)
    # Filesystem location of the sqlalchemy-migrate repository under test,
    # derived from the migrate_repo package's own __file__.
    MIGRATE_FILE = glance.db.sqlalchemy.migrate_repo.__file__
    REPOSITORY = Repository(os.path.abspath(os.path.dirname(MIGRATE_FILE)))
    def setUp(self):
        """Load DB connection settings from the config file and reset DBs.

        Fails the whole test outright when the config file is missing or
        unparseable, since no engines could be built without it.
        """
        super(TestMigrations, self).setUp()
        self.snake_walk = False
        self.test_databases = {}
        # Load test databases from the config file. Only do this
        # once. No need to re-run this on each test...
        LOG.debug('config_path is %s',
                  text_type(TestMigrations.CONFIG_FILE_PATH))
        if os.path.exists(TestMigrations.CONFIG_FILE_PATH):
            cp = ConfigParser.RawConfigParser()
            try:
                cp.read(TestMigrations.CONFIG_FILE_PATH)
                # Each [DEFAULT] entry maps a backend key to a connection URI.
                defaults = cp.defaults()
                for key, value in defaults.items():
                    self.test_databases[key] = value
                self.snake_walk = cp.getboolean('walk_style', 'snake_walk')
            except ConfigParser.ParsingError as e:
                self.fail("Failed to read test_migrations.conf config "
                          "file. Got error: %s" % e)
        else:
            self.fail("Failed to find test_migrations.conf config "
                      "file.")
        # One engine per configured connection string.
        self.engines = {}
        for key, value in self.test_databases.items():
            self.engines[key] = sqlalchemy.create_engine(value)
        # We start each test case with a completely blank slate.
        self._reset_databases()
def tearDown(self):
# We destroy the test data store between each test case,
# and recreate it, which ensures that we have no side-effects
# from the tests
self._reset_databases()
super(TestMigrations, self).tearDown()
    def _reset_databases(self):
        """Drop and recreate every configured test database.

        SQLite files are simply deleted; MySQL and PostgreSQL databases
        are dropped/recreated via their command-line clients rather than
        through SQLAlchemy MetaData.
        """
        def execute_cmd(cmd=None):
            # shell=True is acceptable here: the command strings are built
            # from the trusted test config, not from external input.
            proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                                    stderr=subprocess.STDOUT, shell=True)
            output = proc.communicate()[0]
            LOG.debug(output)
            self.assertEqual(0, proc.returncode)
        for key, engine in self.engines.items():
            conn_string = self.test_databases[key]
            conn_pieces = urlparse.urlparse(conn_string)
            # Dispose first so no pooled connections hold the DB open.
            engine.dispose()
            if conn_string.startswith('sqlite'):
                # We can just delete the SQLite database, which is
                # the easiest and cleanest solution
                db_path = conn_pieces.path[1:]
                if os.path.exists(db_path):
                    os.unlink(db_path)
                # No need to recreate the SQLite DB. SQLite will
                # create it for us if it's not there...
            elif conn_string.startswith('mysql'):
                # We can execute the MySQL client to destroy and re-create
                # the MYSQL database, which is easier and less error-prone
                # than using SQLAlchemy to do this via MetaData...trust me.
                database = conn_pieces.path.strip('/')
                loc_pieces = conn_pieces.netloc.split('@')
                host = loc_pieces[1]
                auth_pieces = loc_pieces[0].split(':')
                user = auth_pieces[0]
                password = ""
                if len(auth_pieces) > 1:
                    if auth_pieces[1].strip():
                        # mysql takes the password glued onto -p.
                        password = "-p\"%s\"" % auth_pieces[1]
                sql = ("drop database if exists %(database)s; create "
                       "database %(database)s;") % {'database': database}
                cmd = ("mysql -u \"%(user)s\" %(password)s -h %(host)s "
                       "-e \"%(sql)s\"") % {'user': user, 'password': password,
                                            'host': host, 'sql': sql}
                execute_cmd(cmd)
            elif conn_string.startswith('postgresql'):
                database = conn_pieces.path.strip('/')
                loc_pieces = conn_pieces.netloc.split('@')
                host = loc_pieces[1]
                auth_pieces = loc_pieces[0].split(':')
                user = auth_pieces[0]
                password = ""
                if len(auth_pieces) > 1:
                    password = auth_pieces[1].strip()
                # note(boris-42): This file is used for authentication
                # without password prompt.
                createpgpass = ("echo '*:*:*:%(user)s:%(password)s' > "
                                "~/.pgpass && chmod 0600 ~/.pgpass" %
                                {'user': user, 'password': password})
                execute_cmd(createpgpass)
                # note(boris-42): We must create and drop database, we can't
                # drop database which we have connected to, so for such
                # operations there is a special database template1.
                sqlcmd = ("psql -w -U %(user)s -h %(host)s -c"
                          " '%(sql)s' -d template1")
                sql = ("drop database if exists %(database)s;")
                sql = sql % {'database': database}
                droptable = sqlcmd % {'user': user, 'host': host,
                                      'sql': sql}
                execute_cmd(droptable)
                sql = ("create database %(database)s;")
                sql = sql % {'database': database}
                createtable = sqlcmd % {'user': user, 'host': host,
                                        'sql': sql}
                execute_cmd(createtable)
def test_walk_versions(self):
"""
Walks all version scripts for each tested database, ensuring
that there are no errors in the version scripts for each engine
"""
for key, engine in self.engines.items():
self._walk_versions(engine, self.snake_walk)
def test_mysql_connect_fail(self):
"""
Test that we can trigger a mysql connection failure and we fail
gracefully to ensure we don't break people without mysql
"""
if _is_backend_avail('mysql', user="openstack_cifail"):
self.fail("Shouldn't have connected")
def test_mysql_opportunistically(self):
# Test that table creation on mysql only builds InnoDB tables
if not _is_backend_avail('mysql'):
self.skipTest("mysql not available")
# add this to the global lists to make reset work with it, it's removed
# automatically in tearDown so no need to clean it up here.
connect_string = _get_connect_string("mysql")
engine = sqlalchemy.create_engine(connect_string)
self.engines["mysqlcitest"] = engine
self.test_databases["mysqlcitest"] = connect_string
# build a fully populated mysql database with all the tables
self._reset_databases()
self._walk_versions(engine, False, False)
connection = engine.connect()
# sanity check
total = connection.execute("SELECT count(*) "
"from information_schema.TABLES "
"where TABLE_SCHEMA='openstack_citest'")
self.assertTrue(total.scalar() > 0, "No tables found. Wrong schema?")
noninnodb = connection.execute("SELECT count(*) "
"from information_schema.TABLES "
"where TABLE_SCHEMA='openstack_citest' "
"and ENGINE!='InnoDB' "
"and TABLE_NAME!='migrate_version'")
count = noninnodb.scalar()
self.assertEqual(count, 0, "%d non InnoDB tables created" % count)
connection.close()
def test_postgresql_connect_fail(self):
"""
Test that we can trigger a postgres connection failure and we fail
gracefully to ensure we don't break people without postgres
"""
if _is_backend_avail('postgresql', user="openstack_cifail"):
self.fail("Shouldn't have connected")
def test_postgresql_opportunistically(self):
# Test postgresql database migration walk
if not _is_backend_avail('postgres'):
self.skipTest("postgresql not available")
# add this to the global lists to make reset work with it, it's removed
# automatically in tearDown so no need to clean it up here.
connect_string = _get_connect_string("postgres")
engine = sqlalchemy.create_engine(connect_string)
self.engines["postgresqlcitest"] = engine
self.test_databases["postgresqlcitest"] = connect_string
# build a fully populated postgresql database with all the tables
self._reset_databases()
self._walk_versions(engine, False, False)
    def _walk_versions(self, engine=None, snake_walk=False, downgrade=True,
                       initial_version=None):
        """Walk the migration scripts up and (optionally) back down.

        :param engine: engine for the database being migrated
        :param snake_walk: when True, additionally step back and forward
            around each version to exercise both directions of each script
        :param downgrade: when True, walk back down to the first migration
            after reaching the latest
        :param initial_version: version at which to place the database
            under version control (defaults to migration.INIT_VERSION)
        """
        # Determine latest version script from the repo, then
        # upgrade from 1 through to the latest, with no data
        # in the databases. This just checks that the schema itself
        # upgrades successfully.
        def db_version():
            # Current version as recorded in the migrate_version table.
            return migration_api.db_version(engine, TestMigrations.REPOSITORY)
        # Place the database under version control
        init_version = migration.INIT_VERSION
        if initial_version is not None:
            init_version = initial_version
        migration_api.version_control(engine, TestMigrations.REPOSITORY,
                                      init_version)
        self.assertEqual(init_version, db_version())
        # First step is taken without data hooks.
        migration_api.upgrade(engine, TestMigrations.REPOSITORY,
                              init_version + 1)
        self.assertEqual(init_version + 1, db_version())
        LOG.debug('latest version is %s', TestMigrations.REPOSITORY.latest)
        for version in xrange(init_version + 2,
                              TestMigrations.REPOSITORY.latest + 1):
            # upgrade -> downgrade -> upgrade
            self._migrate_up(engine, version, with_data=True)
            if snake_walk:
                self._migrate_down(engine, version - 1, with_data=True)
                self._migrate_up(engine, version)
        if downgrade:
            # Now walk it back down to 0 from the latest, testing
            # the downgrade paths.
            for version in reversed(
                    xrange(init_version + 2,
                           TestMigrations.REPOSITORY.latest + 1)):
                # downgrade -> upgrade -> downgrade
                self._migrate_down(engine, version - 1)
                if snake_walk:
                    self._migrate_up(engine, version)
                    self._migrate_down(engine, version - 1)
            # Ensure we made it all the way back to the first migration
            self.assertEqual(init_version + 1, db_version())
def _migrate_down(self, engine, version, with_data=False):
migration_api.downgrade(engine,
TestMigrations.REPOSITORY,
version)
self.assertEqual(version,
migration_api.db_version(engine,
TestMigrations.REPOSITORY))
# NOTE(sirp): `version` is what we're downgrading to (i.e. the 'target'
# version). So if we have any downgrade checks, they need to be run for
# the previous (higher numbered) migration.
if with_data:
post_downgrade = getattr(self, "_post_downgrade_%03d" %
(version + 1), None)
if post_downgrade:
post_downgrade(engine)
def _migrate_up(self, engine, version, with_data=False):
"""migrate up to a new version of the db.
We allow for data insertion and post checks at every
migration version with special _pre_upgrade_### and
_check_### functions in the main test.
"""
if with_data:
data = None
pre_upgrade = getattr(self, "_pre_upgrade_%3.3d" % version, None)
if pre_upgrade:
data = pre_upgrade(engine)
migration_api.upgrade(engine,
TestMigrations.REPOSITORY,
version)
self.assertEqual(version,
migration_api.db_version(engine,
TestMigrations.REPOSITORY))
if with_data:
check = getattr(self, "_check_%3.3d" % version, None)
if check:
check(engine, data)
    def _create_unversioned_001_db(self, engine):
        """Create the original 'images' schema without version control.

        Simulates a deployment that predates sqlalchemy-migrate being
        introduced, for test_version_control_existing_db.
        """
        # Create the initial version of the images table
        meta = sqlalchemy.schema.MetaData()
        meta.bind = engine
        # NOTE(review): 'id' uses models.Integer while every other column
        # uses sqlalchemy types directly -- presumably equivalent; confirm
        # against glance.db.sqlalchemy.models.
        images_001 = sqlalchemy.Table('images', meta,
                                      sqlalchemy.Column('id', models.Integer,
                                                        primary_key=True),
                                      sqlalchemy.Column('name',
                                                        sqlalchemy.String(255)
                                                        ),
                                      sqlalchemy.Column('type',
                                                        sqlalchemy.String(30)),
                                      sqlalchemy.Column('size',
                                                        sqlalchemy.Integer),
                                      sqlalchemy.Column('status',
                                                        sqlalchemy.String(30)),
                                      sqlalchemy.Column('is_public',
                                                        sqlalchemy.Boolean,
                                                        default=False),
                                      sqlalchemy.Column('location',
                                                        sqlalchemy.Text),
                                      sqlalchemy.Column('created_at',
                                                        sqlalchemy.DateTime(),
                                                        nullable=False),
                                      sqlalchemy.Column('updated_at',
                                                        sqlalchemy.DateTime()),
                                      sqlalchemy.Column('deleted_at',
                                                        sqlalchemy.DateTime()),
                                      sqlalchemy.Column('deleted',
                                                        sqlalchemy.Boolean(),
                                                        nullable=False,
                                                        default=False))
        images_001.create()
def test_version_control_existing_db(self):
"""
Creates a DB without version control information, places it
under version control and checks that it can be upgraded
without errors.
"""
for key, engine in self.engines.items():
self._create_unversioned_001_db(engine)
self._walk_versions(engine, self.snake_walk, initial_version=1)
def _pre_upgrade_003(self, engine):
now = datetime.datetime.now()
images = get_table(engine, 'images')
data = {'deleted': False, 'created_at': now, 'updated_at': now,
'type': 'kernel', 'status': 'active', 'is_public': True}
images.insert().values(data).execute()
return data
def _check_003(self, engine, data):
images = get_table(engine, 'images')
self.assertTrue('type' not in images.c,
"'type' column found in images table columns! "
"images table columns reported by metadata: %s\n"
% images.c.keys())
images_prop = get_table(engine, 'image_properties')
result = images_prop.select().execute()
types = []
for row in result:
if row['key'] == 'type':
types.append(row['value'])
self.assertIn(data['type'], types)
def _pre_upgrade_004(self, engine):
"""Insert checksum data sample to check if migration goes fine with
data.
"""
now = timeutils.utcnow()
images = get_table(engine, 'images')
data = [
{
'deleted': False, 'created_at': now, 'updated_at': now,
'type': 'kernel', 'status': 'active', 'is_public': True,
}
]
engine.execute(images.insert(), data)
return data
def _check_004(self, engine, data):
"""Assure that checksum data is present on table"""
images = get_table(engine, 'images')
self.assertIn('checksum', images.c)
self.assertEqual(images.c['checksum'].type.length, 32)
def _pre_upgrade_005(self, engine):
now = timeutils.utcnow()
images = get_table(engine, 'images')
data = [
{
'deleted': False, 'created_at': now, 'updated_at': now,
'type': 'kernel', 'status': 'active', 'is_public': True,
# Integer type signed size limit
'size': 2147483647
}
]
engine.execute(images.insert(), data)
return data
def _check_005(self, engine, data):
images = get_table(engine, 'images')
select = images.select().execute()
sizes = [row['size'] for row in select if row['size'] is not None]
migrated_data_sizes = [element['size'] for element in data]
for migrated in migrated_data_sizes:
self.assertIn(migrated, sizes)
def _pre_upgrade_006(self, engine):
now = timeutils.utcnow()
images = get_table(engine, 'images')
image_data = [
{
'deleted': False, 'created_at': now, 'updated_at': now,
'type': 'kernel', 'status': 'active', 'is_public': True,
'id': 9999,
}
]
engine.execute(images.insert(), image_data)
images_properties = get_table(engine, 'image_properties')
properties_data = [
{
'id': 10, 'image_id': 9999, 'updated_at': now,
'created_at': now, 'deleted': False, 'key': 'image_name'
}
]
engine.execute(images_properties.insert(), properties_data)
return properties_data
def _check_006(self, engine, data):
images_properties = get_table(engine, 'image_properties')
select = images_properties.select().execute()
# load names from name collumn
image_names = [row['name'] for row in select]
# check names from data in image names from name collumn
for element in data:
self.assertIn(element['key'], image_names)
def _pre_upgrade_010(self, engine):
"""Test rows in images with NULL updated_at get updated to equal
created_at.
"""
initial_values = [
(datetime.datetime(1999, 1, 2, 4, 10, 20),
datetime.datetime(1999, 1, 2, 4, 10, 30)),
(datetime.datetime(1999, 2, 4, 6, 15, 25),
datetime.datetime(1999, 2, 4, 6, 15, 35)),
(datetime.datetime(1999, 3, 6, 8, 20, 30),
None),
(datetime.datetime(1999, 4, 8, 10, 25, 35),
None),
]
images = get_table(engine, 'images')
for created_at, updated_at in initial_values:
row = dict(deleted=False,
created_at=created_at,
updated_at=updated_at,
status='active',
is_public=True,
min_disk=0,
min_ram=0)
images.insert().values(row).execute()
return initial_values
def _check_010(self, engine, data):
values = dict((c, u) for c, u in data)
images = get_table(engine, 'images')
for row in images.select().execute():
if row['created_at'] in values:
# updated_at should be unchanged if not previous NULL, or
# set to created_at if previously NULL
updated_at = values.pop(row['created_at']) or row['created_at']
self.assertEqual(row['updated_at'], updated_at)
# No initial values should be remaining
self.assertEqual(len(values), 0)
def _pre_upgrade_012(self, engine):
"""Test rows in images have id changes from int to varchar(32) and
value changed from int to UUID. Also test image_members and
image_properties gets updated to point to new UUID keys.
"""
images = get_table(engine, 'images')
image_members = get_table(engine, 'image_members')
image_properties = get_table(engine, 'image_properties')
# Insert kernel, ramdisk and normal images
now = timeutils.utcnow()
data = {'created_at': now, 'updated_at': now,
'status': 'active', 'deleted': False,
'is_public': True, 'min_disk': 0, 'min_ram': 0}
test_data = {}
for name in ('kernel', 'ramdisk', 'normal'):
data['name'] = '%s migration 012 test' % name
result = images.insert().values(data).execute()
test_data[name] = result.inserted_primary_key[0]
# Insert image_members and image_properties rows
data = {'created_at': now, 'updated_at': now, 'deleted': False,
'image_id': test_data['normal'], 'member': 'foobar',
'can_share': False}
result = image_members.insert().values(data).execute()
test_data['member'] = result.inserted_primary_key[0]
data = {'created_at': now, 'updated_at': now, 'deleted': False,
'image_id': test_data['normal'], 'name': 'ramdisk_id',
'value': test_data['ramdisk']}
result = image_properties.insert().values(data).execute()
test_data['properties'] = [result.inserted_primary_key[0]]
data.update({'name': 'kernel_id', 'value': test_data['kernel']})
result = image_properties.insert().values(data).execute()
test_data['properties'].append(result.inserted_primary_key)
return test_data
def _check_012(self, engine, test_data):
images = get_table(engine, 'images')
image_members = get_table(engine, 'image_members')
image_properties = get_table(engine, 'image_properties')
# Find kernel, ramdisk and normal images. Make sure id has been
# changed to a uuid
uuids = {}
for name in ('kernel', 'ramdisk', 'normal'):
image_name = '%s migration 012 test' % name
rows = images.select()\
.where(images.c.name == image_name)\
.execute().fetchall()
self.assertEqual(len(rows), 1)
row = rows[0]
self.assertTrue(utils.is_uuid_like(row['id']))
uuids[name] = row['id']
# Find all image_members to ensure image_id has been updated
results = image_members.select()\
.where(image_members.c.image_id ==
uuids['normal'])\
.execute().fetchall()
self.assertEqual(len(results), 1)
# Find all image_properties to ensure image_id has been updated
# as well as ensure kernel_id and ramdisk_id values have been
# updated too
results = image_properties.select()\
.where(image_properties.c.image_id ==
uuids['normal'])\
.execute().fetchall()
self.assertEqual(len(results), 2)
for row in results:
self.assertIn(row['name'], ('kernel_id', 'ramdisk_id'))
if row['name'] == 'kernel_id':
self.assertEqual(row['value'], uuids['kernel'])
if row['name'] == 'ramdisk_id':
self.assertEqual(row['value'], uuids['ramdisk'])
def _post_downgrade_012(self, engine):
images = get_table(engine, 'images')
image_members = get_table(engine, 'image_members')
image_properties = get_table(engine, 'image_properties')
# Find kernel, ramdisk and normal images. Make sure id has been
# changed back to an integer
ids = {}
for name in ('kernel', 'ramdisk', 'normal'):
image_name = '%s migration 012 test' % name
rows = images.select()\
.where(images.c.name == image_name)\
.execute().fetchall()
self.assertEqual(len(rows), 1)
row = rows[0]
self.assertFalse(utils.is_uuid_like(row['id']))
ids[name] = row['id']
# Find all image_members to ensure image_id has been updated
results = image_members.select()\
.where(image_members.c.image_id ==
ids['normal'])\
.execute().fetchall()
self.assertEqual(len(results), 1)
# Find all image_properties to ensure image_id has been updated
# as well as ensure kernel_id and ramdisk_id values have been
# updated too
results = image_properties.select()\
.where(image_properties.c.image_id ==
ids['normal'])\
.execute().fetchall()
self.assertEqual(len(results), 2)
for row in results:
self.assertIn(row['name'], ('kernel_id', 'ramdisk_id'))
if row['name'] == 'kernel_id':
self.assertEqual(row['value'], str(ids['kernel']))
if row['name'] == 'ramdisk_id':
self.assertEqual(row['value'], str(ids['ramdisk']))
def _assert_invalid_swift_uri_raises_bad_store_uri(self,
legacy_parse_uri_fn):
invalid_uri = ('swift://http://acct:usr:pass@example.com'
'/container/obj-id')
# URI cannot contain more than one occurrence of a scheme.
self.assertRaises(exception.BadStoreUri,
legacy_parse_uri_fn,
invalid_uri,
True)
invalid_scheme_uri = ('http://acct:usr:pass@example.com'
'/container/obj-id')
self.assertRaises(exceptions.AssertionError,
legacy_parse_uri_fn,
invalid_scheme_uri,
True)
invalid_account_missing_uri = 'swift+http://container/obj-id'
# Badly formed S3 URI: swift+http://container/obj-id
self.assertRaises(exception.BadStoreUri,
legacy_parse_uri_fn,
invalid_account_missing_uri,
True)
invalid_container_missing_uri = ('swift+http://'
'acct:usr:pass@example.com/obj-id')
# Badly formed S3 URI: swift+http://acct:usr:pass@example.com/obj-id
self.assertRaises(exception.BadStoreUri,
legacy_parse_uri_fn,
invalid_container_missing_uri,
True)
invalid_object_missing_uri = ('swift+http://'
'acct:usr:pass@example.com/container')
# Badly formed S3 URI: swift+http://acct:usr:pass@example.com/container
self.assertRaises(exception.BadStoreUri,
legacy_parse_uri_fn,
invalid_object_missing_uri,
True)
invalid_user_without_pass_uri = ('swift://acctusr@example.com'
'/container/obj-id')
# Badly formed credentials '%(creds)s' in Swift URI
self.assertRaises(exception.BadStoreUri,
legacy_parse_uri_fn,
invalid_user_without_pass_uri,
True)
# Badly formed credentials in Swift URI.
self.assertRaises(exception.BadStoreUri,
legacy_parse_uri_fn,
invalid_user_without_pass_uri,
False)
def test_legacy_parse_swift_uri_015(self):
(legacy_parse_uri,) = from_migration_import(
'015_quote_swift_credentials', ['legacy_parse_uri'])
uri = legacy_parse_uri(
'swift://acct:usr:pass@example.com/container/obj-id',
True)
self.assertTrue(uri, 'swift://acct%3Ausr:pass@example.com'
'/container/obj-id')
self._assert_invalid_swift_uri_raises_bad_store_uri(legacy_parse_uri)
def _pre_upgrade_015(self, engine):
images = get_table(engine, 'images')
unquoted_locations = [
'swift://acct:usr:pass@example.com/container/obj-id',
'file://foo',
]
now = datetime.datetime.now()
temp = dict(deleted=False,
created_at=now,
updated_at=now,
status='active',
is_public=True,
min_disk=0,
min_ram=0)
data = []
for i, location in enumerate(unquoted_locations):
temp.update(location=location, id=str(uuid.uuid4()))
data.append(temp)
images.insert().values(temp).execute()
return data
def _check_015(self, engine, data):
images = get_table(engine, 'images')
quoted_locations = [
'swift://acct%3Ausr:pass@example.com/container/obj-id',
'file://foo',
]
result = images.select().execute()
locations = map(lambda x: x['location'], result)
for loc in quoted_locations:
self.assertIn(loc, locations)
def _pre_upgrade_016(self, engine):
images = get_table(engine, 'images')
now = datetime.datetime.now()
temp = dict(deleted=False,
created_at=now,
updated_at=now,
status='active',
is_public=True,
min_disk=0,
min_ram=0,
id='fake-image-id1')
images.insert().values(temp).execute()
image_members = get_table(engine, 'image_members')
now = datetime.datetime.now()
data = {'deleted': False,
'created_at': now,
'member': 'fake-member',
'updated_at': now,
'can_share': False,
'image_id': 'fake-image-id1'}
image_members.insert().values(data).execute()
return data
def _check_016(self, engine, data):
image_members = get_table(engine, 'image_members')
self.assertTrue('status' in image_members.c,
"'status' column found in image_members table "
"columns! image_members table columns: %s"
% image_members.c.keys())
    def test_legacy_parse_swift_uri_017(self):
        """Exercise the legacy swift URI parser shipped with migration 017."""
        metadata_encryption_key = 'a' * 16
        self.config(metadata_encryption_key=metadata_encryption_key)
        (legacy_parse_uri, encrypt_location) = from_migration_import(
            '017_quote_encrypted_swift_credentials', ['legacy_parse_uri',
                                                      'encrypt_location'])
        uri = legacy_parse_uri('swift://acct:usr:pass@example.com'
                               '/container/obj-id', True)
        # NOTE(review): assertTrue(a, b) only checks `uri` is truthy;
        # the second argument is just the failure message, so no equality
        # against encrypt_location() is actually verified here. A direct
        # assertEqual may not be valid if encryption output is
        # non-deterministic -- a decrypt-and-compare check would be
        # needed instead; confirm before changing.
        self.assertTrue(uri, encrypt_location(
            'swift://acct%3Ausr:pass@example.com/container/obj-id'))
        self._assert_invalid_swift_uri_raises_bad_store_uri(legacy_parse_uri)
def _pre_upgrade_017(self, engine):
metadata_encryption_key = 'a' * 16
self.config(metadata_encryption_key=metadata_encryption_key)
images = get_table(engine, 'images')
unquoted = 'swift://acct:usr:pass@example.com/container/obj-id'
encrypted_unquoted = crypt.urlsafe_encrypt(
metadata_encryption_key,
unquoted, 64)
data = []
now = datetime.datetime.now()
temp = dict(deleted=False,
created_at=now,
updated_at=now,
status='active',
is_public=True,
min_disk=0,
min_ram=0,
location=encrypted_unquoted,
id='fakeid1')
images.insert().values(temp).execute()
locations = [
'file://ab',
'file://abc',
'swift://acct3A%foobar:pass@example.com/container/obj-id2'
]
now = datetime.datetime.now()
temp = dict(deleted=False,
created_at=now,
updated_at=now,
status='active',
is_public=True,
min_disk=0,
min_ram=0)
for i, location in enumerate(locations):
temp.update(location=location, id=str(uuid.uuid4()))
data.append(temp)
images.insert().values(temp).execute()
return data
def _check_017(self, engine, data):
metadata_encryption_key = 'a' * 16
quoted = 'swift://acct%3Ausr:pass@example.com/container/obj-id'
images = get_table(engine, 'images')
result = images.select().execute()
locations = map(lambda x: x['location'], result)
actual_location = []
for location in locations:
if location:
try:
temp_loc = crypt.urlsafe_decrypt(metadata_encryption_key,
location)
actual_location.append(temp_loc)
except TypeError:
actual_location.append(location)
except ValueError:
actual_location.append(location)
self.assertIn(quoted, actual_location)
loc_list = ['file://ab',
'file://abc',
'swift://acct3A%foobar:pass@example.com/container/obj-id2']
for location in loc_list:
if location not in actual_location:
self.fail(_("location: %s data lost") % location)
def _pre_upgrade_019(self, engine):
images = get_table(engine, 'images')
now = datetime.datetime.now()
base_values = {
'deleted': False,
'created_at': now,
'updated_at': now,
'status': 'active',
'is_public': True,
'min_disk': 0,
'min_ram': 0,
}
data = [
{'id': 'fake-19-1', 'location': 'http://glance.example.com'},
#NOTE(bcwaldon): images with a location of None should
# not be migrated
{'id': 'fake-19-2', 'location': None},
]
map(lambda image: image.update(base_values), data)
for image in data:
images.insert().values(image).execute()
return data
def _check_019(self, engine, data):
image_locations = get_table(engine, 'image_locations')
records = image_locations.select().execute().fetchall()
locations = dict([(il.image_id, il.value) for il in records])
self.assertEqual(locations.get('fake-19-1'),
'http://glance.example.com')
def _check_020(self, engine, data):
images = get_table(engine, 'images')
self.assertNotIn('location', images.c)
def _pre_upgrade_026(self, engine):
image_locations = get_table(engine, 'image_locations')
now = datetime.datetime.now()
image_id = 'fake_id'
url = 'file:///some/place/onthe/fs'
images = get_table(engine, 'images')
temp = dict(deleted=False,
created_at=now,
updated_at=now,
status='active',
is_public=True,
min_disk=0,
min_ram=0,
id=image_id)
images.insert().values(temp).execute()
temp = dict(deleted=False,
created_at=now,
updated_at=now,
image_id=image_id,
value=url)
image_locations.insert().values(temp).execute()
return image_id
def _check_026(self, engine, data):
image_locations = get_table(engine, 'image_locations')
results = image_locations.select()\
.where(image_locations.c.image_id == data).execute()
r = list(results)
self.assertEqual(len(r), 1)
self.assertEqual(r[0]['value'], 'file:///some/place/onthe/fs')
self.assertIn('meta_data', r[0])
x = pickle.loads(r[0]['meta_data'])
self.assertEqual(x, {})
def _check_027(self, engine, data):
table = "images"
index = "checksum_image_idx"
columns = ["checksum"]
meta = sqlalchemy.MetaData()
meta.bind = engine
new_table = sqlalchemy.Table(table, meta, autoload=True)
index_data = [(idx.name, idx.columns.keys())
for idx in new_table.indexes]
self.assertIn((index, columns), index_data)
def _check_028(self, engine, data):
owner_index = "owner_image_idx"
columns = ["owner"]
images_table = get_table(engine, 'images')
index_data = [(idx.name, idx.columns.keys())
for idx in images_table.indexes
if idx.name == owner_index]
self.assertIn((owner_index, columns), index_data)
def _post_downgrade_028(self, engine):
owner_index = "owner_image_idx"
columns = ["owner"]
images_table = get_table(engine, 'images')
index_data = [(idx.name, idx.columns.keys())
for idx in images_table.indexes
if idx.name == owner_index]
self.assertNotIn((owner_index, columns), index_data)
def _pre_upgrade_029(self, engine):
image_locations = get_table(engine, 'image_locations')
meta_data = {'somelist': ['a', 'b', 'c'], 'avalue': 'hello',
'adict': {}}
now = datetime.datetime.now()
image_id = 'fake_029_id'
url = 'file:///some/place/onthe/fs029'
images = get_table(engine, 'images')
temp = dict(deleted=False,
created_at=now,
updated_at=now,
status='active',
is_public=True,
min_disk=0,
min_ram=0,
id=image_id)
images.insert().values(temp).execute()
pickle_md = pickle.dumps(meta_data)
temp = dict(deleted=False,
created_at=now,
updated_at=now,
image_id=image_id,
value=url,
meta_data=pickle_md)
image_locations.insert().values(temp).execute()
return meta_data, image_id
def _check_029(self, engine, data):
meta_data = data[0]
image_id = data[1]
image_locations = get_table(engine, 'image_locations')
records = image_locations.select().\
where(image_locations.c.image_id == image_id).execute().fetchall()
for r in records:
d = jsonutils.loads(r['meta_data'])
self.assertEqual(d, meta_data)
def _post_downgrade_029(self, engine):
image_id = 'fake_029_id'
image_locations = get_table(engine, 'image_locations')
records = image_locations.select().\
where(image_locations.c.image_id == image_id).execute().fetchall()
for r in records:
md = r['meta_data']
d = pickle.loads(md)
self.assertIsInstance(d, dict)
def _check_030(self, engine, data):
table = "tasks"
index_type = ('ix_tasks_type', ['type'])
index_status = ('ix_tasks_status', ['status'])
index_owner = ('ix_tasks_owner', ['owner'])
index_deleted = ('ix_tasks_deleted', ['deleted'])
index_updated_at = ('ix_tasks_updated_at', ['updated_at'])
meta = sqlalchemy.MetaData()
meta.bind = engine
tasks_table = sqlalchemy.Table(table, meta, autoload=True)
index_data = [(idx.name, idx.columns.keys())
for idx in tasks_table.indexes]
self.assertIn(index_type, index_data)
self.assertIn(index_status, index_data)
self.assertIn(index_owner, index_data)
self.assertIn(index_deleted, index_data)
self.assertIn(index_updated_at, index_data)
expected = [u'id',
u'type',
u'status',
u'owner',
u'input',
u'result',
u'message',
u'expires_at',
u'created_at',
u'updated_at',
u'deleted_at',
u'deleted']
# NOTE(flwang): Skip the column type checking for now since Jenkins is
# using sqlalchemy.dialects.postgresql.base.TIMESTAMP instead of
# DATETIME which is using by mysql and sqlite.
col_data = [col.name for col in tasks_table.columns]
self.assertEqual(expected, col_data)
def _post_downgrade_030(self, engine):
self.assertRaises(sqlalchemy.exc.NoSuchTableError,
get_table, engine, 'tasks')
    def _pre_upgrade_031(self, engine):
        """Seed duplicate image locations ahead of migration 031.

        Migration 031 de-duplicates image_locations rows; insert five
        locations of which two are exact (value, meta_data) duplicates
        and return the owning image id for _check_031.
        """
        images = get_table(engine, 'images')
        now = datetime.datetime.now()
        image_id = 'fake_031_id'
        temp = dict(deleted=False,
                    created_at=now,
                    updated_at=now,
                    status='active',
                    is_public=True,
                    min_disk=0,
                    min_ram=0,
                    id=image_id)
        images.insert().values(temp).execute()
        locations_table = get_table(engine, 'image_locations')
        # (value, meta_data) pairs; the repeated 'file://ab' + '{}' and
        # 'file://ab1' entries are deliberate duplicates.
        locations = [
            ('file://ab', '{"a": "yo yo"}'),
            ('file://ab', '{}'),
            ('file://ab', '{}'),
            ('file://ab1', '{"a": "that one, please"}'),
            ('file://ab1', '{"a": "that one, please"}'),
        ]
        temp = dict(deleted=False,
                    created_at=now,
                    updated_at=now,
                    image_id=image_id)
        for location, metadata in locations:
            temp.update(value=location, meta_data=metadata)
            locations_table.insert().values(temp).execute()
        return image_id
def _check_031(self, engine, image_id):
locations_table = get_table(engine, 'image_locations')
result = locations_table.select()\
.where(locations_table.c.image_id == image_id)\
.execute().fetchall()
locations = set([(x['value'], x['meta_data']) for x in result])
actual_locations = set([
('file://ab', '{"a": "yo yo"}'),
('file://ab', '{}'),
('file://ab1', '{"a": "that one, please"}'),
])
self.assertFalse(actual_locations.symmetric_difference(locations))
def _pre_upgrade_032(self, engine):
self.assertRaises(sqlalchemy.exc.NoSuchTableError,
get_table, engine, 'task_info')
tasks = get_table(engine, 'tasks')
now = datetime.datetime.now()
base_values = {
'deleted': False,
'created_at': now,
'updated_at': now,
'status': 'active',
'owner': 'TENANT',
'type': 'import',
}
data = [
{
'id': 'task-1',
'input': 'some input',
'message': None,
'result': 'successful'
},
{
'id': 'task-2',
'input': None,
'message': None,
'result': None
},
]
map(lambda task: task.update(base_values), data)
for task in data:
tasks.insert().values(task).execute()
return data
def _check_032(self, engine, data):
task_info_table = get_table(engine, 'task_info')
task_info_refs = task_info_table.select().execute().fetchall()
self.assertEqual(len(task_info_refs), 2)
for x in range(len(task_info_refs)):
self.assertEqual(task_info_refs[x].task_id, data[x]['id'])
self.assertEqual(task_info_refs[x].input, data[x]['input'])
self.assertEqual(task_info_refs[x].result, data[x]['result'])
self.assertIsNone(task_info_refs[x].message)
tasks_table = get_table(engine, 'tasks')
self.assertNotIn('input', tasks_table.c)
self.assertNotIn('result', tasks_table.c)
self.assertNotIn('message', tasks_table.c)
def _post_downgrade_032(self, engine):
self.assertRaises(sqlalchemy.exc.NoSuchTableError,
get_table, engine, 'task_info')
tasks_table = get_table(engine, 'tasks')
records = tasks_table.select().execute().fetchall()
self.assertEqual(len(records), 2)
tasks = dict([(t.id, t) for t in records])
task_1 = tasks.get('task-1')
self.assertEqual(task_1.input, 'some input')
self.assertEqual(task_1.result, 'successful')
self.assertIsNone(task_1.message)
task_2 = tasks.get('task-2')
self.assertIsNone(task_2.input)
self.assertIsNone(task_2.result)
self.assertIsNone(task_2.message)
    def _pre_upgrade_033(self, engine):
        """Seed one image/location per status ahead of migration 033.

        Migration 033 adds a status column to image_locations; insert an
        image (and a location) for every image status and return the
        image ids for _check_033.
        """
        images = get_table(engine, 'images')
        image_locations = get_table(engine, 'image_locations')
        now = datetime.datetime.now()
        # NOTE(review): the id templates say "028" although this seeds
        # migration 033 — presumably a copy/paste leftover; harmless
        # since only _check_033 consumes these ids.
        image_id = 'fake_id_028_%d'
        url = 'file:///some/place/onthe/fs_%d'
        status_list = ['active', 'saving', 'queued', 'killed',
                       'pending_delete', 'deleted']
        image_id_list = []
        for (idx, status) in enumerate(status_list):
            temp = dict(deleted=False,
                        created_at=now,
                        updated_at=now,
                        status=status,
                        is_public=True,
                        min_disk=0,
                        min_ram=0,
                        id=image_id % idx)
            images.insert().values(temp).execute()
            temp = dict(deleted=False,
                        created_at=now,
                        updated_at=now,
                        image_id=image_id % idx,
                        value=url % idx)
            image_locations.insert().values(temp).execute()
            image_id_list.append(image_id % idx)
        return image_id_list
def _check_033(self, engine, data):
image_locations = get_table(engine, 'image_locations')
self.assertIn('status', image_locations.c)
self.assertEqual(image_locations.c['status'].type.length, 30)
status_list = ['active', 'active', 'active',
'deleted', 'pending_delete', 'deleted']
for (idx, image_id) in enumerate(data):
results = image_locations.select()\
.where(image_locations.c.image_id == image_id).execute()
r = list(results)
self.assertEqual(len(r), 1)
self.assertIn('status', r[0])
self.assertEqual(r[0]['status'], status_list[idx])
def _post_downgrade_033(self, engine):
image_locations = get_table(engine, 'image_locations')
self.assertNotIn('status', image_locations.c)
def _pre_upgrade_034(self, engine):
images = get_table(engine, 'images')
now = datetime.datetime.now()
image_id = 'fake_id_034'
temp = dict(deleted=False,
created_at=now,
updated_at=now,
status='active',
is_public=True,
min_disk=0,
min_ram=0,
id=image_id)
images.insert().values(temp).execute()
def _check_034(self, engine, data):
images = get_table(engine, 'images')
self.assertIn('virtual_size', images.c)
result = (images.select()
.where(images.c.id == 'fake_id_034')
.execute().fetchone())
self.assertIsNone(result.virtual_size)
def _post_downgrade_034(self, engine):
images = get_table(engine, 'images')
self.assertNotIn('virtual_size', images.c)
| {
"content_hash": "18c7f9fba3cc3eb737477a747caff318",
"timestamp": "",
"source": "github",
"line_count": 1324,
"max_line_length": 79,
"avg_line_length": 39.93882175226586,
"alnum_prop": 0.5244047731613684,
"repo_name": "tanglei528/glance",
"id": "7956c5a721870ec3aac319d2f82dea9abaeb10df",
"size": "53547",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "glance/tests/unit/test_migrations.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "3193082"
},
{
"name": "Shell",
"bytes": "7168"
}
],
"symlink_target": ""
} |
# Re-export the generated client and package version.
from ._service_fabric_management_client import ServiceFabricManagementClient
from ._version import VERSION
__version__ = VERSION
try:
    # Optional hand-written customizations shipped alongside the
    # generated code.
    from ._patch import __all__ as _patch_all
    from ._patch import *  # type: ignore # pylint: disable=unused-wildcard-import
except ImportError:
    _patch_all = []
# NOTE(review): this import is unconditional even though the try/except
# above tolerates a missing _patch module — presumably _patch.py always
# ships with this package; verify.
from ._patch import patch_sdk as _patch_sdk
__all__ = [
    "ServiceFabricManagementClient",
]
# Expose any extra names the patch module declared.
__all__.extend([p for p in _patch_all if p not in __all__])
_patch_sdk()
| {
"content_hash": "a01bab34adbf7b5742b79d03cc1711b5",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 82,
"avg_line_length": 26.38888888888889,
"alnum_prop": 0.6905263157894737,
"repo_name": "Azure/azure-sdk-for-python",
"id": "b4beea6828430a9d5cb94efb4689e785aaa72163",
"size": "943",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "sdk/servicefabric/azure-mgmt-servicefabric/azure/mgmt/servicefabric/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1224"
},
{
"name": "Bicep",
"bytes": "24196"
},
{
"name": "CSS",
"bytes": "6089"
},
{
"name": "Dockerfile",
"bytes": "4892"
},
{
"name": "HTML",
"bytes": "12058"
},
{
"name": "JavaScript",
"bytes": "8137"
},
{
"name": "Jinja",
"bytes": "10377"
},
{
"name": "Jupyter Notebook",
"bytes": "272022"
},
{
"name": "PowerShell",
"bytes": "518535"
},
{
"name": "Python",
"bytes": "715484989"
},
{
"name": "Shell",
"bytes": "3631"
}
],
"symlink_target": ""
} |
"""
Script to help run migrations every time the models are changed
"""
import os
import unittest
from flask_script import Manager
from flask_migrate import Migrate, MigrateCommand
from app import db, create_app
# coverage stuff
COV = None
if os.environ.get('FLASK_COVERAGE'):
import coverage
COV = coverage.coverage(branch=True, include='app/*')
COV.start()
# initialize the app based on configurations
app = create_app (config_name=os.getenv('APP_SETTINGS'))
migrate = Migrate(app, db)
manager = Manager(app)
# create migration command
manager.add_command('db', MigrateCommand)
# create test command
@manager.command
def test(coverage=False):
"""
Run the tests
"""
# Setup coverage
if coverage and not os.environ.get('FLASK_COVERAGE'):
import sys
os.environ['FLASK_COVERAGE'] = '1'
os.execvp(sys.executable, [sys.executable] + sys.argv)
# load the tests
tests = unittest.TestLoader().discover('./test', pattern='test*.py')
# run the tests
result = unittest.TextTestRunner(verbosity=2).run(tests)
# save coverage report
if COV:
COV.stop()
COV.save()
if result.wasSuccessful():
return 0
return 1
from app.models.shopping import User,ShoppingList, ShoppingItem
if __name__ == '__main__':
manager.run()
| {
"content_hash": "0239a596ddb399cc34fe94fbdac1dc86",
"timestamp": "",
"source": "github",
"line_count": 57,
"max_line_length": 72,
"avg_line_length": 23.31578947368421,
"alnum_prop": 0.6772009029345373,
"repo_name": "Tinitto/ShoppingListAPI",
"id": "239ba4e370f71176ee270f5c7d0c8857daa8f0ac",
"size": "1329",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "api/manage.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Mako",
"bytes": "494"
},
{
"name": "Python",
"bytes": "121763"
}
],
"symlink_target": ""
} |
"""Defines interface for DB access.
The underlying driver is loaded as a :class:`LazyPluggable`.
Functions in this module are imported into the nova.virt.baremetal.db
namespace. Call these functions from nova.virt.baremetal.db namespace, not
the nova.virt.baremetal.db.api namespace.
All functions in this module return objects that implement a dictionary-like
interface. Currently, many of these objects are sqlalchemy objects that
implement a dictionary interface. However, a future goal is to have all of
these objects be simple dictionaries.
**Related Flags**
:baremetal_db_backend: string to lookup in the list of LazyPluggable backends.
`sqlalchemy` is the only supported backend right now.
:[BAREMETAL] sql_connection: string specifying the sqlalchemy connection to
use, like: `sqlite:///var/lib/nova/nova.sqlite`.
"""
from nova.openstack.common import cfg
from nova import utils
# NOTE(deva): we can't move baremetal_db_backend into an OptGroup yet
# because utils.LazyPluggable doesn't support reading from
# option groups. See bug #1093043.
# Option selecting which backend module implements this API.
db_opts = [
    cfg.StrOpt('baremetal_db_backend',
               default='sqlalchemy',
               help='The backend to use for db'),
]
CONF = cfg.CONF
CONF.register_opts(db_opts)
# The backend is loaded lazily on first attribute access; every function
# below delegates to it.
IMPL = utils.LazyPluggable(
    'baremetal_db_backend',
    sqlalchemy='nova.virt.baremetal.db.sqlalchemy.api')
# Thin pass-throughs to the configured backend (IMPL): bare-metal node
# lifecycle and lookup operations. No logic is added here.
def bm_node_get_all(context, service_host=None):
    return IMPL.bm_node_get_all(context,
                                service_host=service_host)
def bm_node_find_free(context, service_host=None,
                      memory_mb=None, cpus=None, local_gb=None):
    return IMPL.bm_node_find_free(context,
                                  service_host=service_host,
                                  memory_mb=memory_mb,
                                  cpus=cpus,
                                  local_gb=local_gb)
def bm_node_get(context, bm_node_id):
    return IMPL.bm_node_get(context, bm_node_id)
def bm_node_get_by_instance_uuid(context, instance_uuid):
    return IMPL.bm_node_get_by_instance_uuid(context,
                                             instance_uuid)
def bm_node_create(context, values):
    return IMPL.bm_node_create(context, values)
def bm_node_destroy(context, bm_node_id):
    return IMPL.bm_node_destroy(context, bm_node_id)
def bm_node_update(context, bm_node_id, values):
    return IMPL.bm_node_update(context, bm_node_id, values)
def bm_node_set_uuid_safe(context, bm_node_id, uuid):
    return IMPL.bm_node_set_uuid_safe(context, bm_node_id, uuid)
# Thin pass-throughs to the configured backend (IMPL): PXE IP address
# pool management. No logic is added here.
def bm_pxe_ip_create(context, address, server_address):
    return IMPL.bm_pxe_ip_create(context, address, server_address)
def bm_pxe_ip_create_direct(context, bm_pxe_ip):
    return IMPL.bm_pxe_ip_create_direct(context, bm_pxe_ip)
def bm_pxe_ip_destroy(context, ip_id):
    return IMPL.bm_pxe_ip_destroy(context, ip_id)
def bm_pxe_ip_destroy_by_address(context, address):
    return IMPL.bm_pxe_ip_destroy_by_address(context, address)
def bm_pxe_ip_get_all(context):
    return IMPL.bm_pxe_ip_get_all(context)
def bm_pxe_ip_get(context, ip_id):
    return IMPL.bm_pxe_ip_get(context, ip_id)
def bm_pxe_ip_get_by_bm_node_id(context, bm_node_id):
    return IMPL.bm_pxe_ip_get_by_bm_node_id(context, bm_node_id)
def bm_pxe_ip_associate(context, bm_node_id):
    return IMPL.bm_pxe_ip_associate(context, bm_node_id)
def bm_pxe_ip_disassociate(context, bm_node_id):
    return IMPL.bm_pxe_ip_disassociate(context, bm_node_id)
# Thin pass-throughs to the configured backend (IMPL): node network
# interface records. No logic is added here.
def bm_interface_get(context, if_id):
    return IMPL.bm_interface_get(context, if_id)
def bm_interface_get_all(context):
    return IMPL.bm_interface_get_all(context)
def bm_interface_destroy(context, if_id):
    return IMPL.bm_interface_destroy(context, if_id)
def bm_interface_create(context, bm_node_id, address, datapath_id, port_no):
    return IMPL.bm_interface_create(context, bm_node_id, address,
                                    datapath_id, port_no)
def bm_interface_set_vif_uuid(context, if_id, vif_uuid):
    return IMPL.bm_interface_set_vif_uuid(context, if_id, vif_uuid)
def bm_interface_get_by_vif_uuid(context, vif_uuid):
    return IMPL.bm_interface_get_by_vif_uuid(context, vif_uuid)
def bm_interface_get_all_by_bm_node_id(context, bm_node_id):
    return IMPL.bm_interface_get_all_by_bm_node_id(context, bm_node_id)
# Thin pass-throughs to the configured backend (IMPL): deployment
# records. No logic is added here.
def bm_deployment_create(context, key, image_path, pxe_config_path, root_mb,
                         swap_mb):
    return IMPL.bm_deployment_create(context, key, image_path,
                                     pxe_config_path, root_mb, swap_mb)
def bm_deployment_get(context, dep_id):
    return IMPL.bm_deployment_get(context, dep_id)
def bm_deployment_destroy(context, dep_id):
    return IMPL.bm_deployment_destroy(context, dep_id)
| {
"content_hash": "5b181384c94136d58c030185868c3958",
"timestamp": "",
"source": "github",
"line_count": 161,
"max_line_length": 79,
"avg_line_length": 30.37888198757764,
"alnum_prop": 0.6689838478838683,
"repo_name": "fajoy/nova",
"id": "206a59b4f539e7d6aac0a56703fb554a407cec30",
"size": "5768",
"binary": false,
"copies": "2",
"ref": "refs/heads/grizzly-2",
"path": "nova/virt/baremetal/db/api.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "16002"
},
{
"name": "JavaScript",
"bytes": "7403"
},
{
"name": "Python",
"bytes": "7567423"
},
{
"name": "Shell",
"bytes": "15428"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
import uuid
from random import randint, random
from moto.core import BaseBackend, BaseModel
from moto.ec2 import ec2_backends
from copy import copy
class BaseObject(BaseModel):
    """Mixin giving model objects a camelCase dict serialization."""

    def camelCase(self, key):
        """Convert a snake_case key to camelCase."""
        words = []
        for i, word in enumerate(key.split('_')):
            if i > 0:
                words.append(word.title())
            else:
                words.append(word)
        return ''.join(words)

    def gen_response_object(self):
        """Return a copy of __dict__ with snake_case keys camelCased."""
        response_object = copy(self.__dict__)
        # NOTE: iterate over a snapshot of the items; the dict is
        # mutated inside the loop, which raises RuntimeError on
        # Python 3 when iterating the live view.
        for key, value in list(response_object.items()):
            if '_' in key:
                response_object[self.camelCase(key)] = value
                del response_object[key]
        return response_object

    @property
    def response_object(self):
        """Serialized (camelCased) form of this object."""
        return self.gen_response_object()
class Cluster(BaseObject):
    """An ECS cluster: a named grouping of container instances."""

    def __init__(self, cluster_name):
        self.active_services_count = 0
        self.arn = 'arn:aws:ecs:us-east-1:012345678910:cluster/{0}'.format(
            cluster_name)
        self.name = cluster_name
        self.pending_tasks_count = 0
        self.registered_container_instances_count = 0
        self.running_tasks_count = 0
        self.status = 'ACTIVE'
    @property
    def physical_resource_id(self):
        """CloudFormation physical id: the cluster name."""
        return self.name
    @property
    def response_object(self):
        """Serialized form using clusterArn/clusterName key names."""
        response_object = self.gen_response_object()
        response_object['clusterArn'] = self.arn
        response_object['clusterName'] = self.name
        del response_object['arn'], response_object['name']
        return response_object
    @classmethod
    def create_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):
        """Create a cluster from a CloudFormation resource definition."""
        properties = cloudformation_json['Properties']
        ecs_backend = ecs_backends[region_name]
        return ecs_backend.create_cluster(
            # ClusterName is optional in CloudFormation, thus create a random
            # name if necessary
            cluster_name=properties.get(
                'ClusterName', 'ecscluster{0}'.format(int(random() * 10 ** 6))),
        )
    @classmethod
    def update_from_cloudformation_json(cls, original_resource, new_resource_name, cloudformation_json, region_name):
        """Update via CloudFormation: renaming replaces the cluster."""
        properties = cloudformation_json['Properties']
        if original_resource.name != properties['ClusterName']:
            ecs_backend = ecs_backends[region_name]
            ecs_backend.delete_cluster(original_resource.arn)
            return ecs_backend.create_cluster(
                # ClusterName is optional in CloudFormation, thus create a
                # random name if necessary
                cluster_name=properties.get(
                    'ClusterName', 'ecscluster{0}'.format(int(random() * 10 ** 6))),
            )
        else:
            # no-op when nothing changed between old and new resources
            return original_resource
class TaskDefinition(BaseObject):
    """A single revision of an ECS task definition family."""

    def __init__(self, family, revision, container_definitions, volumes=None):
        self.family = family
        self.arn = 'arn:aws:ecs:us-east-1:012345678910:task-definition/{0}:{1}'.format(
            family, revision)
        self.container_definitions = container_definitions
        if volumes is None:
            self.volumes = []
        else:
            self.volumes = volumes
    @property
    def response_object(self):
        """Serialized form using taskDefinitionArn instead of arn."""
        response_object = self.gen_response_object()
        response_object['taskDefinitionArn'] = response_object['arn']
        del response_object['arn']
        return response_object
    @classmethod
    def create_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):
        """Register a task definition from a CloudFormation resource."""
        properties = cloudformation_json['Properties']
        # Family is optional; fall back to a randomized name.
        family = properties.get(
            'Family', 'task-definition-{0}'.format(int(random() * 10 ** 6)))
        container_definitions = properties['ContainerDefinitions']
        volumes = properties['Volumes']
        ecs_backend = ecs_backends[region_name]
        return ecs_backend.register_task_definition(
            family=family, container_definitions=container_definitions, volumes=volumes)
    @classmethod
    def update_from_cloudformation_json(cls, original_resource, new_resource_name, cloudformation_json, region_name):
        """Update via CloudFormation: any change registers a replacement."""
        properties = cloudformation_json['Properties']
        family = properties.get(
            'Family', 'task-definition-{0}'.format(int(random() * 10 ** 6)))
        container_definitions = properties['ContainerDefinitions']
        volumes = properties['Volumes']
        if (original_resource.family != family or
                original_resource.container_definitions != container_definitions or
                original_resource.volumes != volumes):
            # currently TaskRoleArn isn't stored at TaskDefinition
            # instances
            ecs_backend = ecs_backends[region_name]
            ecs_backend.deregister_task_definition(original_resource.arn)
            return ecs_backend.register_task_definition(
                family=family, container_definitions=container_definitions, volumes=volumes)
        else:
            # no-op when nothing changed between old and new resources
            return original_resource
class Task(BaseObject):
    """A running instance of a task definition placed on a container
    instance."""

    def __init__(self, cluster, task_definition, container_instance_arn,
                 overrides=None, started_by=''):
        # NOTE: 'overrides' previously used a mutable default ({}),
        # which would be shared by every Task created without an
        # explicit value; use the None sentinel instead.
        self.cluster_arn = cluster.arn
        self.task_arn = 'arn:aws:ecs:us-east-1:012345678910:task/{0}'.format(
            str(uuid.uuid1()))
        self.container_instance_arn = container_instance_arn
        self.last_status = 'RUNNING'
        self.desired_status = 'RUNNING'
        self.task_definition_arn = task_definition.arn
        self.overrides = overrides if overrides is not None else {}
        self.containers = []
        self.started_by = started_by
        self.stopped_reason = ''

    @property
    def response_object(self):
        """Serialized (camelCased) form of this task."""
        response_object = self.gen_response_object()
        return response_object
class Service(BaseObject):
    """An ECS service keeping desired_count copies of a task running."""

    def __init__(self, cluster, service_name, task_definition, desired_count):
        self.cluster_arn = cluster.arn
        self.arn = 'arn:aws:ecs:us-east-1:012345678910:service/{0}'.format(
            service_name)
        self.name = service_name
        self.status = 'ACTIVE'
        self.running_count = 0
        self.task_definition = task_definition.arn
        self.desired_count = desired_count
        self.events = []
        self.load_balancers = []
        self.pending_count = 0
    @property
    def physical_resource_id(self):
        """CloudFormation physical id: the service ARN."""
        return self.arn
    @property
    def response_object(self):
        """Serialized form using serviceName/serviceArn key names."""
        response_object = self.gen_response_object()
        del response_object['name'], response_object['arn']
        response_object['serviceName'] = self.name
        response_object['serviceArn'] = self.arn
        return response_object
    @classmethod
    def create_from_cloudformation_json(cls, resource_name, cloudformation_json, region_name):
        """Create a service from a CloudFormation resource definition."""
        properties = cloudformation_json['Properties']
        # Cluster/TaskDefinition may already be resolved model objects
        # (intrinsic references) or plain strings.
        if isinstance(properties['Cluster'], Cluster):
            cluster = properties['Cluster'].name
        else:
            cluster = properties['Cluster']
        if isinstance(properties['TaskDefinition'], TaskDefinition):
            task_definition = properties['TaskDefinition'].family
        else:
            task_definition = properties['TaskDefinition']
        service_name = '{0}Service{1}'.format(cluster, int(random() * 10 ** 6))
        desired_count = properties['DesiredCount']
        # TODO: LoadBalancers
        # TODO: Role
        ecs_backend = ecs_backends[region_name]
        return ecs_backend.create_service(
            cluster, service_name, task_definition, desired_count)
    @classmethod
    def update_from_cloudformation_json(cls, original_resource, new_resource_name, cloudformation_json, region_name):
        """Update via CloudFormation: a cluster move recreates the
        service, anything else is updated in place."""
        properties = cloudformation_json['Properties']
        if isinstance(properties['Cluster'], Cluster):
            cluster_name = properties['Cluster'].name
        else:
            cluster_name = properties['Cluster']
        if isinstance(properties['TaskDefinition'], TaskDefinition):
            task_definition = properties['TaskDefinition'].family
        else:
            task_definition = properties['TaskDefinition']
        desired_count = properties['DesiredCount']
        ecs_backend = ecs_backends[region_name]
        service_name = original_resource.name
        if original_resource.cluster_arn != Cluster(cluster_name).arn:
            # TODO: LoadBalancers
            # TODO: Role
            ecs_backend.delete_service(cluster_name, service_name)
            new_service_name = '{0}Service{1}'.format(
                cluster_name, int(random() * 10 ** 6))
            return ecs_backend.create_service(
                cluster_name, new_service_name, task_definition, desired_count)
        else:
            return ecs_backend.update_service(cluster_name, service_name, task_definition, desired_count)
class ContainerInstance(BaseObject):
    """An EC2 instance registered into a cluster as ECS capacity."""

    def __init__(self, ec2_instance_id):
        self.ec2_instance_id = ec2_instance_id
        self.status = 'ACTIVE'
        self.registeredResources = []
        self.agentConnected = True
        self.containerInstanceArn = "arn:aws:ecs:us-east-1:012345678910:container-instance/{0}".format(
            str(uuid.uuid1()))
        self.pendingTaskCount = 0
        self.remainingResources = []
        self.runningTaskCount = 0
        # Static agent/docker version info reported by describe calls.
        self.versionInfo = {
            'agentVersion': "1.0.0",
            'agentHash': '4023248',
            'dockerVersion': 'DockerVersion: 1.5.0'
        }

    @property
    def response_object(self):
        """Serialized (camelCased) form of this container instance.

        BUG FIX: the previous implementation deleted 'name' and 'arn'
        keys that this class never sets, so serializing always raised
        KeyError.
        """
        return self.gen_response_object()
class ContainerInstanceFailure(BaseObject):
    """Failure record returned when a container-instance lookup misses."""

    def __init__(self, reason, container_instance_id):
        self.reason = reason
        arn_template = "arn:aws:ecs:us-east-1:012345678910:container-instance/{0}"
        self.arn = arn_template.format(container_instance_id)

    @property
    def response_object(self):
        """Serialized failure with reason and ARN."""
        failure = self.gen_response_object()
        failure['reason'] = self.reason
        failure['arn'] = self.arn
        return failure
class EC2ContainerServiceBackend(BaseBackend):
    """In-memory mock of the Amazon ECS API."""

    def __init__(self):
        self.clusters = {}           # cluster name -> Cluster
        self.task_definitions = {}   # family -> [TaskDefinition revisions]
        self.tasks = {}              # cluster name -> {task arn -> Task}
        self.services = {}           # "<cluster>:<service>" -> Service
        self.container_instances = {}  # cluster name -> {instance id -> ContainerInstance}
    def describe_task_definition(self, task_definition_str):
        """Resolve a task definition given a family, family:revision or
        ARN; raises Exception when no matching definition exists.
        """
        task_definition_name = task_definition_str.split('/')[-1]
        if ':' in task_definition_name:
            family, revision = task_definition_name.split(':')
            revision = int(revision)
        else:
            # No revision given: default to the latest registered one.
            family = task_definition_name
            revision = len(self.task_definitions.get(family, []))
        if family in self.task_definitions and 0 < revision <= len(self.task_definitions[family]):
            return self.task_definitions[family][revision - 1]
        elif family in self.task_definitions and revision == -1:
            # An explicit ':-1' addresses the newest revision directly.
            return self.task_definitions[family][revision]
        else:
            raise Exception(
                "{0} is not a task_definition".format(task_definition_name))
    def create_cluster(self, cluster_name):
        """Create and register a new cluster under the given name."""
        cluster = Cluster(cluster_name)
        self.clusters[cluster_name] = cluster
        return cluster
    def list_clusters(self):
        """
        Return every cluster ARN.

        maxSize and pagination not implemented
        """
        return [cluster.arn for cluster in self.clusters.values()]
    def describe_clusters(self, list_clusters_name=None):
        """Describe the named clusters (or 'default' when none given);
        raises Exception for any unknown name.
        """
        list_clusters = []
        if list_clusters_name is None:
            if 'default' in self.clusters:
                list_clusters.append(self.clusters['default'].response_object)
        else:
            for cluster in list_clusters_name:
                cluster_name = cluster.split('/')[-1]
                if cluster_name in self.clusters:
                    list_clusters.append(
                        self.clusters[cluster_name].response_object)
                else:
                    raise Exception(
                        "{0} is not a cluster".format(cluster_name))
        return list_clusters
    def delete_cluster(self, cluster_str):
        """Remove a cluster by name or ARN and return it."""
        cluster_name = cluster_str.split('/')[-1]
        if cluster_name in self.clusters:
            return self.clusters.pop(cluster_name)
        else:
            raise Exception("{0} is not a cluster".format(cluster_name))
    def register_task_definition(self, family, container_definitions, volumes):
        """Register a new (1-based) revision of a task-definition family."""
        if family in self.task_definitions:
            revision = len(self.task_definitions[family]) + 1
        else:
            self.task_definitions[family] = []
            revision = 1
        task_definition = TaskDefinition(
            family, revision, container_definitions, volumes)
        self.task_definitions[family].append(task_definition)
        return task_definition
    def list_task_definitions(self):
        """
        Return ARNs for every revision of every family.

        Filtering not implemented
        """
        task_arns = []
        for task_definition_list in self.task_definitions.values():
            task_arns.extend(
                [task_definition.arn for task_definition in task_definition_list])
        return task_arns
    def deregister_task_definition(self, task_definition_str):
        """Remove a 'family:revision' definition and return it.

        NOTE(review): popping from the list shifts the revision numbers
        of later revisions — presumably acceptable for this mock;
        verify against real ECS semantics.
        """
        task_definition_name = task_definition_str.split('/')[-1]
        family, revision = task_definition_name.split(':')
        revision = int(revision)
        if family in self.task_definitions and 0 < revision <= len(self.task_definitions[family]):
            return self.task_definitions[family].pop(revision - 1)
        else:
            raise Exception(
                "{0} is not a task_definition".format(task_definition_name))
    def run_task(self, cluster_str, task_definition_str, count, overrides, started_by):
        """Launch `count` copies of a task on randomly-chosen ACTIVE
        container instances in the cluster.
        """
        cluster_name = cluster_str.split('/')[-1]
        if cluster_name in self.clusters:
            cluster = self.clusters[cluster_name]
        else:
            raise Exception("{0} is not a cluster".format(cluster_name))
        task_definition = self.describe_task_definition(task_definition_str)
        if cluster_name not in self.tasks:
            self.tasks[cluster_name] = {}
        tasks = []
        container_instances = list(
            self.container_instances.get(cluster_name, {}).keys())
        if not container_instances:
            raise Exception(
                "No instances found in cluster {}".format(cluster_name))
        active_container_instances = [x for x in container_instances if
                                      self.container_instances[cluster_name][x].status == 'ACTIVE']
        # A count of 0/None still launches one task.
        for _ in range(count or 1):
            # Pick a random ACTIVE instance for each placement.
            container_instance_arn = self.container_instances[cluster_name][
                active_container_instances[randint(0, len(active_container_instances) - 1)]
            ].containerInstanceArn
            task = Task(cluster, task_definition, container_instance_arn,
                        overrides or {}, started_by or '')
            tasks.append(task)
            self.tasks[cluster_name][task.task_arn] = task
        return tasks
    def start_task(self, cluster_str, task_definition_str, container_instances, overrides, started_by):
        """Start one task on each explicitly listed container instance."""
        cluster_name = cluster_str.split('/')[-1]
        if cluster_name in self.clusters:
            cluster = self.clusters[cluster_name]
        else:
            raise Exception("{0} is not a cluster".format(cluster_name))
        task_definition = self.describe_task_definition(task_definition_str)
        if cluster_name not in self.tasks:
            self.tasks[cluster_name] = {}
        tasks = []
        if not container_instances:
            raise Exception("No container instance list provided")
        # Accept either bare instance ids or full ARNs.
        container_instance_ids = [x.split('/')[-1]
                                  for x in container_instances]
        for container_instance_id in container_instance_ids:
            container_instance_arn = self.container_instances[cluster_name][
                container_instance_id
            ].containerInstanceArn
            task = Task(cluster, task_definition, container_instance_arn,
                        overrides or {}, started_by or '')
            tasks.append(task)
            self.tasks[cluster_name][task.task_arn] = task
        return tasks
    def describe_tasks(self, cluster_str, tasks):
        """Return Task objects whose id or ARN appears in `tasks`.

        NOTE(review): the cluster argument is only validated here;
        matching tasks are collected across every cluster.
        """
        cluster_name = cluster_str.split('/')[-1]
        if cluster_name in self.clusters:
            cluster = self.clusters[cluster_name]
        else:
            raise Exception("{0} is not a cluster".format(cluster_name))
        if not tasks:
            raise Exception("tasks cannot be empty")
        response = []
        for cluster, cluster_tasks in self.tasks.items():
            for task_id, task in cluster_tasks.items():
                if task_id in tasks or task.task_arn in tasks:
                    response.append(task)
        return response
    def list_tasks(self, cluster_str, container_instance, family, started_by, service_name, desiredStatus):
        """List task ARNs filtered by cluster, container instance and
        started_by.

        NOTE(review): the family, service_name and desiredStatus
        parameters are accepted but not applied as filters.
        """
        filtered_tasks = []
        for cluster, tasks in self.tasks.items():
            for arn, task in tasks.items():
                filtered_tasks.append(task)
        if cluster_str:
            cluster_name = cluster_str.split('/')[-1]
            if cluster_name not in self.clusters:
                raise Exception("{0} is not a cluster".format(cluster_name))
            filtered_tasks = list(
                filter(lambda t: cluster_name in t.cluster_arn, filtered_tasks))
        if container_instance:
            filtered_tasks = list(filter(
                lambda t: container_instance in t.container_instance_arn, filtered_tasks))
        if started_by:
            filtered_tasks = list(
                filter(lambda t: started_by == t.started_by, filtered_tasks))
        return [t.task_arn for t in filtered_tasks]
    def stop_task(self, cluster_str, task_str, reason):
        """Mark a task STOPPED (matched on its trailing id) and return it."""
        cluster_name = cluster_str.split('/')[-1]
        if cluster_name not in self.clusters:
            raise Exception("{0} is not a cluster".format(cluster_name))
        if not task_str:
            raise Exception("A task ID or ARN is required")
        task_id = task_str.split('/')[-1]
        tasks = self.tasks.get(cluster_name, None)
        if not tasks:
            raise Exception(
                "Cluster {} has no registered tasks".format(cluster_name))
        for task in tasks.keys():
            if task.endswith(task_id):
                tasks[task].last_status = 'STOPPED'
                tasks[task].desired_status = 'STOPPED'
                tasks[task].stopped_reason = reason
                return tasks[task]
        raise Exception("Could not find task {} on cluster {}".format(
            task_str, cluster_name))
    def create_service(self, cluster_str, service_name, task_definition_str, desired_count):
        """Create a service, stored under the '<cluster>:<service>' key."""
        cluster_name = cluster_str.split('/')[-1]
        if cluster_name in self.clusters:
            cluster = self.clusters[cluster_name]
        else:
            raise Exception("{0} is not a cluster".format(cluster_name))
        task_definition = self.describe_task_definition(task_definition_str)
        # A missing desired count defaults to 0.
        desired_count = desired_count if desired_count is not None else 0
        service = Service(cluster, service_name,
                          task_definition, desired_count)
        cluster_service_pair = '{0}:{1}'.format(cluster_name, service_name)
        self.services[cluster_service_pair] = service
        return service
def list_services(self, cluster_str):
cluster_name = cluster_str.split('/')[-1]
service_arns = []
for key, value in self.services.items():
if cluster_name + ':' in key:
service_arns.append(self.services[key].arn)
return sorted(service_arns)
    def describe_services(self, cluster_str, service_names_or_arns):
        """Return Service objects matched by name (scoped to the given
        cluster) or by full ARN."""
        cluster_name = cluster_str.split('/')[-1]
        result = []
        for existing_service_name, existing_service_obj in sorted(self.services.items()):
            for requested_name_or_arn in service_names_or_arns:
                cluster_service_pair = '{0}:{1}'.format(
                    cluster_name, requested_name_or_arn)
                if cluster_service_pair == existing_service_name or existing_service_obj.arn == requested_name_or_arn:
                    result.append(existing_service_obj)
        return result
    def update_service(self, cluster_str, service_name, task_definition_str, desired_count):
        """Update a service's task definition and/or desired count;
        None arguments leave the corresponding field untouched.
        """
        cluster_name = cluster_str.split('/')[-1]
        cluster_service_pair = '{0}:{1}'.format(cluster_name, service_name)
        if cluster_service_pair in self.services:
            if task_definition_str is not None:
                # Validates that the definition exists before storing
                # the raw string.
                self.describe_task_definition(task_definition_str)
                self.services[
                    cluster_service_pair].task_definition = task_definition_str
            if desired_count is not None:
                self.services[
                    cluster_service_pair].desired_count = desired_count
            return self.services[cluster_service_pair]
        else:
            raise Exception("cluster {0} or service {1} does not exist".format(
                cluster_name, service_name))
def delete_service(self, cluster_name, service_name):
cluster_service_pair = '{0}:{1}'.format(cluster_name, service_name)
if cluster_service_pair in self.services:
service = self.services[cluster_service_pair]
if service.desired_count > 0:
raise Exception("Service must have desiredCount=0")
else:
return self.services.pop(cluster_service_pair)
else:
raise Exception("cluster {0} or service {1} does not exist".format(
cluster_name, service_name))
def register_container_instance(self, cluster_str, ec2_instance_id):
cluster_name = cluster_str.split('/')[-1]
if cluster_name not in self.clusters:
raise Exception("{0} is not a cluster".format(cluster_name))
container_instance = ContainerInstance(ec2_instance_id)
if not self.container_instances.get(cluster_name):
self.container_instances[cluster_name] = {}
container_instance_id = container_instance.containerInstanceArn.split(
'/')[-1]
self.container_instances[cluster_name][
container_instance_id] = container_instance
return container_instance
def list_container_instances(self, cluster_str):
cluster_name = cluster_str.split('/')[-1]
container_instances_values = self.container_instances.get(
cluster_name, {}).values()
container_instances = [
ci.containerInstanceArn for ci in container_instances_values]
return sorted(container_instances)
def describe_container_instances(self, cluster_str, list_container_instance_ids):
cluster_name = cluster_str.split('/')[-1]
if cluster_name not in self.clusters:
raise Exception("{0} is not a cluster".format(cluster_name))
failures = []
container_instance_objects = []
for container_instance_id in list_container_instance_ids:
container_instance = self.container_instances[
cluster_name].get(container_instance_id, None)
if container_instance is not None:
container_instance_objects.append(container_instance)
else:
failures.append(ContainerInstanceFailure(
'MISSING', container_instance_id))
return container_instance_objects, failures
def update_container_instances_state(self, cluster_str, list_container_instance_ids, status):
cluster_name = cluster_str.split('/')[-1]
if cluster_name not in self.clusters:
raise Exception("{0} is not a cluster".format(cluster_name))
status = status.upper()
if status not in ['ACTIVE', 'DRAINING']:
raise Exception("An error occurred (InvalidParameterException) when calling the UpdateContainerInstancesState operation: Container instances status should be one of [ACTIVE,DRAINING]")
failures = []
container_instance_objects = []
for container_instance_id in list_container_instance_ids:
container_instance = self.container_instances[cluster_name].get(container_instance_id, None)
if container_instance is not None:
container_instance.status = status
container_instance_objects.append(container_instance)
else:
failures.append(ContainerInstanceFailure('MISSING', container_instance_id))
return container_instance_objects, failures
    def deregister_container_instance(self, cluster_str, container_instance_str):
        # Stub: deregistration is not implemented yet; the call is silently
        # accepted and returns None.
        pass
# One ECS backend per known EC2 region (the EC2 backend values themselves
# are not used, only the region keys).
ecs_backends = {region: EC2ContainerServiceBackend() for region in ec2_backends}
| {
"content_hash": "a9927a2b2df6fc8897aed9245a58e316",
"timestamp": "",
"source": "github",
"line_count": 604,
"max_line_length": 196,
"avg_line_length": 41.76655629139073,
"alnum_prop": 0.6150156578269315,
"repo_name": "gjtempleton/moto",
"id": "e5a2e9f96dc76866a125210af6502c2b78dd46fc",
"size": "25227",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "moto/ecs/models.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "5848"
},
{
"name": "Java",
"bytes": "1688"
},
{
"name": "JavaScript",
"bytes": "756"
},
{
"name": "Makefile",
"bytes": "630"
},
{
"name": "Python",
"bytes": "2603223"
},
{
"name": "Ruby",
"bytes": "188"
}
],
"symlink_target": ""
} |
"""Integration tests for Keras."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib.keras.python import keras
from tensorflow.contrib.keras.python.keras import testing_utils
from tensorflow.python.platform import test
class KerasIntegrationTest(test.TestCase):
  """End-to-end training smoke tests across several model/data modalities."""

  def _prepare_data(self, input_shape, num_classes):
    """Seed RNG and return ((x_train, y_train), (x_test, y_test)) with
    one-hot encoded labels."""
    np.random.seed(1337)
    (x_train, y_train), (x_test, y_test) = testing_utils.get_test_data(
        train_samples=200,
        test_samples=100,
        input_shape=input_shape,
        num_classes=num_classes)
    y_train = keras.utils.to_categorical(y_train)
    y_test = keras.utils.to_categorical(y_test)
    return (x_train, y_train), (x_test, y_test)

  def _fit_and_assert(self, model, train_data, test_data):
    """Train for 10 epochs and assert final validation accuracy > 0.85."""
    x_train, y_train = train_data
    history = model.fit(x_train, y_train, epochs=10, batch_size=16,
                        validation_data=test_data,
                        verbose=2)
    self.assertTrue(history.history['val_acc'][-1] > 0.85)

  def test_vector_classification_declarative(self):
    with self.test_session():
      train_data, test_data = self._prepare_data((8,), 2)
      x_train, y_train = train_data
      model = keras.models.Sequential([
          keras.layers.Dense(8,
                             activation='relu',
                             input_shape=x_train.shape[1:]),
          keras.layers.Dropout(0.1),
          keras.layers.Dense(y_train.shape[-1], activation='softmax')
      ])
      model.compile(loss='categorical_crossentropy',
                    optimizer='rmsprop',
                    metrics=['accuracy'])
      self._fit_and_assert(model, train_data, test_data)

  def test_vector_classification_functional(self):
    with self.test_session():
      train_data, test_data = self._prepare_data((8,), 2)
      x_train, y_train = train_data
      inputs = keras.layers.Input(shape=x_train.shape[1:])
      x = keras.layers.Dense(8, activation='relu')(inputs)
      x = keras.layers.Dropout(0.1)(x)
      outputs = keras.layers.Dense(y_train.shape[-1], activation='softmax')(x)
      model = keras.models.Model(inputs, outputs)
      model.compile(loss='categorical_crossentropy',
                    optimizer='rmsprop',
                    metrics=['accuracy'])
      self._fit_and_assert(model, train_data, test_data)

  def test_temporal_classification_declarative(self):
    with self.test_session():
      train_data, test_data = self._prepare_data((4, 8), 2)
      x_train, y_train = train_data
      model = keras.models.Sequential()
      model.add(keras.layers.LSTM(3, return_sequences=True,
                                  input_shape=x_train.shape[1:]))
      model.add(keras.layers.GRU(y_train.shape[-1], activation='softmax'))
      model.compile(loss='categorical_crossentropy',
                    optimizer='adam',
                    metrics=['accuracy'])
      self._fit_and_assert(model, train_data, test_data)

  def test_image_classification_declarative(self):
    with self.test_session():
      train_data, test_data = self._prepare_data((8, 8, 3), 2)
      x_train, y_train = train_data
      model = keras.models.Sequential()
      model.add(keras.layers.Conv2D(
          8, 3,
          activation='relu',
          input_shape=x_train.shape[1:]))
      model.add(keras.layers.BatchNormalization())
      model.add(keras.layers.Conv2D(
          8, 3,
          padding='same',
          activation='relu'))
      model.add(keras.layers.GlobalMaxPooling2D())
      model.add(keras.layers.Dense(y_train.shape[-1], activation='softmax'))
      model.compile(loss='categorical_crossentropy',
                    optimizer='adam',
                    metrics=['accuracy'])
      self._fit_and_assert(model, train_data, test_data)

  def test_video_classification_functional(self):
    with self.test_session():
      train_data, test_data = self._prepare_data((4, 8, 8, 3), 3)
      x_train, y_train = train_data
      inputs = keras.layers.Input(shape=x_train.shape[1:])
      x = keras.layers.TimeDistributed(
          keras.layers.Conv2D(4, 3, activation='relu'))(inputs)
      x = keras.layers.BatchNormalization()(x)
      x = keras.layers.TimeDistributed(keras.layers.GlobalMaxPooling2D())(x)
      x = keras.layers.Conv1D(8, 3, activation='relu')(x)
      x = keras.layers.Flatten()(x)
      outputs = keras.layers.Dense(y_train.shape[-1], activation='softmax')(x)
      model = keras.models.Model(inputs, outputs)
      model.compile(loss='categorical_crossentropy',
                    optimizer=keras.optimizers.SGD(lr=0.01, momentum=0.8),
                    metrics=['accuracy'])
      self._fit_and_assert(model, train_data, test_data)
# Allow running this test module directly via the TensorFlow test runner.
if __name__ == '__main__':
  test.main()
| {
"content_hash": "9461519bb621eea2634d184deaf5366e",
"timestamp": "",
"source": "github",
"line_count": 152,
"max_line_length": 78,
"avg_line_length": 39.776315789473685,
"alnum_prop": 0.5866688719814753,
"repo_name": "taknevski/tensorflow-xsmm",
"id": "f42f81b286ed5216188cfafbe189100f7f78c59d",
"size": "6735",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "tensorflow/contrib/keras/python/keras/integration_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "7583"
},
{
"name": "C",
"bytes": "177255"
},
{
"name": "C++",
"bytes": "22491110"
},
{
"name": "CMake",
"bytes": "138415"
},
{
"name": "CSS",
"bytes": "774"
},
{
"name": "Go",
"bytes": "789070"
},
{
"name": "HTML",
"bytes": "589450"
},
{
"name": "Java",
"bytes": "286514"
},
{
"name": "JavaScript",
"bytes": "13906"
},
{
"name": "Jupyter Notebook",
"bytes": "1833636"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "Makefile",
"bytes": "37227"
},
{
"name": "Objective-C",
"bytes": "7056"
},
{
"name": "Objective-C++",
"bytes": "64656"
},
{
"name": "Protocol Buffer",
"bytes": "208667"
},
{
"name": "Python",
"bytes": "19803265"
},
{
"name": "Shell",
"bytes": "334502"
},
{
"name": "TypeScript",
"bytes": "786868"
}
],
"symlink_target": ""
} |
from django.http import HttpResponseRedirect
from django.shortcuts import render, get_object_or_404
from django.core.urlresolvers import reverse
from django.views.decorators.http import require_POST
from django.contrib.auth.decorators import login_required
from artists.models import *
from artists.forms import *
from extra.shortcuts import auth_or_403
# list, detail, create, update, delete (new form, edit form)
@login_required
def artists_get(request):
    """List every artist together with a blank creation form."""
    context = {
        'artists': Artist.objects.all(),
        'form': ArtistForm(),
    }
    return render(request, 'artists/get.html', context)
def artists_get_detail(request, artist_id):
    """Show one artist, noting whether the current user may read it."""
    artist = get_object_or_404(Artist, pk=artist_id)
    context = {
        'artist': artist,
        'has_perm': request.user.has_perm('artists.read_artist', artist),
    }
    return render(request, 'artists/get_detail.html', context)
@login_required
@require_POST
def artists_post(request):
    """Create an artist owned by the current user, or redisplay the form."""
    form = ArtistForm(request.POST)
    if not form.is_valid():
        return render(request, 'artists/new.html', {'form': form})
    artist = form.save(commit=False)
    artist.user = request.user
    artist.save()
    return HttpResponseRedirect(reverse('artists_get'))
@login_required()
@require_POST
def artists_put(request, artist_id):
    """Update an existing artist from POST data (403 unless permitted)."""
    artist = get_object_or_404(Artist, pk=artist_id)
    auth_or_403(request, 'artists.change_artist', artist)
    form = ArtistForm(request.POST, instance=artist)
    if not form.is_valid():
        context = {'form': form, 'artist_id': artist_id}
        return render(request, 'artists/edit.html', context)
    form.save()
    return HttpResponseRedirect(reverse('artists_get'))
@login_required()
@require_POST
def artists_delete(request, artist_id):
    """Delete an artist (403 unless permitted), then return to the list."""
    target = get_object_or_404(Artist, pk=artist_id)
    auth_or_403(request, 'artists.delete_artist', target)
    target.delete()
    return HttpResponseRedirect(reverse('artists_get'))
@login_required()
def artists_new(request):
    """Render an empty artist creation form.

    (Removed dead commented-out code that pre-populated the user field;
    ownership is assigned in artists_post instead.)
    """
    return render(request, 'artists/new.html', {'form': ArtistForm()})
@login_required()
def artists_edit(request, artist_id):
    """Render the edit form for an artist (403 unless permitted)."""
    artist = get_object_or_404(Artist, pk=artist_id)
    auth_or_403(request, 'artists.change_artist', artist)
    context = {'form': ArtistForm(instance=artist), 'artist': artist}
    return render(request, 'artists/edit.html', context)
def albums_form_new(request, artist_id):
    """Render an empty album creation form for the given artist.

    NOTE(review): unlike the artist views, this view is not decorated with
    @login_required — confirm whether anonymous access is intentional.
    (Removed dead commented-out experiments.)
    """
    artist = get_object_or_404(Artist, pk=artist_id)
    return render(
        request,
        'albums/form_new.html',
        {'form': AlbumForm(), 'artist': artist},
    )
@login_required
@require_POST
def albums_post(request, artist_id):
    """Create an album under an artist, owned by the current user.

    On validation failure the creation form is redisplayed.
    (Removed dead commented-out code.)
    """
    artist = get_object_or_404(Artist, pk=artist_id)
    form = AlbumForm(request.POST)
    if not form.is_valid():
        return render(
            request,
            'albums/form_new.html',
            {'form': form, 'artist': artist},
        )
    album = form.save(commit=False)
    album.artist = artist
    album.user = request.user
    album.save()
    return HttpResponseRedirect(reverse('artists_get'))
def albums_get(request, artist_id):
return None | {
"content_hash": "0a0c2427284369b30613b853bad5032b",
"timestamp": "",
"source": "github",
"line_count": 169,
"max_line_length": 78,
"avg_line_length": 24.816568047337277,
"alnum_prop": 0.5834525512637101,
"repo_name": "cnanders/test-cxro-tickets",
"id": "4842a788ef7fcfb93f8e7bb500a9675fbbf09bc4",
"size": "4221",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "artists/views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "66962"
}
],
"symlink_target": ""
} |
"""
Minimally improved noise for python. Pragmatic, minimal improved logging for python.
* Provides two separate log levels.
* One log level for dependencies and one for the application.
* Improved default format string.
"""
from __future__ import absolute_import
import logging
import sys
from . import version
__title__ = 'threepio'
__version__ = version.get_version(form='short')
__author__ = 'J. Matt Peterson'
__license__ = 'Apache 2.0'
__copyright__ = 'Copyright 2013-2014 J. Matt Peterson'

# Module-level logger; assigned by initialize() when global_logger=True.
logger = None

VERSION = version.VERSION
# NOTE: this rebinds the imported `version` module name to a verbose
# version string, shadowing the module object from here on.
version = version.get_version(form='verbose')
# Defaults used by initialize().
LOGGER_NAME = "threepio"
LOG_FILENAME = "./threepio.log"
APP_LOGGING_LEVEL = logging.DEBUG
DEP_LOGGING_LEVEL = logging.INFO


def initialize(logger_name=LOGGER_NAME,
               log_filename=LOG_FILENAME,
               app_logging_level=APP_LOGGING_LEVEL,
               dep_logging_level=DEP_LOGGING_LEVEL,
               format=None,
               logger_class=None,
               handlers=None,
               global_logger=True):
    """
    Constructs and initializes a `logging.Logger` object.

    Returns :class:`logging.Logger` object.

    :param logger_name: name of the new logger.
    :param log_filename: The log file location :class:`str` or None.
    :param app_logging_level: The logging level to use for the application.
    :param dep_logging_level: The logging level to use for dependencies.
    :param format: The format string to use :class: `str` or None.
    :param logger_class: The logger class to use
    :param handlers: List of handler instances to add (default: none).
    :param global_logger: If true set threepio's global logger variable to
        this logger.
    """
    # Fix: the previous signature used a mutable default (handlers=[]).
    if handlers is None:
        handlers = []
    # If there is no format, use a default format.
    if not format:
        format = "%(asctime)s %(name)s-%(levelname)s "\
            + "[%(pathname)s %(lineno)d] %(message)s"
    formatter = logging.Formatter(format)
    # Setup the root logging for dependencies, etc.
    if log_filename:
        logging.basicConfig(
            level=dep_logging_level,
            format=format,
            filename=log_filename,
            filemode='a+')
    else:
        logging.basicConfig(
            level=dep_logging_level,
            format=format)
    # Setup and add separate application logging; temporarily swap the
    # logger class so only this logger is affected by logger_class.
    if logger_class:
        original_class = logging.getLoggerClass()
        logging.setLoggerClass(logger_class)
        new_logger = logging.getLogger(logger_name)
        logging.setLoggerClass(original_class)
    else:
        new_logger = logging.getLogger(logger_name)
    # Set the app logging level (required to get level to apply).
    new_logger.setLevel(app_logging_level)
    # Set the global_logger by default.
    if global_logger:
        global logger
        logger = new_logger
    for handler in handlers:
        handler.setFormatter(formatter)
        handler.setLevel(app_logging_level)
        new_logger.addHandler(handler)
    return new_logger
| {
"content_hash": "25abc9c6bcf9ca866656d2cab7efb486",
"timestamp": "",
"source": "github",
"line_count": 93,
"max_line_length": 87,
"avg_line_length": 31.827956989247312,
"alnum_prop": 0.6557432432432433,
"repo_name": "jmatt/threepio",
"id": "f7d881c91d9a4491a7559bc4a522f4b7883727d2",
"size": "2960",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "threepio/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "5782"
}
],
"symlink_target": ""
} |
"""
Add first_answer_email_sent and first_l10n_email_sent fields to Profile.
"""
from __future__ import unicode_literals
from django.db import models, migrations
import kitsune.sumo.models # noqa
class Migration(migrations.Migration):
    """Add first-contribution email tracking flags to Profile."""

    dependencies = [
        ('users', '0003_auto_20150430_1304'),
    ]

    # Both new fields are identical boolean flags; build them from a table.
    operations = [
        migrations.AddField(
            model_name='profile',
            name=field_name,
            field=models.BooleanField(default=False, help_text=help_text),
            preserve_default=True,
        )
        for field_name, help_text in (
            ('first_answer_email_sent',
             'Has been sent a first answer contribution email.'),
            ('first_l10n_email_sent',
             'Has been sent a first l10n contribution email.'),
        )
    ]
| {
"content_hash": "be12b65bfdedb192f3ccd96b26688256",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 115,
"avg_line_length": 30.103448275862068,
"alnum_prop": 0.6265750286368843,
"repo_name": "mozilla/kitsune",
"id": "7a2a5de96c3f034e871b916c8afd65e09e61f126",
"size": "897",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "kitsune/users/migrations/0004_auto_add_contrib_email_flags.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "1156"
},
{
"name": "Dockerfile",
"bytes": "3027"
},
{
"name": "HTML",
"bytes": "535448"
},
{
"name": "JavaScript",
"bytes": "658477"
},
{
"name": "Jinja",
"bytes": "4837"
},
{
"name": "Makefile",
"bytes": "2193"
},
{
"name": "Nunjucks",
"bytes": "68656"
},
{
"name": "Python",
"bytes": "2827116"
},
{
"name": "SCSS",
"bytes": "240092"
},
{
"name": "Shell",
"bytes": "10759"
},
{
"name": "Svelte",
"bytes": "26864"
}
],
"symlink_target": ""
} |
import os.path
from django import forms
from django.contrib.admin.helpers import ActionForm
from django.utils.translation import gettext_lazy as _
class ImportForm(forms.Form):
    """Upload form: a file plus the format it should be parsed as."""

    import_file = forms.FileField(
        label=_('File to import')
    )
    input_format = forms.ChoiceField(
        label=_('Format'),
        choices=(),
    )

    def __init__(self, import_formats, *args, **kwargs):
        super().__init__(*args, **kwargs)
        choices = [(str(index), fmt().get_title())
                   for index, fmt in enumerate(import_formats)]
        # Force an explicit selection only when several formats exist.
        if len(import_formats) > 1:
            choices.insert(0, ('', '---'))
        self.fields['input_format'].choices = choices
class ConfirmImportForm(forms.Form):
    """Hidden-field form carrying state between the two import steps."""

    import_file_name = forms.CharField(widget=forms.HiddenInput())
    original_file_name = forms.CharField(widget=forms.HiddenInput())
    input_format = forms.CharField(widget=forms.HiddenInput())

    def clean_import_file_name(self):
        # Keep only the basename so directory components cannot leak in.
        return os.path.basename(self.cleaned_data['import_file_name'])
class ExportForm(forms.Form):
    """Form selecting the file format to export to."""

    file_format = forms.ChoiceField(
        label=_('Format'),
        choices=(),
    )

    def __init__(self, formats, *args, **kwargs):
        super().__init__(*args, **kwargs)
        choices = [(str(index), fmt().get_title())
                   for index, fmt in enumerate(formats)]
        # Force an explicit selection only when several formats exist.
        if len(formats) > 1:
            choices.insert(0, ('', '---'))
        self.fields['file_format'].choices = choices
def export_action_form_factory(formats):
    """
    Returns an ActionForm subclass containing a ChoiceField populated with
    the given formats.
    """
    attrs = {
        '__doc__': 'Action form with export format ChoiceField.',
        'file_format': forms.ChoiceField(
            label=_('Format'), choices=formats, required=False),
    }
    return type(str('ExportActionForm'), (ActionForm,), attrs)
| {
"content_hash": "60bd9f2c992c9aafd852b42fbe02b9ef",
"timestamp": "",
"source": "github",
"line_count": 69,
"max_line_length": 74,
"avg_line_length": 29.36231884057971,
"alnum_prop": 0.6031589338598223,
"repo_name": "bmihelac/django-import-export",
"id": "76dbaf75178ae8b43ea753a77f8320bbab778311",
"size": "2026",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "import_export/forms.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "HTML",
"bytes": "8297"
},
{
"name": "JavaScript",
"bytes": "777"
},
{
"name": "Python",
"bytes": "184539"
},
{
"name": "Shell",
"bytes": "79"
}
],
"symlink_target": ""
} |
'''
Created on 11.06.2015
@author: marscher
'''
import warnings
import unittest
import os
import sys
from pyemma.util.config import readConfiguration
from pyemma.util.files import TemporaryDirectory
class TestConfig(unittest.TestCase):
    """Tests for pyemma configuration-directory error handling.

    NOTE(review): both tests reassign os.environ['HOME'] without restoring
    it, which leaks into later tests — consider a setUp/tearDown.
    """

    @unittest.skipIf(sys.platform == 'win32', 'unix based test')
    def test_can_not_create_cfg_dir(self):
        # Point HOME at a location where a directory cannot be created.
        os.environ['HOME'] = '/dev/null'
        exp_homedir = os.path.expanduser('~')
        assert exp_homedir == '/dev/null'
        with warnings.catch_warnings(record=True) as w:
            # Cause all warnings to always be triggered.
            warnings.simplefilter("always")
            # Trigger a warning.
            readConfiguration()
            assert len(w) == 1
            assert issubclass(w[-1].category, UserWarning)
            assert "could not create" in str(w[-1].message)

    @unittest.skipIf(sys.platform == 'win32', 'unix based test')
    def test_non_writeable_cfg_dir(self):
        with TemporaryDirectory() as tmp:
            cfg_dir = os.path.join(tmp, '.pyemma')
            os.mkdir(cfg_dir)
            os.environ['HOME'] = tmp
            # Make cfg dir non-writeable. BUG FIX: the literal 444 is
            # decimal (mode 0o674, which keeps owner write permission);
            # the intended read-only mode is octal 0o444.
            os.chmod(cfg_dir, 0o444)
            exp_homedir = os.path.expanduser('~')
            assert exp_homedir == tmp
            with warnings.catch_warnings(record=True) as w:
                # Cause all warnings to always be triggered.
                warnings.simplefilter("always")
                # Trigger a warning.
                readConfiguration()
                assert len(w) == 1
                assert issubclass(w[-1].category, UserWarning)
                assert "is not writeable" in str(w[-1].message)

    def test_shortcuts(self):
        import pyemma
        pyemma.util.config.show_progress_bars = False

    def test_shortcuts2(self):
        import pyemma
        pyemma.config.show_progress_bars = 'True'

    def test_shortcut3(self):
        import pyemma
        pyemma.config['show_progress_bars'] = 'True'
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()
| {
"content_hash": "3ea6ba3801590f3bfb39c93c315b494b",
"timestamp": "",
"source": "github",
"line_count": 69,
"max_line_length": 64,
"avg_line_length": 29.81159420289855,
"alnum_prop": 0.5911521633446767,
"repo_name": "arokem/PyEMMA",
"id": "fec1b758e63b80c2cafeebd6de368f4bf4aa3168",
"size": "2057",
"binary": false,
"copies": "2",
"ref": "refs/heads/devel",
"path": "pyemma/util/tests/test_config.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C",
"bytes": "39352"
},
{
"name": "Python",
"bytes": "1138398"
}
],
"symlink_target": ""
} |
import logging
import pkg_resources
import six
from novaclient import exceptions
from novaclient import utils
logger = logging.getLogger(__name__)
_discovered_plugins = {}
def discover_auth_systems():
    """Discover the available auth-systems.

    This won't take into account the old style auth-systems.
    """
    ep_name = 'openstack.client.auth_plugin'
    for ep in pkg_resources.iter_entry_points(ep_name):
        try:
            # FIXME(dhellmann): It would be better to use stevedore
            # here, since it abstracts this difference in behavior
            # between versions of setuptools, but this seemed like a
            # more expedient fix.
            modern_api = hasattr(ep, 'resolve') and hasattr(ep, 'require')
            if modern_api:
                auth_plugin = ep.resolve()
            else:
                auth_plugin = ep.load(require=False)
        except (ImportError, pkg_resources.UnknownExtra, AttributeError) as e:
            logger.debug("ERROR: Cannot load auth plugin %s" % ep.name)
            logger.debug(e, exc_info=1)
        else:
            _discovered_plugins[ep.name] = auth_plugin
def load_auth_system_opts(parser):
    """Load options needed by the available auth-systems into a parser.

    This function will try to populate the parser with options from the
    available plugins.
    """
    for name, auth_plugin in six.iteritems(_discovered_plugins):
        add_opts_fn = getattr(auth_plugin, "add_opts", None)
        if add_opts_fn is None:
            continue
        group = parser.add_argument_group("Auth-system '%s' options" % name)
        add_opts_fn(group)
def load_plugin(auth_system):
    """Instantiate the plugin registered for *auth_system*."""
    plugin_cls = _discovered_plugins.get(auth_system)
    if plugin_cls is not None:
        return plugin_cls()
    # NOTE(aloga): If we arrive here, the plugin will be an old-style one,
    # so we have to create a fake AuthPlugin for it.
    return DeprecatedAuthPlugin(auth_system)
class BaseAuthPlugin(object):
    """Base class for authentication plugins.

    An authentication plugin needs to override at least the authenticate
    method to be a valid plugin.
    """

    def __init__(self):
        self.opts = {}

    def get_auth_url(self):
        """Return the auth url for the plugin (if any)."""
        return None

    @staticmethod
    def add_opts(parser):
        """Populate and return the parser with the options for this plugin.

        If the plugin does not need any options, it should return the same
        parser untouched.
        """
        return parser

    def parse_opts(self, args):
        """Parse the actual auth-system options if any.

        This method is expected to populate the attribute self.opts with a
        dict containing the options and values needed to make authentication.
        If the dict is empty, the client should assume that it needs the same
        options as the 'keystone' auth system (i.e. os_username and
        os_password).

        Returns the self.opts dict.
        """
        return self.opts

    def authenticate(self, cls, auth_url):
        """Authenticate using plugin defined method."""
        # Fix: the base class never sets self.auth_system, so referencing
        # it directly raised AttributeError instead of the intended
        # AuthSystemNotFound; getattr covers subclasses that do set it.
        raise exceptions.AuthSystemNotFound(
            getattr(self, 'auth_system', None))
class DeprecatedAuthPlugin(object):
    """Class to mimic the AuthPlugin class for deprecated auth systems.

    Old auth systems only define two entry points: openstack.client.auth_url
    and openstack.client.authenticate. This class will load those entry points
    into a class similar to a valid AuthPlugin.
    """

    def __init__(self, auth_system):
        self.auth_system = auth_system

        def authenticate(cls, auth_url):
            raise exceptions.AuthSystemNotFound(self.auth_system)

        self.opts = {}
        # Defaults; replaced below when matching entry points exist.
        self.get_auth_url = lambda: None
        self.authenticate = authenticate
        self._load_endpoints()

    def _load_endpoints(self):
        # Each attribute is overridden by its entry point when one is
        # registered for this auth system.
        endpoint_map = (
            ('get_auth_url', 'openstack.client.auth_url'),
            ('authenticate', 'openstack.client.authenticate'),
        )
        for attr_name, ep_name in endpoint_map:
            fn = utils._load_entry_point(ep_name, name=self.auth_system)
            if fn:
                setattr(self, attr_name, fn)

    def parse_opts(self, args):
        return self.opts
| {
"content_hash": "8a7c9c2daa8cd6b2547b875cae35c2c2",
"timestamp": "",
"source": "github",
"line_count": 134,
"max_line_length": 78,
"avg_line_length": 31.776119402985074,
"alnum_prop": 0.6378581493658995,
"repo_name": "guptaankita/python-novaclient",
"id": "d729c4d5af46c2200eb81f43f5e9d303948b56b3",
"size": "4946",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "novaclient/auth_plugin.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "1060506"
},
{
"name": "Shell",
"bytes": "6360"
}
],
"symlink_target": ""
} |
"""Define RainMachine data models."""
from dataclasses import dataclass
from homeassistant.helpers.entity import EntityDescription
@dataclass
class RainMachineEntityDescriptionMixinApiCategory:
    """Define an entity description mixin to include an API category."""

    # API category string for this entity; consumed by the integration's
    # entity descriptions (semantics defined elsewhere in the component).
    api_category: str
@dataclass
class RainMachineEntityDescriptionMixinDataKey:
    """Define an entity description mixin to include a data payload key."""

    # Key used to pick this entity's value out of a data payload.
    data_key: str
@dataclass
class RainMachineEntityDescriptionMixinUid:
    """Define an entity description mixin to include an activity UID."""

    # Integer UID of the activity this entity describes.
    uid: int
@dataclass
class RainMachineEntityDescription(
    EntityDescription, RainMachineEntityDescriptionMixinApiCategory
):
    """Describe a RainMachine entity."""
| {
"content_hash": "40c24eb6c0c1d35084e3cfd754545bea",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 75,
"avg_line_length": 23.5625,
"alnum_prop": 0.7811671087533156,
"repo_name": "mezz64/home-assistant",
"id": "9ae99fe247ad9564bca2eb7bcc41b0d7256bc22a",
"size": "754",
"binary": false,
"copies": "3",
"ref": "refs/heads/dev",
"path": "homeassistant/components/rainmachine/model.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2963"
},
{
"name": "PLSQL",
"bytes": "840"
},
{
"name": "Python",
"bytes": "52481895"
},
{
"name": "Shell",
"bytes": "6252"
}
],
"symlink_target": ""
} |
import json
import time
from dcos import (cosmos, errors, package, packagemanager, subcommand)
from shakedown.dcos.service import *
import shakedown
def _get_options(options_file=None):
""" Read in options_file as JSON.
:param options_file: filename to return
:type options_file: str
:return: options as dictionary
:rtype: dict
"""
if options_file is not None:
with open(options_file, 'r') as opt_file:
options = json.loads(opt_file.read())
else:
options = {}
return options
def _get_service_name(package_name, pkg):
labels = pkg.marathon_json({}).get('labels')
if 'DCOS_SERVICE_NAME' in labels:
return labels['DCOS_SERVICE_NAME']
else:
return package_name
def _get_package_manager():
    """ Get an instance of Cosmos with the correct URL.

    :return: Cosmos instance
    :rtype: packagemanager.PackageManager
    """
    cosmos_url = cosmos.get_cosmos_url()
    return packagemanager.PackageManager(cosmos_url)
def install_package(
        package_name,
        package_version=None,
        service_name=None,
        options_file=None,
        options_json=None,
        wait_for_completion=False,
        timeout_sec=600,
        expected_running_tasks=0
):
    """ Install a package via the DC/OS library
    :param package_name: name of the package
    :type package_name: str
    :param package_version: version of the package (defaults to latest)
    :type package_version: str
    :param service_name: unique service name for the package
    :type service_name: str
    :param options_file: filename that has options to use and is JSON format
    :type options_file: str
    :param options_json: dict that has options to use and is JSON format
    :type options_json: dict
    :param wait_for_completion: whether or not to wait for the app's deployment to complete
    :type wait_for_completion: bool
    :param timeout_sec: number of seconds to wait for task completion
    :type timeout_sec: int
    :param expected_running_tasks: number of service tasks to check for, or zero to disable
    :type expected_task_count: int
    :return: True if installation was successful, False otherwise
    :rtype: bool
    """
    start = time.time()
    # options_file takes precedence over options_json; neither means {}.
    if options_file:
        options = _get_options(options_file)
    elif options_json:
        options = options_json
    else:
        options = {}
    package_manager = _get_package_manager()
    pkg = package_manager.get_package_version(package_name, package_version)
    if package_version is None:
        # Get the resolved version for logging below
        package_version = 'auto:{}'.format(pkg.version())
    if service_name is None:
        # Get the service name from the marathon template
        # NOTE(review): if the template has no 'labels' key, `in None`
        # raises TypeError, which is NOT a DCOSException and escapes this
        # try — confirm whether that is acceptable here.
        try:
            labels = pkg.marathon_json(options).get('labels')
            if 'DCOS_SERVICE_NAME' in labels:
                service_name = labels['DCOS_SERVICE_NAME']
        except errors.DCOSException as e:
            # Best-effort only: fall through with service_name left as None.
            pass
    print('\n{}installing {} with service={} version={} options={}'.format(
        shakedown.cli.helpers.fchr('>>'), package_name, service_name, package_version, options))
    try:
        # Print pre-install notes to console log
        pre_install_notes = pkg.package_json().get('preInstallNotes')
        if pre_install_notes:
            print(pre_install_notes)
        package_manager.install_app(pkg, options, service_name)
        # Print post-install notes to console log
        post_install_notes = pkg.package_json().get('postInstallNotes')
        if post_install_notes:
            print(post_install_notes)
        # Optionally wait for the app's deployment to finish
        if wait_for_completion:
            print("\n{}waiting for {} deployment to complete...".format(
                shakedown.cli.helpers.fchr('>>'), service_name))
            if expected_running_tasks > 0 and service_name is not None:
                wait_for_service_tasks_running(service_name, expected_running_tasks, timeout_sec)
            app_id = pkg.marathon_json(options).get('id')
            shakedown.deployment_wait(timeout_sec, app_id)
            print('\n{}install completed after {}\n'.format(
                shakedown.cli.helpers.fchr('>>'), pretty_duration(time.time() - start)))
        else:
            print('\n{}install started after {}\n'.format(
                shakedown.cli.helpers.fchr('>>'), pretty_duration(time.time() - start)))
    except errors.DCOSException as e:
        # NOTE(review): the failure is only printed; execution continues and
        # the function still returns True below — confirm this is intended.
        print('\n{}{}'.format(
            shakedown.cli.helpers.fchr('>>'), e))
    # Install subcommands (if defined)
    if pkg.cli_definition():
        print("{}installing CLI commands for package '{}'".format(
            shakedown.cli.helpers.fchr('>>'), package_name))
        subcommand.install(pkg)
    return True
def install_package_and_wait(
        package_name,
        package_version=None,
        service_name=None,
        options_file=None,
        options_json=None,
        wait_for_completion=True,
        timeout_sec=600,
        expected_running_tasks=0
):
    """ Install a package via the DC/OS library and wait for completion

    Thin convenience wrapper around :func:`install_package` whose only
    difference is that ``wait_for_completion`` defaults to True.
    """
    args = (package_name,
            package_version,
            service_name,
            options_file,
            options_json,
            wait_for_completion,
            timeout_sec,
            expected_running_tasks)
    return install_package(*args)
def package_installed(package_name, service_name=None):
    """ Check whether the package package_name is currently installed.

    A package counts as installed when either a matching app is deployed or
    a CLI subcommand for it is installed.

    :param package_name: package name
    :type package_name: str
    :param service_name: service_name
    :type service_name: str

    :return: True if installed, False otherwise
    :rtype: bool
    """
    package_manager = _get_package_manager()
    # Fast path: a deployed app with this package name.
    if package_manager.installed_apps(package_name, service_name):
        return True
    # Otherwise look for an installed CLI subcommand. Short-circuit at the
    # first match instead of scanning (and fetching package_json for) every
    # remaining subcommand.
    # NOTE(review): `package` is resolved from module scope here — confirm it
    # is the intended helper (vs. `subcommand`), as only `subcommand` is used
    # elsewhere in this view of the file.
    return any(subcmd.package_json()['name'] == package_name
               for subcmd in package.installed_subcommands())
def uninstall_package(
        package_name,
        service_name=None,
        all_instances=False,
        wait_for_completion=False,
        timeout_sec=600
):
    """ Uninstall a package using the DC/OS library.

    :param package_name: name of the package
    :type package_name: str
    :param service_name: unique service name for the package
    :type service_name: str
    :param all_instances: uninstall all instances of package
    :type all_instances: bool
    :param wait_for_completion: whether or not to wait for task completion before returning
    :type wait_for_completion: bool
    :param timeout_sec: number of seconds to wait for task completion
    :type timeout_sec: int

    :return: True if uninstall was successful, False otherwise
    :rtype: bool
    """
    package_manager = _get_package_manager()
    pkg = package_manager.get_package_version(package_name, None)
    try:
        # Resolve the service name from the package itself when not given.
        if service_name is None:
            service_name = _get_service_name(package_name, pkg)
        print("{}uninstalling package '{}' with service name '{}'\n".format(
            shakedown.cli.helpers.fchr('>>'), package_name, service_name))
        package_manager.uninstall_app(package_name, all_instances, service_name)
        if wait_for_completion:
            # Block until the framework's tasks disappear from Mesos.
            wait_for_mesos_task_removal(service_name, timeout_sec=timeout_sec)
    except errors.DCOSException as e:
        print('\n{}{}'.format(
            shakedown.cli.helpers.fchr('>>'), e))
    # Remove any CLI subcommands the package shipped with.
    if pkg.cli_definition():
        print("{}uninstalling CLI commands for package '{}'".format(
            shakedown.cli.helpers.fchr('>>'), package_name))
        subcommand.uninstall(package_name)
    return True
def uninstall_package_and_wait(
        package_name,
        service_name=None,
        all_instances=False,
        wait_for_completion=True,
        timeout_sec=600
):
    """ Uninstall a package via the DC/OS library and wait for completion

    Thin wrapper over :func:`uninstall_package` with
    ``wait_for_completion`` defaulted to True.

    :param package_name: name of the package
    :type package_name: str
    :param service_name: unique service name for the package
    :type service_name: str
    :param all_instances: uninstall all instances of package
    :type all_instances: bool
    :param wait_for_completion: whether or not to wait for task completion before returning
    :type wait_for_completion: bool
    :param timeout_sec: number of seconds to wait for task completion
    :type timeout_sec: int

    :return: True if uninstall was successful, False otherwise
    :rtype: bool
    """
    return uninstall_package(
        package_name,
        service_name=service_name,
        all_instances=all_instances,
        wait_for_completion=wait_for_completion,
        timeout_sec=timeout_sec)
def uninstall_package_and_data(
        package_name,
        service_name=None,
        role=None,
        principal=None,
        zk_node=None,
        timeout_sec=600):
    """ Uninstall a package via the DC/OS library, wait for completion, and delete any persistent data

    :param package_name: name of the package
    :type package_name: str
    :param service_name: unique service name for the package
    :type service_name: str
    :param role: role to use when deleting data, or <service_name>-role if unset
    :type role: str, or None
    :param principal: accepted for backward compatibility; currently unused
    :type principal: str, or None
    :param zk_node: zk node to delete, or dcos-service-<service_name> if unset
    :type zk_node: str, or None
    :param timeout_sec: number of seconds to wait for task completion
    :type timeout_sec: int
    """
    start = time.time()
    if service_name is None:
        pkg = _get_package_manager().get_package_version(package_name, None)
        service_name = _get_service_name(package_name, pkg)
    print('\n{}uninstalling/deleting {}'.format(shakedown.cli.helpers.fchr('>>'), service_name))
    try:
        uninstall_package_and_wait(package_name, service_name=service_name, timeout_sec=timeout_sec)
    except (errors.DCOSException, ValueError) as e:
        # Best-effort: continue to data deletion even if the uninstall failed.
        print('Got exception when uninstalling package, ' +
              'continuing with janitor anyway: {}'.format(e))
    data_start = time.time()
    if (not role or not principal or not zk_node) and service_name is None:
        # Fix: the original raised a bare, undefined `DCOSException`, which
        # would surface as a NameError; qualify it as elsewhere in this file.
        # NOTE(review): service_name is always resolved above, so this guard
        # appears unreachable — kept defensively.
        raise errors.DCOSException('service_name must be provided when data params are missing AND the package isn\'t installed')
    if not role:
        role = '{}-role'.format(service_name)
    if not zk_node:
        zk_node = 'dcos-service-{}'.format(service_name)
    delete_persistent_data(role, zk_node)
    finish = time.time()
    print('\n{}uninstall/delete done after pkg({}) + data({}) = total({})\n'.format(
        shakedown.cli.helpers.fchr('>>'),
        pretty_duration(data_start - start),
        pretty_duration(finish - data_start),
        pretty_duration(finish - start)))
def get_package_repos():
    """Return the list of currently configured package repositories."""
    return _get_package_manager().get_repos()
def package_version_changed_predicate(package_manager, package_name, prev_version):
    """ Returns whether the provided package has a version other than prev_version
    """
    current = package_manager.get_package_version(package_name, None)
    return current != prev_version
def add_package_repo(
        repo_name,
        repo_url,
        index=None,
        wait_for_package=None,
        expect_prev_version=None):
    """ Add a repository to the list of package sources

    :param repo_name: name of the repository to add
    :type repo_name: str
    :param repo_url: location of the repository to add
    :type repo_url: str
    :param index: index (precedence) for this repository
    :type index: int
    :param wait_for_package: the package whose version should change after the repo is added
    :type wait_for_package: str, or None
    :param expect_prev_version: accepted but currently unused
        (NOTE(review): confirm whether callers still rely on it)

    :return: True if successful, False otherwise
    :rtype: bool
    """
    package_manager = _get_package_manager()
    prev_version = None
    if wait_for_package:
        # Snapshot the current version so we can detect the change.
        prev_version = package_manager.get_package_version(wait_for_package, None)
    if not package_manager.add_repo(repo_name, repo_url, index):
        return False
    if not wait_for_package:
        return True
    try:
        spinner.time_wait(
            lambda: package_version_changed_predicate(
                package_manager, wait_for_package, prev_version))
        return True
    except TimeoutExpired:
        return False
def remove_package_repo(repo_name, wait_for_package=None):
    """ Remove a repository from the list of package sources

    :param repo_name: name of the repository to remove
    :type repo_name: str
    :param wait_for_package: the package whose version should change after the repo is removed
    :type wait_for_package: str, or None

    :returns: True if successful, False otherwise
    :rtype: bool
    """
    package_manager = _get_package_manager()
    prev_version = None
    if wait_for_package:
        # Snapshot the current version so we can detect the change.
        prev_version = package_manager.get_package_version(wait_for_package, None)
    if not package_manager.remove_repo(repo_name):
        return False
    if not wait_for_package:
        return True
    try:
        spinner.time_wait(
            lambda: package_version_changed_predicate(
                package_manager, wait_for_package, prev_version))
        return True
    except TimeoutExpired:
        return False
def remove_package_repo_and_wait(repo_name, wait_for_package):
    """ Remove a repository from the list of package sources, then wait for the removal to complete

    Alias of :func:`remove_package_repo` with ``wait_for_package`` required.

    :param repo_name: name of the repository to remove
    :type repo_name: str
    :param wait_for_package: the package whose version should change after the repo is removed
    :type wait_for_package: str

    :returns: True if successful, False otherwise
    :rtype: bool
    """
    return remove_package_repo(repo_name, wait_for_package=wait_for_package)
def get_package_versions(package_name):
    """ Returns the list of versions of a given package

    :param package_name: name of the package
    :type package_name: str
    """
    manager = _get_package_manager()
    return manager.get_package_version(package_name, None).package_versions()
| {
"content_hash": "fb3a91cf8edab05e94a62d78b92e3259",
"timestamp": "",
"source": "github",
"line_count": 432,
"max_line_length": 122,
"avg_line_length": 34.28472222222222,
"alnum_prop": 0.6405374383903856,
"repo_name": "dcos/shakedown",
"id": "960cbdaa09694084a0a9f8fc18f7ca4d63945303",
"size": "14811",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "shakedown/dcos/package.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "138618"
}
],
"symlink_target": ""
} |
def read_map(example_file):
    """Parse a map file: line 1 is a number, line 2 the answer, the rest rows.

    Returns a 3-tuple ``(grid, num, ans)`` where ``grid`` is a list of rows,
    each row a list of single characters; blank rows are dropped.
    """
    with open(example_file) as fh:
        lines = fh.read().splitlines()
    num = int(lines[0])
    ans = int(lines[1])
    grid = [list(row.strip()) for row in lines[2:] if row.strip()]
    return grid, num, ans
| {
"content_hash": "0d5ac1244cdf18248711917d8eaa6913",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 59,
"avg_line_length": 35.142857142857146,
"alnum_prop": 0.6666666666666666,
"repo_name": "gaoyunzhi/pathery",
"id": "822231a98d6ec099825fc5d36fc00363f148ce9b",
"size": "246",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "read_map.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "19273"
}
],
"symlink_target": ""
} |
"""The setup script."""
from setuptools import setup, find_packages
with open('README.rst') as readme_file:
readme = readme_file.read()
with open('HISTORY.rst') as history_file:
history = history_file.read()
# put package requirements here
requirements = [
'jmespath',
'python-box'
]
# Optional packages
extras = {
'allmeta': ['piexif', 'filetype'],
'winlnk': ['pywin32'],
'frontmatter': ['python-frontmatter']
}
# put setup requirements (distutils extensions, etc.) here
setup_requirements = [
'pytest-runner',
]
# put package test requirements here
test_requirements = [
'pytest',
'pytest-catchlog',
'piexif',
'filetype',
'jmespath',
'python-box',
'python-frontmatter'
]
setup(
name='taggo',
version='0.18.0',
description="Tag organizer that creates symlinks from filename-tags",
long_description=readme + '\n\n' + history,
author="Lars Solberg",
author_email='lars.solberg@gmail.com',
url='https://github.com/xeor/taggo',
packages=find_packages(include=['taggo', 'taggo.metadata'], exclude=['tests']),
include_package_data=True,
install_requires=requirements,
extras_require=extras,
license="MIT license",
zip_safe=False,
keywords='taggo',
entry_points={
'console_scripts': [
'taggo = taggo:main',
]
},
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Information Technology',
'Intended Audience :: System Administrators',
'Topic :: System :: Filesystems',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 3.6',
],
test_suite='tests',
tests_require=test_requirements,
setup_requires=setup_requirements,
)
| {
"content_hash": "bfd16b0159f2deb46649a3fd34353a23",
"timestamp": "",
"source": "github",
"line_count": 73,
"max_line_length": 83,
"avg_line_length": 25.575342465753426,
"alnum_prop": 0.629887520085699,
"repo_name": "xeor/taggo",
"id": "a2585ea53835d6325bf354353401f3023324ff48",
"size": "1914",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "262"
},
{
"name": "Makefile",
"bytes": "2381"
},
{
"name": "Python",
"bytes": "47629"
},
{
"name": "Shell",
"bytes": "127"
}
],
"symlink_target": ""
} |
"""Interfaces for credentials."""
import abc
import six
from google.auth import _helpers
@six.add_metaclass(abc.ABCMeta)
class Credentials(object):
    """Base class for all credentials.

    Every credential carries a :attr:`token` used for authentication and may
    optionally carry an :attr:`expiry` saying when that token stops being
    valid.

    Most credentials start out :attr:`invalid` until :meth:`refresh` is
    called; :meth:`before_request` does this automatically before the first
    HTTP request.

    Although the token and expiry change as credentials are refreshed and
    used, configuration (private keys, scopes, and other options) is fixed at
    construction time; classes that support modified copies expose factory
    methods such as :meth:`ScopedCredentials.with_scopes`.
    """

    def __init__(self):
        # str: bearer token sent in the ``authorization`` header.
        self.token = None
        # Optional[datetime]: when the token stops being valid; None means
        # the token is assumed to never expire.
        self.expiry = None
        # Optional[str]: project used for quota and billing purposes.
        self._quota_project_id = None

    @property
    def expired(self):
        """Checks if the credentials are expired.

        Credentials whose :attr:`expiry` is None never expire (they can still
        be invalid, though).
        """
        if not self.expiry:
            return False
        # Report expiration CLOCK_SKEW early to avoid a 401-refresh-retry
        # loop right at the expiry boundary.
        return _helpers.utcnow() >= self.expiry - _helpers.CLOCK_SKEW

    @property
    def valid(self):
        """True when a :attr:`token` is present and not yet :attr:`expired`."""
        return not (self.token is None or self.expired)

    @property
    def quota_project_id(self):
        """Project to use for quota and billing purposes."""
        return self._quota_project_id

    @abc.abstractmethod
    def refresh(self, request):
        """Refreshes the access token.

        Args:
            request (google.auth.transport.Request): The object used to make
                HTTP requests.

        Raises:
            google.auth.exceptions.RefreshError: If the credentials could
                not be refreshed.
        """
        # pylint: disable=missing-raises-doc
        # (pylint doesn't recognize that this is abstract)
        raise NotImplementedError("Refresh must be implemented")

    def apply(self, headers, token=None):
        """Adds the bearer token (and quota project, if set) to *headers*.

        Args:
            headers (Mapping): The HTTP request headers.
            token (Optional[str]): If specified, overrides the current access
                token.
        """
        bearer = _helpers.from_bytes(token or self.token)
        headers["authorization"] = "Bearer {}".format(bearer)
        if self.quota_project_id:
            headers["x-goog-user-project"] = self.quota_project_id

    def before_request(self, request, method, url, headers):
        """Performs credential-specific before request logic.

        Refreshes the credentials if necessary, then calls :meth:`apply` to
        apply the token to the authentication header.

        Args:
            request (google.auth.transport.Request): The object used to make
                HTTP requests.
            method (str): The request's HTTP method or the RPC method being
                invoked.
            url (str): The request's URI or the RPC service's URI.
            headers (Mapping): The request's headers.
        """
        # method/url/headers are unused here but available for subclasses
        # that need to inspect the outgoing request.
        if not self.valid:
            self.refresh(request)
        self.apply(headers)
class CredentialsWithQuotaProject(Credentials):
    """Abstract base for credentials supporting ``with_quota_project`` factory"""

    def with_quota_project(self, quota_project_id):
        """Returns a copy of these credentials with a modified quota project.

        Args:
            quota_project_id (str): The project to use for quota and
                billing purposes

        Returns:
            google.oauth2.credentials.Credentials: A new credentials instance.

        Raises:
            NotImplementedError: unless a subclass overrides this factory.
        """
        raise NotImplementedError("This credential does not support quota project.")
class AnonymousCredentials(Credentials):
    """Credentials that do not provide any authentication information.

    Useful for services that allow anonymous access, or for local service
    emulators that do not check credentials.
    """

    @property
    def expired(self):
        """Always False — anonymous credentials never expire."""
        return False

    @property
    def valid(self):
        """Always True — anonymous credentials are always valid."""
        return True

    def refresh(self, request):
        """Unsupported for anonymous credentials.

        Raises:
            ValueError: always.
        """
        raise ValueError("Anonymous credentials cannot be refreshed.")

    def apply(self, headers, token=None):
        """Leaves *headers* untouched; a ``token`` argument is rejected.

        Raises:
            ValueError: If a token was specified.
        """
        if token is not None:
            raise ValueError("Anonymous credentials don't support tokens.")

    def before_request(self, request, method, url, headers):
        """No-op: anonymous credentials do nothing to the request."""
@six.add_metaclass(abc.ABCMeta)
class ReadOnlyScoped(object):
    """Interface for credentials whose scopes can be queried.

    OAuth 2.0-based credentials limit access using scopes as described in
    `RFC6749 Section 3.3`_; a class implementing this interface uses scopes
    in its implementation.

    Some credentials require scopes before they can obtain a token. Check
    with :attr:`requires_scopes`::

        if credentials.requires_scopes:
            # Scoping is required.
            credentials = credentials.with_scopes(scopes=['one', 'two'])

    Such credentials are either constructed with scopes::

        credentials = SomeScopedCredentials(scopes=['one', 'two'])

    or copied from an existing instance via :meth:`with_scopes`::

        scoped_credentials = credentials.with_scopes(scopes=['one', 'two'])

    Credentials that carry scopes but neither allow nor require setting them
    can be used as-is.

    .. _RFC6749 Section 3.3: https://tools.ietf.org/html/rfc6749#section-3.3
    """

    def __init__(self):
        super(ReadOnlyScoped, self).__init__()
        # Sequence[str] or None: the scopes attached to these credentials.
        self._scopes = None

    @property
    def scopes(self):
        """Sequence[str]: the credentials' current set of scopes."""
        return self._scopes

    @abc.abstractproperty
    def requires_scopes(self):
        """True if these credentials require scopes to obtain an access token.
        """
        return False

    def has_scopes(self, scopes):
        """Checks if the credentials have the given scopes.

        .. warning: This method is not guaranteed to be accurate if the
            credentials are :attr:`~Credentials.invalid`.

        Args:
            scopes (Sequence[str]): The list of scopes to check.

        Returns:
            bool: True if the credentials have the given scopes.
        """
        # Subset test: every requested scope must be present.
        return set(scopes) <= set(self._scopes or [])
class Scoped(ReadOnlyScoped):
    """Interface for credentials whose scopes can be replaced while copying.

    OAuth 2.0-based credentials limit access using scopes as described in
    `RFC6749 Section 3.3`_; a class implementing this interface uses scopes
    in its implementation.

    Some credentials require scopes before they can obtain a token. Check
    with :attr:`requires_scopes`::

        if credentials.requires_scopes:
            # Scoping is required.
            credentials = credentials.create_scoped(['one', 'two'])

    Such credentials are either constructed with scopes::

        credentials = SomeScopedCredentials(scopes=['one', 'two'])

    or copied from an existing instance via :meth:`with_scopes`::

        scoped_credentials = credentials.with_scopes(scopes=['one', 'two'])

    Credentials that carry scopes but neither allow nor require setting them
    can be used as-is.

    .. _RFC6749 Section 3.3: https://tools.ietf.org/html/rfc6749#section-3.3
    """

    @abc.abstractmethod
    def with_scopes(self, scopes):
        """Create a copy of these credentials with the specified scopes.

        Args:
            scopes (Sequence[str]): The list of scopes to attach to the
                current credentials.

        Raises:
            NotImplementedError: If the credentials' scopes can not be changed.
                This can be avoided by checking :attr:`requires_scopes` before
                calling this method.
        """
        raise NotImplementedError("This class does not require scoping.")
def with_scopes_if_required(credentials, scopes):
    """Creates a copy of the credentials with scopes if scoping is required.

    Useful when you do not know (or care to know) the specific type of
    credentials in hand (e.g. the result of :func:`google.auth.default`):
    calls :meth:`Scoped.with_scopes` only when the credentials are scoped
    credentials that require scoping, and otherwise returns them unchanged.

    Args:
        credentials (google.auth.credentials.Credentials): The credentials to
            scope if necessary.
        scopes (Sequence[str]): The list of scopes to use.

    Returns:
        google.auth.credentials.Credentials: Either a new set of scoped
            credentials, or the passed in credentials instance if no scoping
            was required.
    """
    needs_scoping = (isinstance(credentials, Scoped)
                     and credentials.requires_scopes)
    if needs_scoping:
        return credentials.with_scopes(scopes)
    return credentials
@six.add_metaclass(abc.ABCMeta)
class Signing(object):
    """Interface for credentials that can cryptographically sign messages."""

    @abc.abstractmethod
    def sign_bytes(self, message):
        """Signs the given message.

        Args:
            message (bytes): The message to sign.

        Returns:
            bytes: The message's cryptographic signature.
        """
        # pylint: disable=missing-raises-doc,redundant-returns-doc
        # (pylint doesn't recognize that this is abstract)
        raise NotImplementedError("Sign bytes must be implemented.")

    @abc.abstractproperty
    def signer_email(self):
        """Optional[str]: An email address that identifies the signer."""
        # pylint: disable=missing-raises-doc
        # (pylint doesn't recognize that this is abstract)
        raise NotImplementedError("Signer email must be implemented.")

    @abc.abstractproperty
    def signer(self):
        """google.auth.crypt.Signer: The signer used to sign bytes."""
        # pylint: disable=missing-raises-doc
        # (pylint doesn't recognize that this is abstract)
        raise NotImplementedError("Signer must be implemented.")
| {
"content_hash": "fd55e5fa8d7b32785c5113e2ede8fe97",
"timestamp": "",
"source": "github",
"line_count": 336,
"max_line_length": 84,
"avg_line_length": 35.601190476190474,
"alnum_prop": 0.658752716936967,
"repo_name": "javier-ruiz-b/docker-rasppi-images",
"id": "bc42546b9a0615984d2648dd7e61fb191efbc136",
"size": "12539",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "raspberry-google-home/env/lib/python3.7/site-packages/google/auth/credentials.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "15254"
},
{
"name": "PHP",
"bytes": "1132"
},
{
"name": "Shell",
"bytes": "17522"
}
],
"symlink_target": ""
} |
scope = "Private Program"
description = """
User with authorization to peform administrative tasks such as associating
users to roles within the scope of of a program.<br/><br/>When a person
creates a program they are automatically given the ProgramOwner role. This
allows them to Edit, Delete, or Map objects to the Program. It also allows
them to add people and assign them roles when their programs are private.
ProgramOwner is the most powerful role.
"""
permissions = {
"read": [
"ObjectDocument",
"ObjectPerson",
"Program",
"ProgramControl",
"Relationship",
"UserRole",
"Context",
],
"create": [
"ObjectDocument",
"ObjectPerson",
"ProgramControl",
"Relationship",
"UserRole",
"Audit",
],
"view_object_page": [
"__GGRC_ALL__"
],
"update": [
"ObjectDocument",
"ObjectPerson",
"Program",
"ProgramControl",
"Relationship",
"UserRole"
],
"delete": [
"ObjectDocument",
"ObjectPerson",
"Program",
"ProgramControl",
"Relationship",
"UserRole",
]
}
| {
"content_hash": "0916cc9d6594b5e13cf2d1ed99338cac",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 76,
"avg_line_length": 25.70212765957447,
"alnum_prop": 0.570364238410596,
"repo_name": "uskudnik/ggrc-core",
"id": "df7d7c6bd67ed51a624d55758063678717e256da",
"size": "1208",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "src/ggrc_basic_permissions/roles/ProgramOwner.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "232153"
},
{
"name": "Cucumber",
"bytes": "140526"
},
{
"name": "HTML",
"bytes": "6048248"
},
{
"name": "JavaScript",
"bytes": "1878527"
},
{
"name": "Makefile",
"bytes": "5524"
},
{
"name": "Mako",
"bytes": "1720"
},
{
"name": "Python",
"bytes": "1532862"
},
{
"name": "Ruby",
"bytes": "1496"
},
{
"name": "Shell",
"bytes": "11509"
}
],
"symlink_target": ""
} |
from runner.koan import *
class AboutListAssignments(Koan):
    """Koan exercises covering list binding, unpacking, and swapping."""
    def test_non_parallel_assignment(self):
        # A single name bound to a list literal holds the whole list.
        names = ["John", "Smith"]
        self.assertEqual(["John", "Smith"], names)
    def test_parallel_assignments(self):
        # Two names on the left unpack a two-element list.
        first_name, last_name = ["John", "Smith"]
        self.assertEqual("John", first_name)
        self.assertEqual("Smith", last_name)
    def test_parallel_assignments_with_sublists(self):
        # Unpacking is one level deep: the nested sublist stays a list.
        first_name, last_name = [["Willie", "Rae"], "Johnson"]
        self.assertEqual(["Willie", "Rae"], first_name)
        self.assertEqual("Johnson", last_name)
    def test_swapping_with_parallel_assignment(self):
        # Tuple pack/unpack swaps two names without a temporary variable.
        first_name = "Roy"
        last_name = "Rob"
        first_name, last_name = last_name, first_name
        self.assertEqual("Rob", first_name)
        self.assertEqual("Roy", last_name)
| {
"content_hash": "767daf00478eca417b1a66b6034374b1",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 62,
"avg_line_length": 37.166666666666664,
"alnum_prop": 0.5885650224215246,
"repo_name": "bmcorser/python_koans",
"id": "b0b80615eabe2e98783805d2e325e2255c08c6bf",
"size": "995",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "python2/koans/about_list_assignments.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "325264"
},
{
"name": "Shell",
"bytes": "80"
}
],
"symlink_target": ""
} |
from . import Operation, INestedOperation, ONestedOperation, NestedOperation
class NestedOp(Operation):
    # Resolver and container for multiple operations (RegKey protocol, for now).
    def __init__(self, oplist):
        self._oplist = oplist

    def RefLastOp(self):
        """Return the last operation in the contained chain."""
        return self._oplist[-1]

    def PreCompile(self, env):
        """Precompile every op, threading each ONested op's output memory
        object into the next INested op's input."""
        carried = None
        for op in self._oplist:
            if isinstance(op, INestedOperation):
                op.InputMemObj(carried)
            op.PreCompile(env)
            if isinstance(op, ONestedOperation):
                carried = op.OutputMemObj()

    def GetCode(self, env, p):
        """Concatenate every op's code, threading the pointer through.

        Returns a ``(code, pointer)`` pair like the contained ops do.
        """
        chunks = []
        pointer = int(p)
        for op in self._oplist:
            piece, pointer = op.GetCode(env, pointer)
            chunks.append(piece)
        return "".join(chunks), pointer
# TODO: Unittest & Integrationtest
class ClosureOp(Operation, NestedOperation):
    def __init__(self, oplist):
        super().__init__()
        self._oplist = oplist

    def PreCompile(self, env):
        """Precompile every op, feeding this op's input memory object to each
        INested op and collecting all outputs (plus our own input, when set)
        into this op's output tuple."""
        collected = []
        if self._IMEMOBJ is not None:
            collected.append(self._IMEMOBJ)
        for op in self._oplist:
            if isinstance(op, INestedOperation):
                op.InputMemObj(self._IMEMOBJ)
            op.PreCompile(env)
            # NOTE(review): OutputMemObj() is called on every op, even ops
            # that are not ONestedOperation — confirm this is intended.
            collected.append(op.OutputMemObj())
        self._OMEMOBJ = tuple(collected)

    def GetCode(self, env, p):
        """Concatenate every op's code, threading the pointer through."""
        chunks = []
        pointer = int(p)
        for op in self._oplist:
            piece, pointer = op.GetCode(env, pointer)
            chunks.append(piece)
        return "".join(chunks), pointer
| {
"content_hash": "0b50e9b48a14c940997ab70f206c7443",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 105,
"avg_line_length": 30.87037037037037,
"alnum_prop": 0.5776844631073785,
"repo_name": "eisterman/MurPy",
"id": "839cf546babd3a8911db91d10cb2ff3d26832573",
"size": "1667",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "murpy/core/operations/special.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "53753"
}
],
"symlink_target": ""
} |
"""Integration tests for gsutil -D option."""
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
import platform
import re
import six
import gslib
from gslib.cs_api_map import ApiSelector
import gslib.tests.testcase as testcase
from gslib.tests.testcase.integration_testcase import SkipForS3
from gslib.tests.util import ObjectToURI as suri
from gslib.tests.util import SetBotoConfigForTest
from gslib.utils.unit_util import ONE_KIB
@SkipForS3('-D output is implementation-specific.')
class TestDOption(testcase.GsUtilIntegrationTestCase):
"""Integration tests for gsutil -D option."""
def assert_header_in_output(self, name, value, output):
"""Asserts that httplib2's debug logger printed out a specified header.
This method is fairly primitive and uses assertIn statements, and thus is
case-sensitive. Values should be normalized (e.g. to lowercase) if
capitalization of the expected characters may vary.
Args:
name: (str) The header name, e.g. "Content-Length".
value: (Union[str, None]) The header value, e.g. "4096". If no value is
expected for the header or the value is unknown, this argument should
be `None`.
output: (str) The string in which to search for the specified header.
"""
expected = 'header: %s:' % name
if value:
# Only append a space and then the header value if a value was expected.
expected += ' %s' % value
if expected in output:
return
# Try the other format - when sending requests via the XML API, headers are
# printed in a list of 2-tuples (by Boto), so we test for that output style
# as well. The above style is generally preferred, but Python's http client
# doesn't print all values in scenarios where a header is sent multiple
# times with different values, e.g. in this case:
# x-goog-hash: md5=blah2
# x-goog-hash: crc32c=blah1
# the debug logger would just print the last one to occur (the crc32c hash).
alt_expected = "('%s'" % name
if value:
# Only check for the second part of the tuple if a value was expected.
alt_expected += ", '%s')" % value
if not alt_expected in output:
self.fail('Neither of these two header formats were found in the output:'
'\n1) %s\n2) %s\nOutput string: %s' %
(expected, alt_expected, output))
def test_minus_D_multipart_upload(self):
"""Tests that debug option does not output upload media body."""
# We want to ensure it works with and without a trailing newline.
for file_contents in (b'a1b2c3d4', b'a1b2c3d4\n'):
fpath = self.CreateTempFile(contents=file_contents)
bucket_uri = self.CreateBucket()
with SetBotoConfigForTest([('GSUtil', 'resumable_threshold', str(ONE_KIB))
]):
stderr = self.RunGsUtil(
['-D', 'cp', fpath, suri(bucket_uri)], return_stderr=True)
print('command line:' + ' '.join(['-D', 'cp', fpath, suri(bucket_uri)]))
if self.test_api == ApiSelector.JSON:
self.assertIn('media body', stderr)
self.assertNotIn('a1b2c3d4', stderr)
self.assertIn('Comparing local vs cloud md5-checksum for', stderr)
self.assertIn('total_bytes_transferred: %d' % len(file_contents),
stderr)
def test_minus_D_perf_trace_cp(self):
"""Test upload and download with a sample perf trace token."""
file_name = 'bar'
fpath = self.CreateTempFile(file_name=file_name, contents=b'foo')
bucket_uri = self.CreateBucket()
stderr = self.RunGsUtil(
['-D', '--perf-trace-token=123', 'cp', fpath,
suri(bucket_uri)],
return_stderr=True)
self.assertIn('\'cookie\': \'123\'', stderr)
stderr2 = self.RunGsUtil([
'-D', '--perf-trace-token=123', 'cp',
suri(bucket_uri, file_name), fpath
],
return_stderr=True)
self.assertIn('\'cookie\': \'123\'', stderr2)
def test_minus_D_resumable_upload(self):
fpath = self.CreateTempFile(contents=b'a1b2c3d4')
bucket_uri = self.CreateBucket()
with SetBotoConfigForTest([('GSUtil', 'resumable_threshold', '4')]):
stderr = self.RunGsUtil(
['-D', 'cp', fpath, suri(bucket_uri)], return_stderr=True)
self.assertNotIn('a1b2c3d4', stderr)
self.assertIn('Comparing local vs cloud md5-checksum for', stderr)
self.assertIn('total_bytes_transferred: 8', stderr)
  def test_minus_D_cat(self):
    """Tests cat command with debug option.

    Verifies that ``gsutil -D cat`` redacts proxy credentials, emits the
    expected debug log lines, and includes the expected response headers
    and request fields for whichever API (XML or JSON) is under test.
    """
    key_uri = self.CreateObject(contents=b'0123456789')
    # Set a proxy password so we can assert it is REDACTED in the logs.
    with SetBotoConfigForTest([('Boto', 'proxy_pass', 'secret')]):
      (stdout,
       stderr) = self.RunGsUtil(['-D', 'cat', suri(key_uri)],
                                return_stdout=True,
                                return_stderr=True)
    # Check for log messages we output.
    self.assertIn('You are running gsutil with debug output enabled.', stderr)
    self.assertIn('config:', stderr)
    # Redacted value repr differs between Python 2 (u'...') and 3 ('...').
    if six.PY2:
      self.assertIn("('proxy_pass', u'REDACTED')", stderr)
    else:
      self.assertIn("('proxy_pass', 'REDACTED')", stderr)
    # Check for log messages from httplib2 / http_client.
    self.assertIn("reply: 'HTTP/1.1 200 OK", stderr)
    self.assert_header_in_output('Expires', None, stderr)
    self.assert_header_in_output('Date', None, stderr)
    self.assert_header_in_output('Content-Type', 'application/octet-stream',
                                 stderr)
    # The object is 10 bytes ('0123456789').
    self.assert_header_in_output('Content-Length', '10', stderr)
    if self.test_api == ApiSelector.XML:
      # XML API: object metadata is carried in x-goog-* response headers.
      self.assert_header_in_output('Cache-Control', None, stderr)
      self.assert_header_in_output('ETag', '"781e5e245d69b566979b86e28d23f2c7"',
                                   stderr)
      self.assert_header_in_output('Last-Modified', None, stderr)
      self.assert_header_in_output('x-goog-generation', None, stderr)
      self.assert_header_in_output('x-goog-metageneration', '1', stderr)
      self.assert_header_in_output('x-goog-hash', 'crc32c=KAwGng==', stderr)
      self.assert_header_in_output('x-goog-hash',
                                   'md5=eB5eJF1ptWaXm4bijSPyxw==', stderr)
      # Check request fields show correct segments.
      regex_str = r'''send:\s+([b|u]')?HEAD /%s/%s HTTP/[^\\]*\\r\\n(.*)''' % (
          key_uri.bucket_name, key_uri.object_name)
      regex = re.compile(regex_str)
      match = regex.search(stderr)
      if not match:
        self.fail('Did not find this regex in stderr:\nRegex: %s\nStderr: %s' %
                  (regex_str, stderr))
      request_fields_str = match.group(2)
      self.assertIn('Content-Length: 0', request_fields_str)
      self.assertRegex(
          request_fields_str,
          'User-Agent: .*gsutil/%s.*interactive/False command/cat' %
          gslib.VERSION)
    elif self.test_api == ApiSelector.JSON:
      # JSON API: metadata appears in the logged response body instead.
      if six.PY2:
        self.assertIn("md5Hash: u'eB5eJF1ptWaXm4bijSPyxw=='", stderr)
      else:
        self.assertIn("md5Hash: 'eB5eJF1ptWaXm4bijSPyxw=='", stderr)
      self.assert_header_in_output(
          'Cache-Control', 'no-cache, no-store, max-age=0, must-revalidate',
          stderr)
      self.assertRegex(
          stderr,
          '.*GET.*b/%s/o/%s' % (key_uri.bucket_name, key_uri.object_name))
      self.assertRegex(
          stderr, 'Python/%s.gsutil/%s.*interactive/False command/cat' %
          (platform.python_version(), gslib.VERSION))
if gslib.IS_PACKAGE_INSTALL:
self.assertIn('PACKAGED_GSUTIL_INSTALLS_DO_NOT_HAVE_CHECKSUMS', stdout)
else:
self.assertRegex(stdout, r'.*checksum: [0-9a-f]{32}.*')
self.assertIn('gsutil version: %s' % gslib.VERSION, stdout)
self.assertIn('boto version: ', stdout)
self.assertIn('python version: ', stdout)
self.assertIn('OS: ', stdout)
self.assertIn('multiprocessing available: ', stdout)
self.assertIn('using cloud sdk: ', stdout)
self.assertIn('pass cloud sdk credentials to gsutil: ', stdout)
self.assertIn('config path(s): ', stdout)
self.assertIn('gsutil path: ', stdout)
self.assertIn('compiled crcmod: ', stdout)
self.assertIn('installed via package manager: ', stdout)
self.assertIn('editable install: ', stdout)
| {
"content_hash": "6302d6dc0fb019a9087cc0c01037c75f",
"timestamp": "",
"source": "github",
"line_count": 185,
"max_line_length": 80,
"avg_line_length": 45.027027027027025,
"alnum_prop": 0.6384153661464586,
"repo_name": "catapult-project/catapult",
"id": "0fef3e8c800e347aa0e792e35bab91ee06324339",
"size": "8950",
"binary": false,
"copies": "10",
"ref": "refs/heads/main",
"path": "third_party/gsutil/gslib/tests/test_Doption.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "1324"
},
{
"name": "C++",
"bytes": "46069"
},
{
"name": "CSS",
"bytes": "23376"
},
{
"name": "Dockerfile",
"bytes": "1541"
},
{
"name": "Go",
"bytes": "114396"
},
{
"name": "HTML",
"bytes": "12394298"
},
{
"name": "JavaScript",
"bytes": "1559584"
},
{
"name": "Makefile",
"bytes": "1774"
},
{
"name": "Python",
"bytes": "6778695"
},
{
"name": "Shell",
"bytes": "2288"
}
],
"symlink_target": ""
} |
from PyQt4 import QtCore, QtGui
# pyuic4 compatibility shims: behave sensibly under both PyQt4 string
# API v1 (QString exists) and API v2 / Python 3 (plain str only).
try:
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    # No QString under API v2 / Python 3: strings pass through unchanged.
    def _fromUtf8(s):
        return s

try:
    # Older PyQt4: translate() takes an explicit encoding argument.
    _encoding = QtGui.QApplication.UnicodeUTF8
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
    # Newer bindings dropped UnicodeUTF8; call translate() without it.
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig)
class Ui_Form(object):
    """Auto-generated (pyuic4) UI for a contract entry form.

    NOTE(review): this is generated code - prefer editing the originating
    .ui file and regenerating, rather than modifying this class by hand.
    """

    def setupUi(self, Form):
        """Create and lay out all widgets on *Form* (a 1031x143 panel)."""
        Form.setObjectName(_fromUtf8("Form"))
        Form.resize(1031, 143)
        font = QtGui.QFont()
        font.setFamily(_fromUtf8("Trebuchet MS"))
        Form.setFont(font)
        self.verticalLayout = QtGui.QVBoxLayout(Form)
        self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
        self.gridLayout = QtGui.QGridLayout()
        self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
        # Contract-name label (fixed width, right-aligned like all labels).
        self.label = QtGui.QLabel(Form)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Preferred)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.label.sizePolicy().hasHeightForWidth())
        self.label.setSizePolicy(sizePolicy)
        self.label.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.AlignTrailing | QtCore.Qt.AlignVCenter)
        self.label.setObjectName(_fromUtf8("label"))
        self.gridLayout.addWidget(self.label, 0, 0, 1, 1)
        # Supplier company selector.
        self.comboBox_company = QtGui.QComboBox(Form)
        self.comboBox_company.setObjectName(_fromUtf8("comboBox_company"))
        self.gridLayout.addWidget(self.comboBox_company, 1, 1, 1, 1)
        self.label_5 = QtGui.QLabel(Form)
        self.label_5.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.AlignTrailing | QtCore.Qt.AlignVCenter)
        self.label_5.setObjectName(_fromUtf8("label_5"))
        self.gridLayout.addWidget(self.label_5, 1, 0, 1, 1)
        self.label_6 = QtGui.QLabel(Form)
        self.label_6.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.AlignTrailing | QtCore.Qt.AlignVCenter)
        self.label_6.setObjectName(_fromUtf8("label_6"))
        self.gridLayout.addWidget(self.label_6, 1, 2, 1, 1)
        # Read-only company detail fields (filled from the selected company).
        self.lineEdit_company_contact = QtGui.QLineEdit(Form)
        self.lineEdit_company_contact.setEnabled(False)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.lineEdit_company_contact.sizePolicy().hasHeightForWidth())
        self.lineEdit_company_contact.setSizePolicy(sizePolicy)
        self.lineEdit_company_contact.setMinimumSize(QtCore.QSize(120, 0))
        self.lineEdit_company_contact.setMaximumSize(QtCore.QSize(150, 16777215))
        self.lineEdit_company_contact.setMaxLength(12)
        self.lineEdit_company_contact.setReadOnly(True)
        self.lineEdit_company_contact.setObjectName(_fromUtf8("lineEdit_company_contact"))
        self.gridLayout.addWidget(self.lineEdit_company_contact, 1, 3, 1, 1)
        self.lineEdit_company_address = QtGui.QLineEdit(Form)
        self.lineEdit_company_address.setEnabled(False)
        self.lineEdit_company_address.setReadOnly(True)
        self.lineEdit_company_address.setObjectName(_fromUtf8("lineEdit_company_address"))
        self.gridLayout.addWidget(self.lineEdit_company_address, 1, 7, 1, 4)
        self.label_2 = QtGui.QLabel(Form)
        self.label_2.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.AlignTrailing | QtCore.Qt.AlignVCenter)
        self.label_2.setObjectName(_fromUtf8("label_2"))
        self.gridLayout.addWidget(self.label_2, 0, 4, 1, 1)
        self.lineEdit_contract_name = QtGui.QLineEdit(Form)
        self.lineEdit_contract_name.setObjectName(_fromUtf8("lineEdit_contract_name"))
        self.gridLayout.addWidget(self.lineEdit_contract_name, 0, 1, 1, 3)
        self.label_7 = QtGui.QLabel(Form)
        self.label_7.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.AlignTrailing | QtCore.Qt.AlignVCenter)
        self.label_7.setObjectName(_fromUtf8("label_7"))
        self.gridLayout.addWidget(self.label_7, 1, 4, 1, 1)
        self.label_8 = QtGui.QLabel(Form)
        self.label_8.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.AlignTrailing | QtCore.Qt.AlignVCenter)
        self.label_8.setObjectName(_fromUtf8("label_8"))
        self.gridLayout.addWidget(self.label_8, 1, 6, 1, 1)
        self.lineEdit_company_telephone = QtGui.QLineEdit(Form)
        self.lineEdit_company_telephone.setEnabled(False)
        self.lineEdit_company_telephone.setMinimumSize(QtCore.QSize(120, 0))
        self.lineEdit_company_telephone.setMaximumSize(QtCore.QSize(150, 16777215))
        self.lineEdit_company_telephone.setReadOnly(True)
        self.lineEdit_company_telephone.setObjectName(_fromUtf8("lineEdit_company_telephone"))
        self.gridLayout.addWidget(self.lineEdit_company_telephone, 1, 5, 1, 1)
        self.lineEdit_contract_number = QtGui.QLineEdit(Form)
        self.lineEdit_contract_number.setMinimumSize(QtCore.QSize(120, 0))
        self.lineEdit_contract_number.setMaximumSize(QtCore.QSize(150, 16777215))
        self.lineEdit_contract_number.setObjectName(_fromUtf8("lineEdit_contract_number"))
        self.gridLayout.addWidget(self.lineEdit_contract_number, 0, 5, 1, 1)
        self.label_4 = QtGui.QLabel(Form)
        self.label_4.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.AlignTrailing | QtCore.Qt.AlignVCenter)
        self.label_4.setObjectName(_fromUtf8("label_4"))
        self.gridLayout.addWidget(self.label_4, 0, 10, 1, 1)
        self.label_3 = QtGui.QLabel(Form)
        self.label_3.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.AlignTrailing | QtCore.Qt.AlignVCenter)
        self.label_3.setObjectName(_fromUtf8("label_3"))
        self.gridLayout.addWidget(self.label_3, 0, 6, 1, 1)
        # Contract amount: capped at 999,999,999.99.
        self.doubleSpinBox_contract_amount = QtGui.QDoubleSpinBox(Form)
        self.doubleSpinBox_contract_amount.setMinimumSize(QtCore.QSize(120, 0))
        self.doubleSpinBox_contract_amount.setMaximumSize(QtCore.QSize(150, 16777215))
        self.doubleSpinBox_contract_amount.setMaximum(999999999.99)
        self.doubleSpinBox_contract_amount.setObjectName(_fromUtf8("doubleSpinBox_contract_amount"))
        self.gridLayout.addWidget(self.doubleSpinBox_contract_amount, 0, 7, 1, 1)
        self.pushButton_preset_companies = QtGui.QPushButton(Form)
        self.pushButton_preset_companies.setObjectName(_fromUtf8("pushButton_preset_companies"))
        self.gridLayout.addWidget(self.pushButton_preset_companies, 1, 11, 1, 1)
        self.dateEdit_sign_date = QtGui.QDateEdit(Form)
        self.dateEdit_sign_date.setMinimumSize(QtCore.QSize(120, 0))
        self.dateEdit_sign_date.setMaximumSize(QtCore.QSize(150, 16777215))
        self.dateEdit_sign_date.setObjectName(_fromUtf8("dateEdit_sign_date"))
        self.gridLayout.addWidget(self.dateEdit_sign_date, 0, 11, 1, 1)
        self.label_9 = QtGui.QLabel(Form)
        self.label_9.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.AlignTrailing | QtCore.Qt.AlignVCenter)
        self.label_9.setObjectName(_fromUtf8("label_9"))
        self.gridLayout.addWidget(self.label_9, 0, 8, 1, 1)
        # Contract type selector.
        self.comboBox = QtGui.QComboBox(Form)
        self.comboBox.setMinimumSize(QtCore.QSize(120, 0))
        self.comboBox.setMaximumSize(QtCore.QSize(150, 16777215))
        self.comboBox.setObjectName(_fromUtf8("comboBox"))
        self.gridLayout.addWidget(self.comboBox, 0, 9, 1, 1)
        self.verticalLayout.addLayout(self.gridLayout)

        self.retranslateUi(Form)
        QtCore.QMetaObject.connectSlotsByName(Form)

    def retranslateUi(self, Form):
        """Set user-visible text (labels are Chinese contract-form captions)."""
        Form.setWindowTitle(_translate("Form", "Form", None))
        self.label.setText(_translate("Form", "合同名称", None))
        self.label_5.setText(_translate("Form", "合同供方", None))
        self.label_6.setText(_translate("Form", "联系人", None))
        self.label_2.setText(_translate("Form", "合同编号", None))
        self.lineEdit_contract_name.setText(_translate("Form", "测试用例合同名称随机", None))
        self.label_7.setText(_translate("Form", "联系电话", None))
        self.label_8.setText(_translate("Form", "联系地址", None))
        self.label_4.setText(_translate("Form", "签订日期", None))
        self.label_3.setText(_translate("Form", "合同金额", None))
        self.pushButton_preset_companies.setText(_translate("Form", "管理公司信息", None))
        self.dateEdit_sign_date.setDisplayFormat(_translate("Form", "yyyy-M-d", None))
        self.label_9.setText(_translate("Form", "合同类型", None))
| {
"content_hash": "5d7fc0768ba81a1440bcebddfb70124f",
"timestamp": "",
"source": "github",
"line_count": 145,
"max_line_length": 106,
"avg_line_length": 59.40689655172414,
"alnum_prop": 0.700139308103088,
"repo_name": "earlideal/jiandan",
"id": "b39bcc135d30b5f53836ed4f86828c349a3c4272",
"size": "8922",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "views/contract_template.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "121193"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from builtins import *
from six.moves.urllib.request import urlopen
from six.moves.urllib.error import URLError, HTTPError
def download(url, outfname):
    """
    Download target URL to a local file.

    :param url: URL to fetch.
    :param outfname: path of the output file to write (binary mode).
    :return: None. HTTP/URL errors are reported to stdout, not raised.
    """
    try:
        data = urlopen(url)
        # BUGFIX: close the response handle explicitly; urlopen objects are
        # not reliably closed by garbage collection on all runtimes.
        try:
            with open(outfname, "wb") as f:
                f.write(data.read())
        finally:
            data.close()
    except HTTPError as e:
        print("HTTP Error: {} {}".format(e.code, url))
    except URLError as e:
        print("URL Error: {} {}".format(e.reason, url))
| {
"content_hash": "afb0e9424f30d3093a3f957b7e87336c",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 55,
"avg_line_length": 26.72,
"alnum_prop": 0.6377245508982036,
"repo_name": "HazyResearch/snorkel",
"id": "81547e5553b4e75c7c2339fd0ba983e93023c393",
"size": "668",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "snorkel/contrib/brat/utils.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "15678"
},
{
"name": "Jupyter Notebook",
"bytes": "1238790"
},
{
"name": "Python",
"bytes": "560449"
},
{
"name": "Shell",
"bytes": "5052"
}
],
"symlink_target": ""
} |
import pytest
@pytest.mark.plugin('jobs')
def testConstantsIsDefined(server):
    """The jobs plugin's JobStatus must recognize SUCCESS as a valid state."""
    from girder.plugins.jobs import constants

    job_status = constants.JobStatus
    assert job_status.isValid(job_status.SUCCESS) is True
| {
"content_hash": "1e236413e5db8d2894d6ad0b5ce6b709",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 75,
"avg_line_length": 28.857142857142858,
"alnum_prop": 0.7970297029702971,
"repo_name": "data-exp-lab/girder",
"id": "efab5b137a0ac404c893cb2ab811f27175a3999c",
"size": "944",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "test/jobs/test_constants.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CMake",
"bytes": "42365"
},
{
"name": "CSS",
"bytes": "61237"
},
{
"name": "Dockerfile",
"bytes": "2416"
},
{
"name": "HCL",
"bytes": "1424"
},
{
"name": "HTML",
"bytes": "170299"
},
{
"name": "JavaScript",
"bytes": "1399182"
},
{
"name": "Mako",
"bytes": "8756"
},
{
"name": "Python",
"bytes": "2388013"
},
{
"name": "Roff",
"bytes": "17"
},
{
"name": "Ruby",
"bytes": "10593"
},
{
"name": "Shell",
"bytes": "7661"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
import frappe
import HTMLParser
import smtplib
from frappe import msgprint, throw, _
from frappe.email.smtp import SMTPServer, get_outgoing_email_account
from frappe.email.email_body import get_email, get_formatted_html
from frappe.utils.verified_command import get_signed_params, verify_request
from html2text import html2text
from frappe.utils import get_url, nowdate, encode, now_datetime, add_days, split_emails, cstr
from rq.timeouts import JobTimeoutException
from frappe.utils.scheduler import log
# Raised by check_email_limit() when a send would exceed the site's monthly
# outgoing email limit (frappe.conf 'limits.emails', default 500).
class EmailLimitCrossedError(frappe.ValidationError): pass
def send(recipients=None, sender=None, subject=None, message=None, reference_doctype=None,
		reference_name=None, unsubscribe_method=None, unsubscribe_params=None, unsubscribe_message=None,
		attachments=None, reply_to=None, cc=(), show_as_cc=(), message_id=None, in_reply_to=None, send_after=None,
		expose_recipients=False, send_priority=1, communication=None):
	"""Add email to sending queue (Email Queue)

	:param recipients: List of recipients.
	:param sender: Email sender.
	:param subject: Email subject.
	:param message: Email message.
	:param reference_doctype: Reference DocType of caller document.
	:param reference_name: Reference name of caller document.
	:param send_priority: Priority for Email Queue, default 1.
	:param unsubscribe_method: URL method for unsubscribe. Default is `/api/method/frappe.email.queue.unsubscribe`.
	:param unsubscribe_params: additional params for unsubscribed links. default are name, doctype, email
	:param unsubscribe_message: link text for the unsubscribe footer link.
	:param attachments: Attachments to be sent.
	:param reply_to: Reply to be captured here (default inbox)
	:param cc: addresses copied on each queued email.
	:param show_as_cc: recipients that should see a "sent to you as CC" note.
	:param message_id: Used for threading. If a reply is received to this email, Message-Id is sent back as In-Reply-To in received email.
	:param in_reply_to: Used to send the Message-Id of a received email back as In-Reply-To.
	:param send_after: Send this email after the given datetime. If value is in integer, then `send_after` will be the automatically set to no of days from current date.
	:param expose_recipients: if True, the footer lists all recipients instead of only the addressee.
	:param communication: Communication link to be set in Email Queue record
	"""
	if not unsubscribe_method:
		unsubscribe_method = "/api/method/frappe.email.queue.unsubscribe"

	if not recipients:
		return

	# NOTE: `basestring` - this module targets Python 2.
	if isinstance(recipients, basestring):
		recipients = split_emails(recipients)

	# An integer send_after means "N days from today".
	if isinstance(send_after, int):
		send_after = add_days(nowdate(), send_after)

	email_account = get_outgoing_email_account(True, append_to=reference_doctype)
	if not sender or sender == "Administrator":
		sender = email_account.default_sender

	# Raises EmailLimitCrossedError if this batch would exceed the limit.
	check_email_limit(recipients)

	formatted = get_formatted_html(subject, message, email_account=email_account)

	try:
		text_content = html2text(formatted)
	except HTMLParser.HTMLParseError:
		# Unparseable HTML: fall back to a stub plain-text part.
		text_content = "See html attachment"

	if reference_doctype and reference_name:
		# Exclude recipients unsubscribed from this document or globally.
		unsubscribed = [d.email for d in frappe.db.get_all("Email Unsubscribe", "email",
			{"reference_doctype": reference_doctype, "reference_name": reference_name})]

		unsubscribed += [d.email for d in frappe.db.get_all("Email Unsubscribe", "email",
			{"global_unsubscribe": 1})]
	else:
		unsubscribed = []

	# De-duplicate and drop empty / unsubscribed addresses.
	recipients = [r for r in list(set(recipients)) if r and r not in unsubscribed]

	for email in recipients:
		email_content = formatted
		email_text_context = text_content

		if reference_doctype:
			# Each recipient gets a personalised, signed unsubscribe link.
			unsubscribe_link = get_unsubscribe_link(
				reference_doctype=reference_doctype,
				reference_name=reference_name,
				email=email,
				recipients=recipients,
				expose_recipients=expose_recipients,
				unsubscribe_method=unsubscribe_method,
				unsubscribe_params=unsubscribe_params,
				unsubscribe_message=unsubscribe_message,
				show_as_cc=show_as_cc
			)

			email_content = email_content.replace("<!--unsubscribe link here-->", unsubscribe_link.html)
			email_text_context += unsubscribe_link.text

		# show as cc
		cc_message = ""
		if email in show_as_cc:
			cc_message = _("This email was sent to you as CC")

		email_content = email_content.replace("<!-- cc message -->", cc_message)
		email_text_context = cc_message + "\n" + email_text_context

		# add to queue
		add(email, sender, subject, email_content, email_text_context, reference_doctype,
			reference_name, attachments, reply_to, cc, message_id, in_reply_to, send_after, send_priority, email_account=email_account, communication=communication)
def add(email, sender, subject, formatted, text_content=None,
	reference_doctype=None, reference_name=None, attachments=None, reply_to=None,
	cc=(), message_id=None, in_reply_to=None, send_after=None, send_priority=1, email_account=None, communication=None):
	"""Insert a single outgoing message into the Email Queue.

	Builds the MIME message via get_email() and stores it on a new
	`Email Queue` record. Messages with invalid recipient addresses are
	silently skipped.
	"""
	queued = frappe.new_doc('Email Queue')
	queued.recipient = email
	queued.priority = send_priority

	try:
		mail = get_email(email, sender=sender, formatted=formatted, subject=subject,
			text_content=text_content, attachments=attachments, reply_to=reply_to,
			cc=cc, email_account=email_account)

		if message_id:
			mail.set_message_id(message_id)
		if in_reply_to:
			mail.set_in_reply_to(in_reply_to)

		queued.message = cstr(mail.as_string())
		queued.sender = mail.sender

	except frappe.InvalidEmailAddressError:
		# bad email id - don't add to queue
		return

	queued.reference_doctype = reference_doctype
	queued.reference_name = reference_name
	queued.communication = communication
	queued.send_after = send_after
	queued.insert(ignore_permissions=True)
def check_email_limit(recipients):
	"""Throw EmailLimitCrossedError if sending to *recipients* would push
	this month's total past the configured monthly limit.

	Only enforced when the outgoing account comes from site_config.json
	(or during tests); self-configured email accounts have no limit.
	"""
	smtp_server = SMTPServer()

	using_site_config = (smtp_server.email_account
		and getattr(smtp_server.email_account, "from_site_config", False))

	if using_site_config or frappe.flags.in_test:
		# get count of mails sent this month
		sent_this_month = get_emails_sent_this_month()

		monthly_email_limit = frappe.conf.get('limits', {}).get('emails') or 500

		if (sent_this_month + len(recipients)) > monthly_email_limit:
			throw(_("Cannot send this email. You have crossed the sending limit of {0} emails for this month.").format(monthly_email_limit),
				EmailLimitCrossedError)
def get_emails_sent_this_month():
	"""Return the number of Email Queue records sent in the current
	calendar month of the current year."""
	# BUGFIX: the YEAR() check is required - matching on MONTH() alone also
	# counted emails sent in the same month of previous years, inflating
	# the total used by check_email_limit().
	return frappe.db.sql("""select count(name) from `tabEmail Queue` where
		status='Sent' and MONTH(creation)=MONTH(CURDATE())
		and YEAR(creation)=YEAR(CURDATE())""")[0][0]
def get_unsubscribe_link(reference_doctype, reference_name,
	email, recipients, expose_recipients, show_as_cc,
	unsubscribe_method, unsubscribe_params, unsubscribe_message):
	"""Build the footer fragment (HTML and plain text) that tells the
	recipient who the email was sent to and offers an unsubscribe link.

	:param email: address of the recipient this fragment is rendered for.
	:param recipients: full recipient list (listed when expose_recipients).
	:param expose_recipients: if True, show every recipient, not just `email`.
	:param show_as_cc: addresses reported as "copied" rather than "sent to".
	:return: frappe._dict with `html` and `text` keys.
	"""
	email_sent_to = recipients if expose_recipients else [email]
	email_sent_cc = ", ".join([e for e in email_sent_to if e in show_as_cc])
	email_sent_to = ", ".join([e for e in email_sent_to if e not in show_as_cc])

	if email_sent_cc:
		email_sent_message = _("This email was sent to {0} and copied to {1}").format(email_sent_to, email_sent_cc)
	else:
		email_sent_message = _("This email was sent to {0}").format(email_sent_to)

	if not unsubscribe_message:
		unsubscribe_message = _("Unsubscribe from this list")

	unsubscribe_url = get_unsubcribed_url(reference_doctype, reference_name, email,
		unsubscribe_method, unsubscribe_params)

	# BUGFIX: the <a> tag's style attribute was previously unterminated
	# (missing closing quote), which swallowed target="_blank" and produced
	# invalid anchor markup in every outgoing email.
	html = """<div style="margin: 15px auto; padding: 0px 7px; text-align: center; color: #8d99a6;">
			{email}
			<p style="margin: 15px auto;">
				<a href="{unsubscribe_url}" style="color: #8d99a6; text-decoration: underline;"
					target="_blank">{unsubscribe_message}
				</a>
			</p>
		</div>""".format(
			unsubscribe_url = unsubscribe_url,
			email=email_sent_message,
			unsubscribe_message=unsubscribe_message
		)

	text = "\n{email}\n\n{unsubscribe_message}: {unsubscribe_url}".format(
		email=email_sent_message,
		unsubscribe_message=unsubscribe_message,
		unsubscribe_url=unsubscribe_url
	)

	return frappe._dict({
		"html": html,
		"text": text
	})
def get_unsubcribed_url(reference_doctype, reference_name, email, unsubscribe_method, unsubscribe_params):
	"""Return the signed unsubscribe URL for one recipient.

	The signed query string is also recorded on
	``frappe.local.flags.signed_query_string`` for tests.
	"""
	params = {"email": email.encode("utf-8"),
		"doctype": reference_doctype.encode("utf-8"),
		"name": reference_name.encode("utf-8")}
	if unsubscribe_params:
		params.update(unsubscribe_params)

	query_string = get_signed_params(params)

	# for test
	frappe.local.flags.signed_query_string = query_string

	# BUGFIX: reuse the signature computed above instead of signing a second
	# time; this avoids redundant work and guarantees the flag recorded for
	# tests matches the URL actually returned.
	return get_url(unsubscribe_method + "?" + query_string)
@frappe.whitelist(allow_guest=True)
def unsubscribe(doctype, name, email):
	"""Guest-accessible endpoint: unsubscribe *email* from communications
	about the given document, then render a confirmation page.

	Requests must carry a valid signature (see verify_request).
	"""
	if not verify_request():
		return

	unsubscribe_doc = frappe.get_doc({
		"doctype": "Email Unsubscribe",
		"email": email,
		"reference_doctype": doctype,
		"reference_name": name
	})

	try:
		unsubscribe_doc.insert(ignore_permissions=True)
	except frappe.DuplicateEntryError:
		# Already unsubscribed - discard the failed insert.
		frappe.db.rollback()
	else:
		frappe.db.commit()

	return_unsubscribed_page(email, doctype, name)
def return_unsubscribed_page(email, doctype, name):
	"""Render the confirmation web page shown after an unsubscribe."""
	title = _("Unsubscribed")
	message = _("{0} has left the conversation in {1} {2}").format(email, _(doctype), name)
	frappe.respond_as_web_page(title, message)
def flush(from_test=False):
	"""Flush the Email Queue; called repeatedly from the scheduler.

	:param from_test: when True, statements are not auto-committed so a
		surrounding test transaction can roll everything back.
	"""
	# additional check
	check_email_limit([])

	auto_commit = not from_test

	if frappe.are_emails_muted():
		msgprint(_("Emails are muted"))
		# NOTE(review): from_test is not read again below, so this
		# assignment appears to have no effect - confirm before removing.
		from_test = True

	# Expire anything stuck unsent for more than a week.
	frappe.db.sql("""update `tabEmail Queue` set status='Expired'
		where datediff(curdate(), creation) > 7 and status='Not Sent'""", auto_commit=auto_commit)

	smtpserver = SMTPServer()

	# Send at most 500 queued emails per invocation, one row at a time,
	# highest priority / oldest first.
	for i in xrange(500):
		# don't use for update here, as it leads deadlocks
		email = frappe.db.sql('''select * from `tabEmail Queue`
			where status='Not Sent' and (send_after is null or send_after < %(now)s)
			order by priority desc, creation asc
			limit 1''', { 'now': now_datetime() }, as_dict=True)
		if email:
			email = email[0]
		else:
			break
		send_one(email, smtpserver, auto_commit)

	# NOTE: removing commit here because we pass auto_commit
	# finally:
	# 	frappe.db.commit()
def send_one(email, smtpserver=None, auto_commit=True, now=False):
	'''Send Email Queue with given smtpserver

	:param email: `tabEmail Queue` row (dict) to send.
	:param smtpserver: reusable SMTPServer instance; created on demand.
	:param auto_commit: commit each status update as it happens.
	:param now: when True, re-raise unexpected errors to the caller
		instead of only logging them to the scheduler log.
	'''
	# Lock the row; bail out if another worker already picked it up.
	status = frappe.db.sql('''select status from `tabEmail Queue` where name=%s for update''', email.name)[0][0]
	if status != 'Not Sent':
		# rollback to release lock and return
		frappe.db.rollback()
		return

	frappe.db.sql("""update `tabEmail Queue` set status='Sending', modified=%s where name=%s""",
		(now_datetime(), email.name), auto_commit=auto_commit)

	if email.communication:
		frappe.get_doc('Communication', email.communication).set_delivery_status(commit=auto_commit)

	try:
		# NOTE(review): the actual SMTP send is skipped when auto_commit is
		# False (test mode), yet the row is still marked 'Sent' - confirm
		# this is intentional.
		if auto_commit:
			if not smtpserver: smtpserver = SMTPServer()

			smtpserver.setup_email_account(email.reference_doctype)
			smtpserver.sess.sendmail(email.sender, email.recipient, encode(email.message))

		frappe.db.sql("""update `tabEmail Queue` set status='Sent', modified=%s where name=%s""",
			(now_datetime(), email.name), auto_commit=auto_commit)

		if email.communication:
			frappe.get_doc('Communication', email.communication).set_delivery_status(commit=auto_commit)

	except (smtplib.SMTPServerDisconnected,
			smtplib.SMTPConnectError,
			smtplib.SMTPHeloError,
			smtplib.SMTPAuthenticationError,
			JobTimeoutException):

		# bad connection/timeout, retry later
		frappe.db.sql("""update `tabEmail Queue` set status='Not Sent', modified=%s where name=%s""",
			(now_datetime(), email.name), auto_commit=auto_commit)

		if email.communication:
			frappe.get_doc('Communication', email.communication).set_delivery_status(commit=auto_commit)

		# no need to attempt further
		return

	except Exception, e:
		# Unexpected failure: record the error text on the queue row.
		frappe.db.rollback()

		frappe.db.sql("""update `tabEmail Queue` set status='Error', error=%s
			where name=%s""", (unicode(e), email.name), auto_commit=auto_commit)

		if email.communication:
			frappe.get_doc('Communication', email.communication).set_delivery_status(commit=auto_commit)

		if now:
			raise e

		else:
			# log to scheduler log
			log('frappe.email.queue.flush', unicode(e))
def clear_outbox():
	"""Remove Email Queue entries older than 31 days.

	Invoked once a day by the scheduler to keep the outbox table small.
	"""
	cleanup_query = """delete from `tabEmail Queue` where
		datediff(now(), creation) > 31"""
	frappe.db.sql(cleanup_query)
| {
"content_hash": "733dcc256252a107b3c84fdf4d5dca94",
"timestamp": "",
"source": "github",
"line_count": 336,
"max_line_length": 166,
"avg_line_length": 35.61904761904762,
"alnum_prop": 0.7293616310160428,
"repo_name": "anandpdoshi/frappe",
"id": "e4ca335098ed6ad8b5b0c9d07b3336ee97444835",
"size": "12069",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "frappe/email/queue.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "285216"
},
{
"name": "HTML",
"bytes": "1349168"
},
{
"name": "JavaScript",
"bytes": "1092822"
},
{
"name": "Python",
"bytes": "1259016"
},
{
"name": "Shell",
"bytes": "517"
}
],
"symlink_target": ""
} |
import ast
import re
import pep8
"""
Guidelines for writing new hacking checks
- Use only for Nova specific tests. OpenStack general tests
should be submitted to the common 'hacking' module.
- Pick numbers in the range N3xx. Find the current test with
the highest allocated number and then pick the next value.
- Keep the test method code in the source file ordered based
on the N3xx value.
- List the new rule in the top level HACKING.rst file
- Add test cases for each new rule to nova/tests/unit/test_hacking.py
"""
# Files that may legitimately use "import _"-style imports (populated by
# the underscore-import checks at run time).
UNDERSCORE_IMPORT_FILES = []

# Function definitions that accept a `session` argument (see N309).
session_check = re.compile(r"\w*def [a-zA-Z0-9].*[(].*session.*[)]")
cfg_re = re.compile(r".*\scfg\.")
# Excludes oslo.config OptGroup objects
cfg_opt_re = re.compile(r".*[\s\[]cfg\.[a-zA-Z]*Opt\(")
vi_header_re = re.compile(r"^#\s+vim?:.+")
# Cross-driver import/config detection for nova/virt (see N311).
virt_file_re = re.compile(r"\./nova/(?:tests/)?virt/(\w+)/")
virt_import_re = re.compile(
    r"^\s*(?:import|from) nova\.(?:tests\.)?virt\.(\w+)")
virt_config_re = re.compile(
    r"CONF\.import_opt\('.*?', 'nova\.virt\.(\w+)('|.)")
# Misused assertion forms (prefer assertIsInstance / assertIsNone etc.).
asse_trueinst_re = re.compile(
    r"(.)*assertTrue\(isinstance\((\w|\.|\'|\"|\[|\])+, "
    "(\w|\.|\'|\"|\[|\])+\)\)")
asse_equal_type_re = re.compile(
    r"(.)*assertEqual\(type\((\w|\.|\'|\"|\[|\])+\), "
    "(\w|\.|\'|\"|\[|\])+\)")
asse_equal_in_end_with_true_or_false_re = re.compile(r"assertEqual\("
    r"(\w|[][.'\"])+ in (\w|[][.'\", ])+, (True|False)\)")
asse_equal_in_start_with_true_or_false_re = re.compile(r"assertEqual\("
    r"(True|False), (\w|[][.'\"])+ in (\w|[][.'\", ])+\)")
asse_equal_end_with_none_re = re.compile(
    r"assertEqual\(.*?,\s+None\)$")
asse_equal_start_with_none_re = re.compile(
    r"assertEqual\(None,")
# NOTE(snikitin): Next two regexes weren't united to one for more readability.
# asse_true_false_with_in_or_not_in regex checks
# assertTrue/False(A in B) cases where B argument has no spaces
# asse_true_false_with_in_or_not_in_spaces regex checks cases
# where B argument has spaces and starts/ends with [, ', ".
# For example: [1, 2, 3], "some string", 'another string'.
# We have to separate these regexes to escape a false positives
# results. B argument should have spaces only if it starts
# with [, ", '. Otherwise checking of string
# "assertFalse(A in B and C in D)" will be false positives.
# In this case B argument is "B and C in D".
asse_true_false_with_in_or_not_in = re.compile(r"assert(True|False)\("
    r"(\w|[][.'\"])+( not)? in (\w|[][.'\",])+(, .*)?\)")
asse_true_false_with_in_or_not_in_spaces = re.compile(r"assert(True|False)"
    r"\((\w|[][.'\"])+( not)? in [\[|'|\"](\w|[][.'\", ])+"
    r"[\[|'|\"](, .*)?\)")
asse_raises_regexp = re.compile(r"assertRaisesRegexp\(")
conf_attribute_set_re = re.compile(r"CONF\.[a-z0-9_.]+\s*=\s*\w")
# Log/i18n translation checks: which LOG.* calls must (not) wrap their
# message in a translation function.
log_translation = re.compile(
    r"(.)*LOG\.(audit|error|critical)\(\s*('|\")")
log_translation_info = re.compile(
    r"(.)*LOG\.(info)\(\s*(_\(|'|\")")
log_translation_exception = re.compile(
    r"(.)*LOG\.(exception)\(\s*(_\(|'|\")")
log_translation_LW = re.compile(
    r"(.)*LOG\.(warning|warn)\(\s*(_\(|'|\")")
translated_log = re.compile(
    r"(.)*LOG\.(audit|error|info|critical|exception)"
    "\(\s*_\(\s*('|\")")
mutable_default_args = re.compile(r"^\s*def .+\((.+=\{\}|.+=\[\])")
string_translation = re.compile(r"[^_]*_\(\s*('|\")")
underscore_import_check = re.compile(r"(.)*import _(.)*")
import_translation_for_log_or_exception = re.compile(
    r"(.)*(from\snova.i18n\simport)\s_")
# We need this for cases where they have created their own _ function.
custom_underscore_check = re.compile(r"(.)*_\s*=\s*(.)*")
api_version_re = re.compile(r"@.*api_version")
dict_constructor_with_list_copy_re = re.compile(r".*\bdict\((\[)?(\(|\[)")
decorator_re = re.compile(r"@.*")
http_not_implemented_re = re.compile(r"raise .*HTTPNotImplemented\(")
spawn_re = re.compile(
    r".*(eventlet|greenthread)\.(?P<spawn_part>spawn(_n)?)\(.*\)")
contextlib_nested = re.compile(r"^with (contextlib\.)?nested\(")
doubled_words_re = re.compile(
    r"\b(then?|[iao]n|i[fst]|but|f?or|at|and|[dt]o)\s+\1\b")
# Minimum number of characters required in an option's help text.
opt_help_text_min_char_count = 10
class BaseASTChecker(ast.NodeVisitor):
    """Provides a simple framework for writing AST-based checks.

    Subclasses should implement visit_* methods like any other AST visitor
    implementation. When they detect an error for a particular node the
    method should call ``self.add_error(offending_node)``. Details about
    where in the code the error occurred will be pulled from the node
    object.

    Subclasses should also provide a class variable named CHECK_DESC to
    be used for the human readable error message.
    """

    def __init__(self, tree, filename):
        """This object is created automatically by pep8.

        :param tree: an AST tree
        :param filename: name of the file being analyzed
                         (ignored by our checks)
        """
        self._tree = tree
        self._errors = []

    def run(self):
        """Called automatically by pep8."""
        self.visit(self._tree)
        return self._errors

    def add_error(self, node, message=None):
        """Add an error caused by a node to the list of errors for pep8."""
        self._errors.append((
            node.lineno,
            node.col_offset,
            message or self.CHECK_DESC,
            self.__class__,
        ))

    def _check_call_names(self, call_node, names):
        # True only for a plain-name call (e.g. ``foo(...)``) whose callee
        # name appears in *names*; any other node shape is not a match.
        return (isinstance(call_node, ast.Call)
                and isinstance(call_node.func, ast.Name)
                and call_node.func.id in names)
def import_no_db_in_virt(logical_line, filename):
    """Check for db calls from nova/virt
    As of grizzly-2 all the database calls have been removed from
    nova/virt, and we want to keep it that way.
    N307
    """
    in_virt_tree = "nova/virt" in filename and not filename.endswith("fake.py")
    if in_virt_tree and logical_line.startswith("from nova import db"):
        yield (0, "N307: nova.db import not allowed in nova/virt/*")
def no_db_session_in_public_api(logical_line, filename):
    """N309: the public db api must not accept a session argument."""
    if "db/api.py" not in filename:
        return
    if session_check.match(logical_line):
        yield (0, "N309: public db api methods may not accept session")
def use_timeutils_utcnow(logical_line, filename):
    """N310: require timeutils.utcnow() over the datetime module."""
    # tools are OK to use the standard datetime module
    if "/tools/" in filename:
        return
    template = "N310: timeutils.utcnow() must be used instead of datetime.%s()"
    for func_name in ('now', 'utcnow'):
        offset = logical_line.find('datetime.%s' % func_name)
        if offset != -1:
            yield (offset, template % func_name)
def _get_virt_name(regex, data):
m = regex.match(data)
if m is None:
return None
driver = m.group(1)
# Ignore things we mis-detect as virt drivers in the regex
if driver in ["test_virt_drivers", "driver", "firewall",
"disk", "api", "imagecache", "cpu", "hardware",
"image"]:
return None
return driver
def import_no_virt_driver_import_deps(physical_line, filename):
    """Check virt drivers' modules aren't imported by other drivers
    Modules under each virt driver's directory are
    considered private to that virt driver. Other drivers
    in Nova must not access those drivers. Any code that
    is to be shared should be refactored into a common
    module
    N311
    """
    importing_driver = _get_virt_name(virt_file_re, filename)
    imported_driver = _get_virt_name(virt_import_re, physical_line)
    crosses_drivers = (imported_driver is not None and
                       importing_driver is not None and
                       importing_driver != imported_driver)
    if crosses_drivers:
        return (0, "N311: importing code from other virt drivers forbidden")
def import_no_virt_driver_config_deps(physical_line, filename):
    """Check virt drivers' config vars aren't used by other drivers
    Modules under each virt driver's directory are
    considered private to that virt driver. Other drivers
    in Nova must not use their config vars. Any config vars
    that are to be shared should be moved into a common module
    N312
    """
    using_driver = _get_virt_name(virt_file_re, filename)
    owning_driver = _get_virt_name(virt_config_re, physical_line)
    crosses_drivers = (owning_driver is not None and
                       using_driver is not None and
                       using_driver != owning_driver)
    if crosses_drivers:
        return (0, "N312: using config vars from other virt drivers forbidden")
def capital_cfg_help(logical_line, tokens):
    """N313: config option help strings should start capitalized."""
    if not cfg_re.match(logical_line):
        return
    for idx, token in enumerate(tokens):
        if token[1] == "help":
            # The help text literal is two tokens after the keyword; index 1
            # of the token text is the first character inside the quote.
            text = tokens[idx + 2][1]
            if len(text) > 1 and text[1].islower():
                yield (0, "N313: capitalize help string")
def no_vi_headers(physical_line, line_number, lines):
    """Check for vi editor configuration in source files.
    By default vi modelines can only appear in the first or
    last 5 lines of a source file.
    N314
    """
    # NOTE(gilliard): line_number is 1-indexed
    near_edge = line_number <= 5 or line_number > len(lines) - 5
    if near_edge and vi_header_re.match(physical_line):
        return 0, "N314: Don't put vi configuration in source files"
def assert_true_instance(logical_line):
    """Check for assertTrue(isinstance(a, b)) sentences
    N316
    """
    match = asse_trueinst_re.match(logical_line)
    if match is not None:
        yield (0, "N316: assertTrue(isinstance(a, b)) sentences not allowed")
def assert_equal_type(logical_line):
    """Check for assertEqual(type(A), B) sentences
    N317
    """
    match = asse_equal_type_re.match(logical_line)
    if match is not None:
        yield (0, "N317: assertEqual(type(A), B) sentences not allowed")
def assert_equal_none(logical_line):
    """Check for assertEqual(A, None) or assertEqual(None, A) sentences
    N318
    """
    found = asse_equal_start_with_none_re.search(logical_line)
    if found is None:
        found = asse_equal_end_with_none_re.search(logical_line)
    if found:
        yield (0, "N318: assertEqual(A, None) or assertEqual(None, A) "
                  "sentences not allowed")
def no_translate_debug_logs(logical_line, filename):
    """Check for 'LOG.debug(_('
    As per our translation policy,
    https://wiki.openstack.org/wiki/LoggingStandards#Log_Translation
    we shouldn't translate debug level logs.
    * This check assumes that 'LOG' is a logger.
    * Use filename so we can start enforcing this in specific folders instead
      of needing to do so all at once.
    N319
    """
    prefix = "LOG.debug(_("
    if logical_line[:len(prefix)] == prefix:
        yield (0, "N319 Don't translate debug level logs")
def no_import_translation_in_tests(logical_line, filename):
    """Check for 'from nova.i18n import _'
    N337
    """
    if 'nova/tests/' not in filename:
        return
    if import_translation_for_log_or_exception.match(logical_line):
        yield (0, "N337 Don't import translation in tests")
def no_setting_conf_directly_in_tests(logical_line, filename):
    """Check for setting CONF.* attributes directly in tests
    The value can leak out of tests affecting how subsequent tests run.
    Using self.flags(option=value) is the preferred method to temporarily
    set config options in tests.
    N320
    """
    if 'nova/tests/' not in filename:
        return
    if conf_attribute_set_re.match(logical_line):
        yield (0, "N320: Setting CONF.* attributes directly in tests is "
                  "forbidden. Use self.flags(option=value) instead")
def validate_log_translations(logical_line, physical_line, filename):
    """N321/N328/N329/N330: log messages must use the translation helpers."""
    # Translations are not required in the test directory
    # and the Xen utilities
    exempt = ("nova/tests" in filename or
              "plugins/xenserver/xenapi/etc/xapi.d" in filename)
    if exempt:
        return
    if pep8.noqa(physical_line):
        return
    # Checked in this order; a line can trigger more than one message.
    checks = (
        (log_translation_info,
         "N328: LOG.info messages require translations `_LI()`!"),
        (log_translation_exception,
         "N329: LOG.exception messages require translations `_LE()`!"),
        (log_translation_LW,
         "N330: LOG.warning, LOG.warn messages require translations `_LW()`!"),
        (log_translation,
         "N321: Log messages require translations!"),
    )
    for pattern, message in checks:
        if pattern.match(logical_line):
            yield (0, message)
def no_mutable_default_args(logical_line):
    """N322: flag mutable default argument values."""
    if mutable_default_args.match(logical_line):
        yield (0, "N322: Method's default argument shouldn't be mutable!")
def check_explicit_underscore_import(logical_line, filename):
    """Check for explicit import of the _ function
    We need to ensure that any files that are using the _() function
    to translate logs are explicitly importing the _ function. We
    can't trust unit test to catch whether the import has been
    added so we need to check for it here.
    """
    # Files already known to import ``_`` need no further checking.
    if filename in UNDERSCORE_IMPORT_FILES:
        return
    imports_underscore = (underscore_import_check.match(logical_line) or
                          custom_underscore_check.match(logical_line))
    if imports_underscore:
        UNDERSCORE_IMPORT_FILES.append(filename)
    elif (translated_log.match(logical_line) or
          string_translation.match(logical_line)):
        yield (0, "N323: Found use of _() without explicit import of _ !")
def use_jsonutils(logical_line, filename):
    """N324: jsonutils must be used instead of the stdlib json module."""
    # the code below that path is not meant to be executed from neutron
    # tree where jsonutils module is present, so don't enforce its usage
    # for this subdirectory
    if "plugins/xenserver" in filename:
        return
    # tools are OK to use the standard json module
    if "/tools/" in filename:
        return
    if "json." not in logical_line:
        return
    for func in ('dumps(', 'dump(', 'loads(', 'load('):
        offset = logical_line.find('json.%s' % func)
        if offset != -1:
            yield (offset,
                   "N324: jsonutils.%(fun)s must be used instead of "
                   "json.%(fun)s" % {'fun': func[:-1]})
def check_api_version_decorator(logical_line, previous_logical, blank_before,
                                filename):
    """N332: the api_version decorator must come first on a method."""
    # A blank line between decorators means this one cannot be stacked
    # directly under another decorator.
    if blank_before != 0:
        return
    if (re.match(api_version_re, logical_line) and
            re.match(decorator_re, previous_logical)):
        yield (0, "N332: the api_version decorator must be the first decorator"
                  " on a method.")
class CheckForStrUnicodeExc(BaseASTChecker):
    """Checks for the use of str() or unicode() on an exception.
    This currently only handles the case where str() or unicode()
    is used in the scope of an exception handler. If the exception
    is passed into a function, returned from an assertRaises, or
    used on an exception created in the same scope, this does not
    catch it.
    """
    CHECK_DESC = ('N325 str() and unicode() cannot be used on an '
                  'exception. Remove or use six.text_type()')
    def __init__(self, tree, filename):
        super(CheckForStrUnicodeExc, self).__init__(tree, filename)
        # Stack of exception-handler variable names currently in scope.
        self.name = []
        # Call nodes already reported, to avoid duplicate errors when a
        # node is revisited via generic_visit from an enclosing handler.
        self.already_checked = []
    def visit_TryExcept(self, node):
        # Push each handler's bound exception name while visiting, and pop
        # it afterwards so the name only applies inside that handler.
        # NOTE(review): ``handler.name.id`` assumes the Python 2 AST where
        # the handler target is a Name node; on Python 3 ``handler.name``
        # is a plain string -- confirm the intended interpreter.
        for handler in node.handlers:
            if handler.name:
                self.name.append(handler.name.id)
                super(CheckForStrUnicodeExc, self).generic_visit(node)
                self.name = self.name[:-1]
            else:
                super(CheckForStrUnicodeExc, self).generic_visit(node)
    def visit_Call(self, node):
        # Flag str(e)/unicode(e) where ``e`` is a handler-bound exception.
        if self._check_call_names(node, ['str', 'unicode']):
            if node not in self.already_checked:
                self.already_checked.append(node)
                if isinstance(node.args[0], ast.Name):
                    if node.args[0].id in self.name:
                        self.add_error(node.args[0])
        super(CheckForStrUnicodeExc, self).generic_visit(node)
class CheckForTransAdd(BaseASTChecker):
    """Checks for the use of concatenation on a translated string.
    Translations should not be concatenated with other strings, but
    should instead include the string being added to the translated
    string to give the translators the most information.
    """
    CHECK_DESC = ('N326 Translated messages cannot be concatenated. '
                  'String should be included in translated message.')
    TRANS_FUNC = ['_', '_LI', '_LW', '_LE', '_LC']
    def visit_BinOp(self, node):
        # Only '+' is concatenation; report whichever side is a translation
        # call (at most one side per BinOp, left side first).
        if isinstance(node.op, ast.Add):
            for operand in (node.left, node.right):
                if self._check_call_names(operand, self.TRANS_FUNC):
                    self.add_error(operand)
                    break
        super(CheckForTransAdd, self).generic_visit(node)
def assert_true_or_false_with_in(logical_line):
    """Check for assertTrue/False(A in B), assertTrue/False(A not in B),
    assertTrue/False(A in B, message) or assertTrue/False(A not in B, message)
    sentences.
    N334
    """
    found = asse_true_false_with_in_or_not_in.search(logical_line)
    if found is None:
        found = asse_true_false_with_in_or_not_in_spaces.search(logical_line)
    if found:
        yield (0, "N334: Use assertIn/NotIn(A, B) rather than "
                  "assertTrue/False(A in/not in B) when checking collection "
                  "contents.")
def assert_raises_regexp(logical_line):
    """Check for usage of deprecated assertRaisesRegexp
    N335
    """
    if asse_raises_regexp.search(logical_line) is not None:
        yield (0, "N335: assertRaisesRegex must be used instead "
                  "of assertRaisesRegexp")
def dict_constructor_with_list_copy(logical_line):
    """N336: prefer a dict comprehension over dict() on key-value pairs."""
    if dict_constructor_with_list_copy_re.match(logical_line) is not None:
        yield (0, "N336: Must use a dict comprehension instead of a dict"
                  " constructor with a sequence of key-value pairs.")
def assert_equal_in(logical_line):
    """Check for assertEqual(A in B, True), assertEqual(True, A in B),
    assertEqual(A in B, False) or assertEqual(False, A in B) sentences
    N338
    """
    found = asse_equal_in_start_with_true_or_false_re.search(logical_line)
    if found is None:
        found = asse_equal_in_end_with_true_or_false_re.search(logical_line)
    if found:
        yield (0, "N338: Use assertIn/NotIn(A, B) rather than "
                  "assertEqual(A in B, True/False) when checking collection "
                  "contents.")
def check_http_not_implemented(logical_line, physical_line, filename):
    """N339: use raise_feature_not_supported() for HTTPNotImplemented."""
    if pep8.noqa(physical_line):
        return
    # Only the new (non-legacy) compute API tree is checked.
    in_new_compute_api = (
        "nova/api/openstack/compute" in filename and
        "nova/api/openstack/compute/legacy_v2" not in filename)
    if not in_new_compute_api:
        return
    if re.match(http_not_implemented_re, logical_line):
        yield (0, "N339: HTTPNotImplemented response must be implemented with"
                  " common raise_feature_not_supported().")
def check_greenthread_spawns(logical_line, physical_line, filename):
    """Check for use of greenthread.spawn(), greenthread.spawn_n(),
    eventlet.spawn(), and eventlet.spawn_n()
    N340
    """
    if "nova/utils.py" in filename or "nova/tests/" in filename:
        return
    found = re.match(spawn_re, logical_line)
    if found:
        yield (0, ("N340: Use nova.utils.%(spawn)s() rather than "
                   "greenthread.%(spawn)s() and eventlet.%(spawn)s()")
               % {'spawn': found.group('spawn_part')})
def check_no_contextlib_nested(logical_line, filename):
    """N341: contextlib.nested is deprecated."""
    if contextlib_nested.match(logical_line):
        yield (0, "N341: contextlib.nested is deprecated. With Python 2.7 and "
                  "later the with-statement supports multiple nested objects. "
                  "See https://docs.python.org/2/library/contextlib.html"
                  "#contextlib.nested for more information. "
                  "nova.test.nested() is an alternative as well.")
def check_config_option_in_central_place(logical_line, filename):
    """N342: new config options belong under /nova/conf/."""
    # That's the correct location
    if "nova/conf/" in filename:
        return
    # TODO(markus_z) This is just temporary until all config options are
    # moved to the central place. To avoid that a once cleaned up place
    # introduces new config options, we do a check here. This array will
    # get quite huge over the time, but will be removed at the end of the
    # reorganization.
    # You can add the full path to a module or folder. It's just a substring
    # check, which makes it flexible enough.
    cleaned_up = ["nova/console/serial.py",
                  "nova/cmd/serialproxy.py",
                  ]
    if not any(part in filename for part in cleaned_up):
        return
    if cfg_opt_re.match(logical_line):
        yield (0, "N342: Config options should be in the central location "
                  "'/nova/conf/*'. Do not declare new config options outside "
                  "of that folder.")
def check_doubled_words(physical_line, filename):
    """Check for the common doubled-word typos
    N343
    """
    found = re.search(doubled_words_re, physical_line)
    if found:
        return (0, "N343: Doubled word '%(word)s' typo found"
                % {'word': found.group(1)})
def check_python3_no_iteritems(logical_line):
    """N344: dict.iteritems() does not exist on Python 3."""
    if ".iteritems()" in logical_line:
        yield (0, "N344: Use six.iteritems() instead of dict.iteritems().")
def check_python3_no_iterkeys(logical_line):
    """N345: dict.iterkeys() does not exist on Python 3."""
    if ".iterkeys()" in logical_line:
        yield (0, "N345: Use six.iterkeys() instead of dict.iterkeys().")
def check_python3_no_itervalues(logical_line):
    """N346: dict.itervalues() does not exist on Python 3."""
    if ".itervalues()" in logical_line:
        yield (0, "N346: Use six.itervalues() instead of dict.itervalues().")
def cfg_help_with_enough_text(logical_line, tokens):
    """N347: config options must carry a sufficiently long help text.

    Scans the token stream of a ``cfg.*Opt(`` declaration for a ``help``
    keyword and yields an error when the help string is shorter than
    ``opt_help_text_min_char_count`` characters, or missing entirely.
    """
    # TODO(markus_z): The count of 10 chars is the *highest* number I could
    # use to introduce this new check without breaking the gate. IOW, if I
    # use a value of 15 for example, the gate checks will fail because we have
    # a few config options which use fewer chars than 15 to explain their
    # usage (for example the options "ca_file" and "cert").
    # As soon as the implementation of bp centralize-config-options is
    # finished, I wanted to increase that magic number to a higher (to be
    # defined) value.
    # This check is an attempt to programmatically check a part of the review
    # guidelines http://docs.openstack.org/developer/nova/code-review.html
    # NOTE: "Is is likely" typo in the original message fixed to "It is".
    msg = ("N347: A config option is a public interface to the cloud admins "
           "and should be properly documented. A part of that is to provide "
           "enough help text to describe this option. Use at least %s chars "
           "for that description. It is likely that this minimum will be "
           "increased in the future." % opt_help_text_min_char_count)
    if not cfg_opt_re.match(logical_line):
        return
    # ignore DeprecatedOpt objects. They get mentioned in the release notes
    # and don't need a lengthy help text anymore
    if "DeprecatedOpt" in logical_line:
        return

    def get_token_value(idx):
        # Token tuples are (type, string, ...); index 1 is the raw text.
        return tokens[idx][1]

    def get_token_values(start_index, length):
        # Concatenate the raw text of ``length`` consecutive tokens.
        values = ""
        for offset in range(length):
            values += get_token_value(start_index + offset)
        return values

    def get_help_token_index():
        # Index of the "help" keyword token, or -1 when absent.
        for idx in range(len(tokens)):
            if get_token_value(idx) == "help":
                return idx
        return -1

    def has_help():
        return get_help_token_index() >= 0

    def get_trimmed_help_text(t):
        # Extract the literal after "help=", skipping an optional "_(" or
        # "(" wrapper, then collapse internal whitespace.
        txt = ""
        # len(["help", "=", "_", "("]) ==> 4
        if get_token_values(t, 4) == "help=_(":
            txt = get_token_value(t + 4)
        # len(["help", "=", "("]) ==> 3
        elif get_token_values(t, 3) == "help=(":
            txt = get_token_value(t + 3)
        # len(["help", "="]) ==> 2
        else:
            txt = get_token_value(t + 2)
        return " ".join(txt.strip('\"\'').split())

    def has_enough_help_text(txt):
        return len(txt) >= opt_help_text_min_char_count

    if has_help():
        t = get_help_token_index()
        txt = get_trimmed_help_text(t)
        if not has_enough_help_text(txt):
            yield (0, msg)
    else:
        # A missing help text is just as bad as one that is too short.
        yield (0, msg)
def factory(register):
    """Register every hacking check in this module with *register*."""
    all_checks = (
        import_no_db_in_virt,
        no_db_session_in_public_api,
        use_timeutils_utcnow,
        import_no_virt_driver_import_deps,
        import_no_virt_driver_config_deps,
        capital_cfg_help,
        no_vi_headers,
        no_import_translation_in_tests,
        assert_true_instance,
        assert_equal_type,
        assert_equal_none,
        assert_raises_regexp,
        no_translate_debug_logs,
        no_setting_conf_directly_in_tests,
        validate_log_translations,
        no_mutable_default_args,
        check_explicit_underscore_import,
        use_jsonutils,
        check_api_version_decorator,
        CheckForStrUnicodeExc,
        CheckForTransAdd,
        assert_true_or_false_with_in,
        dict_constructor_with_list_copy,
        assert_equal_in,
        check_http_not_implemented,
        check_no_contextlib_nested,
        check_greenthread_spawns,
        check_config_option_in_central_place,
        check_doubled_words,
        check_python3_no_iteritems,
        check_python3_no_iterkeys,
        check_python3_no_itervalues,
        cfg_help_with_enough_text,
    )
    # Registration order matches the original one-call-per-check version.
    for check in all_checks:
        register(check)
| {
"content_hash": "0ddc8cbde5658dd614ac7fa6718cba27",
"timestamp": "",
"source": "github",
"line_count": 704,
"max_line_length": 79,
"avg_line_length": 36.86079545454545,
"alnum_prop": 0.6252793834296725,
"repo_name": "dims/nova",
"id": "6a0ab2fbdc5db645c6caea2068ab725f1e68cfe5",
"size": "26556",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nova/hacking/checks.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "16952469"
},
{
"name": "Shell",
"bytes": "36658"
},
{
"name": "Smarty",
"bytes": "317320"
}
],
"symlink_target": ""
} |
import os
import util.dict_utils as du
# Names of the TUM RGB-D benchmark sequences known to this manager.
# Each name doubles as a config key and as the on-disk folder name that
# do_imports searches for under the root folder.
dataset_names = [
    'rgbd_dataset_freiburg1_xyz',
    'rgbd_dataset_freiburg1_rpy',
    'rgbd_dataset_freiburg2_xyz',
    'rgbd_dataset_freiburg2_rpy',
    'rgbd_dataset_freiburg1_360',
    'rgbd_dataset_freiburg1_floor',
    'rgbd_dataset_freiburg1_desk',
    'rgbd_dataset_freiburg1_desk2',
    'rgbd_dataset_freiburg1_room',
    'rgbd_dataset_freiburg2_360_hemisphere',
    'rgbd_dataset_freiburg2_360_kidnap',
    'rgbd_dataset_freiburg2_desk',
    'rgbd_dataset_freiburg2_large_no_loop',
    'rgbd_dataset_freiburg2_large_with_loop',
    'rgbd_dataset_freiburg3_long_office_household',
    'rgbd_dataset_freiburg2_pioneer_360',
    'rgbd_dataset_freiburg2_pioneer_slam',
    'rgbd_dataset_freiburg2_pioneer_slam2',
    'rgbd_dataset_freiburg2_pioneer_slam3',
    'rgbd_dataset_freiburg3_nostructure_notexture_far',
    'rgbd_dataset_freiburg3_nostructure_notexture_near_withloop',
    'rgbd_dataset_freiburg3_nostructure_texture_far',
    'rgbd_dataset_freiburg3_nostructure_texture_near_withloop',
    'rgbd_dataset_freiburg3_structure_notexture_far',
    'rgbd_dataset_freiburg3_structure_notexture_near',
    'rgbd_dataset_freiburg3_structure_texture_far',
    'rgbd_dataset_freiburg3_structure_texture_near',
    'rgbd_dataset_freiburg2_desk_with_person',
    'rgbd_dataset_freiburg3_sitting_static',
    'rgbd_dataset_freiburg3_sitting_xyz',
    'rgbd_dataset_freiburg3_sitting_halfsphere',
    'rgbd_dataset_freiburg3_sitting_rpy',
    'rgbd_dataset_freiburg3_walking_static',
    'rgbd_dataset_freiburg3_walking_xyz',
    'rgbd_dataset_freiburg3_walking_halfsphere',
    'rgbd_dataset_freiburg3_walking_rpy',
    'rgbd_dataset_freiburg1_plant',
    'rgbd_dataset_freiburg1_teddy',
    'rgbd_dataset_freiburg2_coke',
    'rgbd_dataset_freiburg2_dishes',
    'rgbd_dataset_freiburg2_flowerbouquet',
    'rgbd_dataset_freiburg2_flowerbouquet_brownbackground',
    'rgbd_dataset_freiburg2_metallic_sphere',
    'rgbd_dataset_freiburg2_metallic_sphere2',
    'rgbd_dataset_freiburg3_cabinet',
    'rgbd_dataset_freiburg3_large_cabinet',
    'rgbd_dataset_freiburg3_teddy'
]
class TUMManager:
    """Tracks which TUM RGB-D sequences are enabled and their imported ids."""
    def __init__(self, config, dataset_ids=None):
        # Every known dataset starts disabled; enable those that are
        # truthy in the supplied config.
        self._config = {name: False for name in dataset_names}
        self._dataset_ids = {}
        for name in self._config.keys():
            if name in config and bool(config[name]):
                self._config[name] = True
        if dataset_ids is not None:
            du.defaults(self._dataset_ids, dataset_ids)
    @property
    def dataset_ids(self):
        """
        Get a set of all the dataset ids
        :return:
        """
        return {dataset_id for dataset_id in self._dataset_ids.values()}
    @property
    def datasets(self):
        """
        A generator of name -> id pairs
        :return:
        """
        return self._dataset_ids.items()
    def do_imports(self, root_folder, task_manager):
        """Queue import tasks for every enabled dataset found under
        *root_folder* that does not yet have an imported id."""
        pending = {name for name, enabled in self._config.items()
                   if bool(enabled) and
                   self._dataset_ids.get(name) is None}
        # Recursively search for the directories to import from the root folder
        located = set()
        for dirpath, subdirs, _ in os.walk(root_folder):
            located.update((subdir, os.path.join(dirpath, subdir))
                           for subdir in subdirs if subdir in pending)
        # Create an import task for each dataset folder we found
        for dataset_folder, full_path in located:
            import_dataset_task = task_manager.get_import_dataset_task(
                module_name='dataset.tum.tum_loader',
                path=full_path,
                num_cpus=1,
                num_gpus=0,
                memory_requirements='3GB',
                expected_duration='8:00:00'
            )
            if import_dataset_task.is_finished:
                self._dataset_ids[dataset_folder] = import_dataset_task.result
            else:
                task_manager.do_task(import_dataset_task)
    def serialize(self):
        """Serialize the manager's state to a plain dict."""
        return {
            'config': self._config,
            'dataset_ids': self._dataset_ids
        }
    @classmethod
    def deserialize(cls, serialized, **kwargs):
        """Rebuild a manager from the output of :meth:`serialize`."""
        config = {}
        dataset_ids = {}
        if 'config' in serialized:
            du.defaults(config, serialized['config'])
        if 'dataset_ids' in serialized:
            du.defaults(dataset_ids, serialized['dataset_ids'])
        return cls(config, dataset_ids, **kwargs)
# Add read-only properties to the manager class for each of the datasets
# This means that specific datasets can be requested as tum_manager.rgbd_dataset_freiburg1_xyz
def _make_dataset_property(name):
    """Build a read-only property returning the id for *name* (or None).

    A factory function is used instead of a bare lambda in the loop because
    the original lambda closed over the loop variable, which is late-bound:
    after the loop finished, every property looked up the *last* dataset
    name, so all of them returned the same id. Binding the name here gives
    each property its own value.
    """
    return property(
        lambda self: self._dataset_ids[name] if name in self._dataset_ids
        else None)

for _name in dataset_names:
    setattr(TUMManager, _name, _make_dataset_property(_name))
| {
"content_hash": "7cd4922f414a7f2c0126bf8f828d7054",
"timestamp": "",
"source": "github",
"line_count": 131,
"max_line_length": 119,
"avg_line_length": 37.44274809160305,
"alnum_prop": 0.6389398572884811,
"repo_name": "jskinn/robot-vision-experiment-framework",
"id": "8bd032dc5ce0a0be33118920113a66c5ede97e68",
"size": "4940",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dataset/tum/tum_manager.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "1614485"
},
{
"name": "Shell",
"bytes": "392"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Alter ``myapp.Value.timestamp`` to a plain ``DateTimeField`` with no
    extra field options, preserving the model's current default.
    """
    dependencies = [
        ('myapp', '0008_auto_20150701_1012'),
    ]
    operations = [
        migrations.AlterField(
            model_name='value',
            name='timestamp',
            field=models.DateTimeField(),
            # Keep the field's existing default during the schema change.
            preserve_default=True,
        ),
    ]
| {
"content_hash": "4ab1aad3be8e41e15f252743f2387fec",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 45,
"avg_line_length": 21.263157894736842,
"alnum_prop": 0.5816831683168316,
"repo_name": "mpetyx/energagement",
"id": "2f8c1b23c4ccfd6ebb7534b95aa2bdc532eeb10b",
"size": "428",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "energagement/myapp/migrations/0009_auto_20150711_2334.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "22435"
},
{
"name": "HTML",
"bytes": "234304"
},
{
"name": "JavaScript",
"bytes": "58711"
},
{
"name": "Python",
"bytes": "82307"
}
],
"symlink_target": ""
} |
"""
Cross Site Request Forgery Middleware.
This module provides a middleware that implements protection
against request forgeries from other sites.
"""
from __future__ import unicode_literals
import hashlib
import logging
import re
import random
from django.conf import settings
from django.core.urlresolvers import get_callable
from django.utils.cache import patch_vary_headers
from django.utils.encoding import force_text
from django.utils.http import same_origin
from django.utils.crypto import constant_time_compare, get_random_string
# Logger used for all CSRF rejection warnings (403 reporting).
logger = logging.getLogger('django.request')
# Human-readable rejection reasons passed to the CSRF failure view.
REASON_NO_REFERER = "Referer checking failed - no Referer."
REASON_BAD_REFERER = "Referer checking failed - %s does not match %s."
REASON_NO_CSRF_COOKIE = "CSRF cookie not set."
REASON_BAD_TOKEN = "CSRF token missing or incorrect."
# Length (in characters) of generated CSRF keys.
CSRF_KEY_LENGTH = 32
def _get_failure_view():
    """
    Returns the view to be used for CSRF rejections
    """
    view_path = settings.CSRF_FAILURE_VIEW
    return get_callable(view_path)
def _get_new_csrf_key():
    """Return a freshly generated random CSRF key of the standard length."""
    key_length = CSRF_KEY_LENGTH
    return get_random_string(key_length)
def get_token(request):
    """
    Returns the CSRF token required for a POST form. The token is an
    alphanumeric value.
    A side effect of calling this function is to make the csrf_protect
    decorator and the CsrfViewMiddleware add a CSRF cookie and a 'Vary: Cookie'
    header to the outgoing response. For this reason, you may need to use this
    function lazily, as is done by the csrf context processor.
    """
    meta = request.META
    # Flag the cookie as used so the middleware emits it on the response.
    meta["CSRF_COOKIE_USED"] = True
    return meta.get("CSRF_COOKIE")
def _sanitize_token(token):
    """Strip non-alphanumerics from *token*; regenerate it when over-long
    or when nothing remains after cleaning."""
    # Allow only alphanum
    if len(token) > CSRF_KEY_LENGTH:
        return _get_new_csrf_key()
    cleaned = re.sub('[^a-zA-Z0-9]+', '', force_text(token))
    if not cleaned:
        # In case the cookie has been truncated to nothing at some point.
        return _get_new_csrf_key()
    return cleaned
class CsrfViewMiddleware(object):
    """
    Middleware that requires a present and correct csrfmiddlewaretoken
    for POST requests that have a CSRF cookie, and sets an outgoing
    CSRF cookie.
    This middleware should be used in conjunction with the csrf_token template
    tag.
    """
    # The _accept and _reject methods currently only exist for the sake of the
    # requires_csrf_token decorator.
    def _accept(self, request):
        """Mark the request as CSRF-checked and let it proceed."""
        # Avoid checking the request twice by adding a custom attribute to
        # request. This will be relevant when both decorator and middleware
        # are used.
        request.csrf_processing_done = True
        return None
    def _reject(self, request, reason):
        """Return the configured CSRF failure view's response for *reason*."""
        return _get_failure_view()(request, reason=reason)
    def process_view(self, request, callback, callback_args, callback_kwargs):
        """Validate the CSRF token on unsafe requests.

        Returns None to accept the request, or the failure view's response
        to reject it. Also ensures request.META["CSRF_COOKIE"] is set so
        get_token() works for the view.
        """
        if getattr(request, 'csrf_processing_done', False):
            return None
        try:
            csrf_token = _sanitize_token(
                request.COOKIES[settings.CSRF_COOKIE_NAME])
            # Use same token next time
            request.META['CSRF_COOKIE'] = csrf_token
        except KeyError:
            csrf_token = None
            # Generate token and store it in the request, so it's
            # available to the view.
            request.META["CSRF_COOKIE"] = _get_new_csrf_key()
        # Wait until request.META["CSRF_COOKIE"] has been manipulated before
        # bailing out, so that get_token still works
        if getattr(callback, 'csrf_exempt', False):
            return None
        # Assume that anything not defined as 'safe' by RFC2616 needs protection
        if request.method not in ('GET', 'HEAD', 'OPTIONS', 'TRACE'):
            if getattr(request, '_dont_enforce_csrf_checks', False):
                # Mechanism to turn off CSRF checks for test suite.
                # It comes after the creation of CSRF cookies, so that
                # everything else continues to work exactly the same
                # (e.g. cookies are sent, etc.), but before any
                # branches that call reject().
                return self._accept(request)
            if request.is_secure():
                # Suppose user visits http://example.com/
                # An active network attacker (man-in-the-middle, MITM) sends a
                # POST form that targets https://example.com/detonate-bomb/ and
                # submits it via JavaScript.
                #
                # The attacker will need to provide a CSRF cookie and token, but
                # that's no problem for a MITM and the session-independent
                # nonce we're using. So the MITM can circumvent the CSRF
                # protection. This is true for any HTTP connection, but anyone
                # using HTTPS expects better! For this reason, for
                # https://example.com/ we need additional protection that treats
                # http://example.com/ as completely untrusted. Under HTTPS,
                # Barth et al. found that the Referer header is missing for
                # same-domain requests in only about 0.2% of cases or less, so
                # we can use strict Referer checking.
                referer = request.META.get('HTTP_REFERER')
                if referer is None:
                    logger.warning('Forbidden (%s): %s',
                                   REASON_NO_REFERER, request.path,
                        extra={
                            'status_code': 403,
                            'request': request,
                        }
                    )
                    return self._reject(request, REASON_NO_REFERER)
                # Note that request.get_host() includes the port.
                good_referer = 'https://%s/' % request.get_host()
                if not same_origin(referer, good_referer):
                    reason = REASON_BAD_REFERER % (referer, good_referer)
                    logger.warning('Forbidden (%s): %s', reason, request.path,
                        extra={
                            'status_code': 403,
                            'request': request,
                        }
                    )
                    return self._reject(request, reason)
            if csrf_token is None:
                # No CSRF cookie. For POST requests, we insist on a CSRF cookie,
                # and in this way we can avoid all CSRF attacks, including login
                # CSRF.
                logger.warning('Forbidden (%s): %s',
                               REASON_NO_CSRF_COOKIE, request.path,
                    extra={
                        'status_code': 403,
                        'request': request,
                    }
                )
                return self._reject(request, REASON_NO_CSRF_COOKIE)
            # Check non-cookie token for match.
            request_csrf_token = ""
            if request.method == "POST":
                request_csrf_token = request.POST.get('csrfmiddlewaretoken', '')
            if request_csrf_token == "":
                # Fall back to X-CSRFToken, to make things easier for AJAX,
                # and possible for PUT/DELETE.
                request_csrf_token = request.META.get('HTTP_X_CSRFTOKEN', '')
            if not constant_time_compare(request_csrf_token, csrf_token):
                logger.warning('Forbidden (%s): %s',
                               REASON_BAD_TOKEN, request.path,
                    extra={
                        'status_code': 403,
                        'request': request,
                    }
                )
                return self._reject(request, REASON_BAD_TOKEN)
        return self._accept(request)
    def process_response(self, request, response):
        """Set (or refresh) the CSRF cookie on the outgoing response when
        the view consumed the token via get_token()."""
        if getattr(response, 'csrf_processing_done', False):
            return response
        # If CSRF_COOKIE is unset, then CsrfViewMiddleware.process_view was
        # never called, probably because a request middleware returned a response
        # (for example, contrib.auth redirecting to a login page).
        if request.META.get("CSRF_COOKIE") is None:
            return response
        if not request.META.get("CSRF_COOKIE_USED", False):
            return response
        # Set the CSRF cookie even if it's already set, so we renew
        # the expiry timer. max_age is 52 weeks, i.e. roughly one year.
        response.set_cookie(settings.CSRF_COOKIE_NAME,
                            request.META["CSRF_COOKIE"],
                            max_age = 60 * 60 * 24 * 7 * 52,
                            domain=settings.CSRF_COOKIE_DOMAIN,
                            path=settings.CSRF_COOKIE_PATH,
                            secure=settings.CSRF_COOKIE_SECURE,
                            httponly=settings.CSRF_COOKIE_HTTPONLY
                            )
        # Content varies with the CSRF cookie, so set the Vary header.
        patch_vary_headers(response, ('Cookie',))
        response.csrf_processing_done = True
        return response
| {
"content_hash": "2a2416c2458ecd56e7b933a41ee42500",
"timestamp": "",
"source": "github",
"line_count": 219,
"max_line_length": 80,
"avg_line_length": 40.77625570776256,
"alnum_prop": 0.5764837625979843,
"repo_name": "mammique/django",
"id": "423034478b4cb00bd4a882395e6fc486b38d6d17",
"size": "8930",
"binary": false,
"copies": "7",
"ref": "refs/heads/tp_alpha",
"path": "django/middleware/csrf.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "84061"
},
{
"name": "Python",
"bytes": "8016393"
},
{
"name": "Shell",
"bytes": "12116"
}
],
"symlink_target": ""
} |
from ....testing import assert_equal
from ..preprocess import Bandpass
def test_Bandpass_inputs():
    """Check the expected trait metadata (argstr, position, mandatory, ...)
    on the auto-generated AFNI Bandpass input spec, one assertion per
    (trait, metadata-key) pair via nose-style yields.
    """
    input_map = dict(args=dict(argstr='%s',
    ),
    automask=dict(argstr='-automask',
    ),
    blur=dict(argstr='-blur %f',
    ),
    despike=dict(argstr='-despike',
    ),
    environ=dict(nohash=True,
    usedefault=True,
    ),
    highpass=dict(argstr='%f',
    mandatory=True,
    position=-3,
    ),
    ignore_exception=dict(nohash=True,
    usedefault=True,
    ),
    in_file=dict(argstr='%s',
    copyfile=False,
    mandatory=True,
    position=-1,
    ),
    localPV=dict(argstr='-localPV %f',
    ),
    lowpass=dict(argstr='%f',
    mandatory=True,
    position=-2,
    ),
    mask=dict(argstr='-mask %s',
    position=2,
    ),
    nfft=dict(argstr='-nfft %d',
    ),
    no_detrend=dict(argstr='-nodetrend',
    ),
    normalize=dict(argstr='-norm',
    ),
    notrans=dict(argstr='-notrans',
    ),
    orthogonalize_dset=dict(argstr='-dsort %s',
    ),
    orthogonalize_file=dict(argstr='-ort %s',
    ),
    out_file=dict(argstr='-prefix %s',
    genfile=True,
    name_source='in_file',
    name_template='%s_bp',
    position=1,
    ),
    outputtype=dict(),
    terminal_output=dict(nohash=True,
    ),
    tr=dict(argstr='-dt %f',
    ),
    )
    inputs = Bandpass.input_spec()
    # Compare each expected metadata value against the live trait definition.
    for key, metadata in list(input_map.items()):
        for metakey, value in list(metadata.items()):
            yield assert_equal, getattr(inputs.traits()[key], metakey), value
def test_Bandpass_outputs():
    """Verify the trait metadata of Bandpass.output_spec.
    Auto-generated style test mirroring test_Bandpass_inputs; the output
    spec only declares ``out_file`` with no extra metadata.
    """
    output_map = dict(out_file=dict(),
    )
    outputs = Bandpass.output_spec()
    # Yield one comparison per (trait, metadata-key) pair.
    for key, metadata in list(output_map.items()):
        for metakey, value in list(metadata.items()):
            yield assert_equal, getattr(outputs.traits()[key], metakey), value
| {
"content_hash": "5f8f16f4acca5ebcb0e76a68e0828245",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 78,
"avg_line_length": 23.56578947368421,
"alnum_prop": 0.5901730876605249,
"repo_name": "carolFrohlich/nipype",
"id": "519d8fd501afd5f32af6c95c42a805425728147e",
"size": "1845",
"binary": false,
"copies": "12",
"ref": "refs/heads/master",
"path": "nipype/interfaces/afni/tests/test_auto_Bandpass.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "9823"
},
{
"name": "KiCad",
"bytes": "3797"
},
{
"name": "Makefile",
"bytes": "2320"
},
{
"name": "Matlab",
"bytes": "1717"
},
{
"name": "Python",
"bytes": "5451077"
},
{
"name": "Shell",
"bytes": "3302"
},
{
"name": "Tcl",
"bytes": "43408"
}
],
"symlink_target": ""
} |
'''
Linode Cloud Module using Linode's REST API
===========================================
The Linode cloud module is used to control access to the Linode VPS system.
Use of this module only requires the ``apikey`` parameter. However, the default root password for new instances
also needs to be set. The password needs to be 8 characters and contain lowercase, uppercase, and numbers.
Set up the cloud configuration at ``/etc/salt/cloud.providers`` or ``/etc/salt/cloud.providers.d/linode.conf``:
.. code-block:: yaml
my-linode-provider:
apikey: f4ZsmwtB1c7f85Jdu43RgXVDFlNjuJaeIYV8QMftTqKScEB2vSosFSr...
password: F00barbaz
driver: linode
linode-profile:
provider: my-linode-provider
size: Linode 1024
image: CentOS 7
location: London, England, UK
'''
# Import Python Libs
from __future__ import absolute_import
import logging
import pprint
import re
import time
import datetime
# Import Salt Libs
import salt.config as config
import salt.ext.six as six
from salt.ext.six.moves import range
from salt.exceptions import (
SaltCloudConfigError,
SaltCloudException,
SaltCloudNotFound,
SaltCloudSystemExit
)
from salt.utils import warn_until
# Import Salt-Cloud Libs
import salt.utils.cloud
# Get logging started
log = logging.getLogger(__name__)
# The epoch of the last time a query was made
# (updated by _query() to enforce the optional 'ratelimit_sleep' throttle).
LASTCALL = int(time.mktime(datetime.datetime.now().timetuple()))
# Human-readable status fields (documentation: https://www.linode.com/api/linode/linode.list)
# Maps an internal status name to the Linode API's numeric status 'code' and
# a human-readable 'descr'. NOTE(review): the key 'beeing_created' is
# misspelled; it may be referenced elsewhere by this exact spelling, so it is
# intentionally left unchanged here.
LINODE_STATUS = {
    'boot_failed': {
        'code': -2,
        'descr': 'Boot Failed (not in use)',
    },
    'beeing_created': {
        'code': -1,
        'descr': 'Being Created',
    },
    'brand_new': {
        'code': 0,
        'descr': 'Brand New',
    },
    'running': {
        'code': 1,
        'descr': 'Running',
    },
    'poweroff': {
        'code': 2,
        'descr': 'Powered Off',
    },
    'shutdown': {
        'code': 3,
        'descr': 'Shutting Down (not in use)',
    },
    'save_to_disk': {
        'code': 4,
        'descr': 'Saved to Disk (not in use)',
    },
}
# Name under which this driver registers with salt-cloud (see __virtual__).
__virtualname__ = 'linode'
# Only load in this module if the Linode configurations are in place
def __virtual__():
    '''
    Check for Linode configs.
    '''
    # Refuse to load this driver unless provider credentials are configured.
    provider = get_configured_provider()
    return False if provider is False else __virtualname__
def get_configured_provider():
    '''
    Return the first configured instance.
    Requires both 'apikey' and 'password' to be present for the provider
    to be considered configured; returns False otherwise.
    '''
    return config.is_provider_configured(
        __opts__,
        __active_provider_name__ or __virtualname__,
        ('apikey', 'password',)
    )
def avail_images(call=None):
    '''
    Return available Linode images, keyed by their labels.
    CLI Example:
    .. code-block:: bash
        salt-cloud --list-images my-linode-config
        salt-cloud -f avail_images my-linode-config
    '''
    if call == 'action':
        raise SaltCloudException(
            'The avail_images function must be called with -f or --function.'
        )
    distros = _query('avail', 'distributions')['DATA']
    return {distro['LABEL']: distro for distro in distros}
def avail_locations(call=None):
    '''
    Return available Linode datacenter locations, keyed by location name.
    CLI Example:
    .. code-block:: bash
        salt-cloud --list-locations my-linode-config
        salt-cloud -f avail_locations my-linode-config
    '''
    if call == 'action':
        raise SaltCloudException(
            'The avail_locations function must be called with -f or --function.'
        )
    datacenters = _query('avail', 'datacenters')['DATA']
    return {datacenter['LOCATION']: datacenter for datacenter in datacenters}
def avail_sizes(call=None):
    '''
    Return available Linode sizes (plans), keyed by their labels.
    CLI Example:
    .. code-block:: bash
        salt-cloud --list-sizes my-linode-config
        salt-cloud -f avail_sizes my-linode-config
    '''
    if call == 'action':
        # Fixed copy-paste error: the message previously named
        # 'avail_locations' instead of this function.
        raise SaltCloudException(
            'The avail_sizes function must be called with -f or --function.'
        )
    response = _query('avail', 'LinodePlans')
    ret = {}
    for item in response['DATA']:
        name = item['LABEL']
        ret[name] = item
    return ret
def boot(name=None, kwargs=None, call=None):
    '''
    Boot a Linode.
    name
        The name of the Linode to boot. Can be used instead of ``linode_id``.
    linode_id
        The ID of the Linode to boot. If provided, will be used as an
        alternative to ``name`` and reduces the number of API calls to
        Linode by one. Will be preferred over ``name``.
    config_id
        The ID of the Config to boot. Required.
    check_running
        Defaults to True. If set to False, overrides the call to check if
        the VM is running before calling the linode.boot API call. Setting
        ``check_running`` to False is useful during the boot call in the
        create function, since the new VM will not be running yet.
    Can be called as an action (which requires a name):
    .. code-block:: bash
        salt-cloud -a boot my-instance config_id=10
    ...or as a function (which requires either a name or linode_id):
    .. code-block:: bash
        salt-cloud -f boot my-linode-config name=my-instance config_id=10
        salt-cloud -f boot my-linode-config linode_id=1225876 config_id=10
    '''
    if name is None and call == 'action':
        raise SaltCloudSystemExit(
            'The boot action requires a \'name\'.'
        )
    if kwargs is None:
        kwargs = {}
    linode_id = kwargs.get('linode_id', None)
    config_id = kwargs.get('config_id', None)
    check_running = kwargs.get('check_running', True)
    # As a function, 'name' may arrive via kwargs rather than the first arg.
    if call == 'function':
        name = kwargs.get('name', None)
    if name is None and linode_id is None:
        raise SaltCloudSystemExit(
            'The boot function requires either a \'name\' or a \'linode_id\'.'
        )
    if config_id is None:
        raise SaltCloudSystemExit(
            'The boot function requires a \'config_id\'.'
        )
    # linode_id is preferred; fall back to resolving the name (extra API call).
    if linode_id is None:
        linode_id = get_linode_id_from_name(name)
        linode_item = name
    else:
        linode_item = linode_id
    # Check if Linode is running first
    if check_running is True:
        status = get_linode(kwargs={'linode_id': linode_id})['STATUS']
        if status == '1':
            raise SaltCloudSystemExit(
                'Cannot boot Linode {0}. '
                'Linode {0} is already running.'.format(linode_item)
            )
    # Boot the VM and get the JobID from Linode
    response = _query('linode', 'boot',
                      args={'LinodeID': linode_id,
                            'ConfigID': config_id})['DATA']
    boot_job_id = response['JobID']
    if not _wait_for_job(linode_id, boot_job_id):
        log.error('Boot failed for Linode {0}.'.format(linode_item))
        return False
    return True
def clone(kwargs=None, call=None):
    '''
    Clone an existing Linode.
    linode_id
        The ID of the Linode to clone. Required.
    datacenter_id
        The ID of the Datacenter where the Linode will be placed. Required.
    plan_id
        The ID of the plan (size) of the Linode. Required.
    CLI Example:
    .. code-block:: bash
        salt-cloud -f clone my-linode-config linode_id=1234567 datacenter_id=2 plan_id=5
    '''
    if call == 'action':
        raise SaltCloudSystemExit(
            'The clone function must be called with -f or --function.'
        )
    kwargs = {} if kwargs is None else kwargs
    linode_id = kwargs.get('linode_id', None)
    datacenter_id = kwargs.get('datacenter_id', None)
    plan_id = kwargs.get('plan_id', None)
    # All three IDs are mandatory.
    if any(value is None for value in (linode_id, datacenter_id, plan_id)):
        raise SaltCloudSystemExit(
            'The clone function requires a \'linode_id\', \'datacenter_id\', '
            'and \'plan_id\' to be provided.'
        )
    return _query('linode', 'clone', args={
        'LinodeID': linode_id,
        'DatacenterID': datacenter_id,
        'PlanID': plan_id
    })
def create(vm_):
    '''
    Create a single Linode VM.
    vm\_
        The VM profile dictionary. Drives size, image, location, cloning,
        private-IP assignment, and bootstrap settings.
    Returns the bootstrap result dict merged with the new node's data, or
    False when validation fails or the API call errors out.
    '''
    name = vm_['name']
    try:
        # Check for required profile parameters before sending any API calls.
        if vm_['profile'] and config.is_profile_configured(__opts__,
                                                           __active_provider_name__ or 'linode',
                                                           vm_['profile'],
                                                           vm_=vm_) is False:
            return False
    except AttributeError:
        pass
    # Since using "provider: <provider-engine>" is deprecated, alias provider
    # to use driver: "driver: <provider-engine>"
    if 'provider' in vm_:
        vm_['driver'] = vm_.pop('provider')
    if _validate_name(name) is False:
        return False
    __utils__['cloud.fire_event'](
        'event',
        'starting create',
        'salt/cloud/{0}/creating'.format(name),
        {
            'name': name,
            'profile': vm_['profile'],
            'provider': vm_['driver'],
        },
        transport=__opts__['transport']
    )
    log.info('Creating Cloud VM {0}'.format(name))
    data = {}
    kwargs = {'name': name}
    plan_id = None
    size = vm_.get('size')
    if size:
        kwargs['size'] = size
        plan_id = get_plan_id(kwargs={'label': size})
    datacenter_id = None
    location = vm_.get('location')
    if location:
        try:
            datacenter_id = get_datacenter_id(location)
        except KeyError:
            # Linode's default datacenter is Dallas, but we still have to set one to
            # use the create function from Linode's API. Dallas's datacenter id is 2.
            datacenter_id = 2
    clonefrom_name = vm_.get('clonefrom')
    cloning = True if clonefrom_name else False
    if cloning:
        # Clone path: inherit plan/datacenter from the source when not set.
        linode_id = get_linode_id_from_name(clonefrom_name)
        clone_source = get_linode(kwargs={'linode_id': linode_id})
        kwargs = {
            'clonefrom': clonefrom_name,
            'image': 'Clone of {0}'.format(clonefrom_name),
        }
        if size is None:
            size = clone_source['TOTALRAM']
            kwargs['size'] = size
            plan_id = clone_source['PLANID']
        if location is None:
            datacenter_id = clone_source['DATACENTERID']
        # Create new Linode from cloned Linode
        try:
            result = clone(kwargs={'linode_id': linode_id,
                                   'datacenter_id': datacenter_id,
                                   'plan_id': plan_id})
        except Exception as err:
            log.error(
                'Error cloning \'{0}\' on Linode.\n\n'
                'The following exception was thrown by Linode when trying to '
                'clone the specified machine:\n'
                '{1}'.format(
                    clonefrom_name,
                    err
                ),
                exc_info_on_loglevel=logging.DEBUG
            )
            return False
    else:
        kwargs['image'] = vm_['image']
        # Create Linode
        try:
            result = _query('linode', 'create', args={
                'PLANID': plan_id,
                'DATACENTERID': datacenter_id
            })
        except Exception as err:
            log.error(
                'Error creating {0} on Linode\n\n'
                'The following exception was thrown by Linode when trying to '
                'run the initial deployment:\n'
                '{1}'.format(
                    name,
                    err
                ),
                exc_info_on_loglevel=logging.DEBUG
            )
            return False
    __utils__['cloud.fire_event'](
        'event',
        'requesting instance',
        'salt/cloud/{0}/requesting'.format(name),
        {'kwargs': kwargs},
        transport=__opts__['transport']
    )
    node_id = _clean_data(result)['LinodeID']
    data['id'] = node_id
    # Creation failure here is logged but not fatal; we still try to proceed.
    if not _wait_for_status(node_id, status=(_get_status_id_by_name('brand_new'))):
        log.error(
            'Error creating {0} on LINODE\n\n'
            'while waiting for initial ready status'.format(name),
            exc_info_on_loglevel=logging.DEBUG
        )
    # Update the Linode's Label to reflect the given VM name
    update_linode(node_id, update_args={'Label': name})
    log.debug('Set name for {0} - was linode{1}.'.format(name, node_id))
    # Add private IP address if requested
    private_ip_assignment = get_private_ip(vm_)
    if private_ip_assignment:
        create_private_ip(node_id)
    # Define which ssh_interface to use
    ssh_interface = _get_ssh_interface(vm_)
    # If ssh_interface is set to use private_ips, but assign_private_ip
    # wasn't set to True, let's help out and create a private ip.
    if ssh_interface == 'private_ips' and private_ip_assignment is False:
        create_private_ip(node_id)
        private_ip_assignment = True
    if cloning:
        config_id = get_config_id(kwargs={'linode_id': node_id})['config_id']
    else:
        # Create disks and get ids
        log.debug('Creating disks for {0}'.format(name))
        root_disk_id = create_disk_from_distro(vm_, node_id)['DiskID']
        swap_disk_id = create_swap_disk(vm_, node_id)['DiskID']
        # Create a ConfigID using disk ids
        config_id = create_config(kwargs={'name': name,
                                          'linode_id': node_id,
                                          'root_disk_id': root_disk_id,
                                          'swap_disk_id': swap_disk_id})['ConfigID']
    # Boot the Linode
    boot(kwargs={'linode_id': node_id,
                 'config_id': config_id,
                 'check_running': False})
    node_data = get_linode(kwargs={'linode_id': node_id})
    ips = get_ips(node_id)
    state = int(node_data['STATUS'])
    data['image'] = kwargs['image']
    data['name'] = name
    data['size'] = size
    data['state'] = _get_status_descr_by_id(state)
    data['private_ips'] = ips['private_ips']
    data['public_ips'] = ips['public_ips']
    # Pass the correct IP address to the bootstrap ssh_host key
    if ssh_interface == 'private_ips':
        vm_['ssh_host'] = data['private_ips'][0]
    else:
        vm_['ssh_host'] = data['public_ips'][0]
    # If a password wasn't supplied in the profile or provider config, set it now.
    vm_['password'] = get_password(vm_)
    # Bootstrap!
    ret = __utils__['cloud.bootstrap'](vm_, __opts__)
    ret.update(data)
    log.info('Created Cloud VM \'{0}\''.format(name))
    log.debug(
        '\'{0}\' VM creation details:\n{1}'.format(
            name, pprint.pformat(data)
        )
    )
    __utils__['cloud.fire_event'](
        'event',
        'created instance',
        'salt/cloud/{0}/created'.format(name),
        {
            'name': name,
            'profile': vm_['profile'],
            'provider': vm_['driver'],
        },
        transport=__opts__['transport']
    )
    return ret
def create_config(kwargs=None, call=None):
    '''
    Creates a Linode Configuration Profile.
    name
        The name of the VM to create the config for.
    linode_id
        The ID of the Linode to create the configuration for.
    root_disk_id
        The Root Disk ID to be used for this config.
    swap_disk_id
        The Swap Disk ID to be used for this config.
    data_disk_id
        The Data Disk ID to be used for this config. Optional.
    .. versionadded:: 2016.3.0
    kernel_id
        The ID of the kernel to use for this configuration profile.
        Defaults to 138 (latest 64-bit Linux kernel).
    '''
    if call == 'action':
        raise SaltCloudSystemExit(
            'The create_config function must be called with -f or --function.'
        )
    if kwargs is None:
        kwargs = {}
    name = kwargs.get('name', None)
    linode_id = kwargs.get('linode_id', None)
    root_disk_id = kwargs.get('root_disk_id', None)
    swap_disk_id = kwargs.get('swap_disk_id', None)
    data_disk_id = kwargs.get('data_disk_id', None)
    kernel_id = kwargs.get('kernel_id', None)
    if kernel_id is None:
        # 138 appears to always be the latest 64-bit kernel for Linux
        kernel_id = 138
    required_params = [name, linode_id, root_disk_id, swap_disk_id]
    for item in required_params:
        if item is None:
            # Grammar fix: previously read 'functions requires'.
            raise SaltCloudSystemExit(
                'The create_config function requires a \'name\', \'linode_id\', '
                '\'root_disk_id\', and \'swap_disk_id\'.'
            )
    # The optional data disk is appended as the third entry when provided.
    disklist = '{0},{1}'.format(root_disk_id, swap_disk_id)
    if data_disk_id is not None:
        disklist = '{0},{1},{2}'.format(root_disk_id, swap_disk_id, data_disk_id)
    config_args = {'LinodeID': linode_id,
                   'KernelID': kernel_id,
                   'Label': name,
                   'DiskList': disklist
                   }
    result = _query('linode', 'config.create', args=config_args)
    return _clean_data(result)
def create_disk_from_distro(vm_, linode_id, swap_size=None):
    r'''
    Creates the disk for the Linode from the distribution.
    vm\_
        The VM profile to create the disk for.
    linode_id
        The ID of the Linode to create the distribution disk for. Required.
    swap_size
        The size of the swap disk, in MB; subtracted from the total disk
        size to compute the root disk size. Defaults to the profile's
        swap size (see get_swap_size).
    '''
    kwargs = {}
    if swap_size is None:
        swap_size = get_swap_size(vm_)
    pub_key = get_pub_key(vm_)
    root_password = get_password(vm_)
    if pub_key:
        kwargs.update({'rootSSHKey': pub_key})
    if root_password:
        kwargs.update({'rootPass': root_password})
    else:
        # A root password is mandatory for disk creation.
        raise SaltCloudConfigError(
            'The Linode driver requires a password.'
        )
    kwargs.update({'LinodeID': linode_id,
                   'DistributionID': get_distribution_id(vm_),
                   'Label': vm_['name'],
                   'Size': get_disk_size(vm_, swap_size, linode_id)})
    result = _query('linode', 'disk.createfromdistribution', args=kwargs)
    return _clean_data(result)
def create_swap_disk(vm_, linode_id, swap_size=None):
    r'''
    Creates the swap disk for the specified Linode.
    vm\_
        The VM profile to create the swap disk for.
    linode_id
        The ID of the Linode to create the swap disk for.
    swap_size
        The size of the disk, in MB. Falls back to the profile's swap size.
    '''
    if not swap_size:
        swap_size = get_swap_size(vm_)
    disk_args = {
        'LinodeID': linode_id,
        'Label': vm_['name'],
        'Type': 'swap',
        'Size': swap_size,
    }
    return _clean_data(_query('linode', 'disk.create', args=disk_args))
def create_data_disk(vm_=None, linode_id=None, data_size=None):
    r'''
    Create a data disk for the linode (type is hardcoded to ext4 at the moment)
    .. versionadded:: 2016.3.0
    vm\_
        The VM profile to create the data disk for.
    linode_id
        The ID of the Linode to create the data disk for.
    data_size
        The size of the disk, in MB.
    '''
    disk_args = {
        'LinodeID': linode_id,
        # Label the disk after the VM with a '_data' suffix.
        'Label': vm_['name'] + '_data',
        'Type': 'ext4',
        'Size': data_size,
    }
    return _clean_data(_query('linode', 'disk.create', args=disk_args))
def create_private_ip(linode_id):
    r'''
    Creates a private IP for the specified Linode.
    linode_id
        The ID of the Linode to create the IP address for.
    '''
    response = _query('linode', 'ip.addprivate',
                      args={'LinodeID': linode_id})
    return _clean_data(response)
def destroy(name, call=None):
    '''
    Destroys a Linode by name.
    name
        The name of VM to be destroyed.
    CLI Example:
    .. code-block:: bash
        salt-cloud -d vm_name
    '''
    if call == 'function':
        raise SaltCloudException(
            'The destroy action must be called with -d, --destroy, '
            '-a or --action.'
        )
    __utils__['cloud.fire_event'](
        'event',
        'destroying instance',
        'salt/cloud/{0}/destroying'.format(name),
        {'name': name},
        transport=__opts__['transport']
    )
    linode_id = get_linode_id_from_name(name)
    # skipChecks=True forces deletion even when the Linode still has disks.
    response = _query('linode', 'delete', args={'LinodeID': linode_id, 'skipChecks': True})
    __utils__['cloud.fire_event'](
        'event',
        'destroyed instance',
        'salt/cloud/{0}/destroyed'.format(name),
        {'name': name},
        transport=__opts__['transport']
    )
    # Drop the minion's cache entry so stale data is not served later.
    if __opts__.get('update_cachedir', False) is True:
        __utils__['cloud.delete_minion_cachedir'](name, __active_provider_name__.split(':')[0], __opts__)
    return response
def get_config_id(kwargs=None, call=None):
    '''
    Returns a config_id for a given linode.
    .. versionadded:: 2015.8.0
    name
        The name of the Linode for which to get the config_id. Can be used instead
        of ``linode_id``.
    linode_id
        The ID of the Linode for which to get the config_id. Can be used instead
        of ``name``.
    CLI Example:
    .. code-block:: bash
        salt-cloud -f get_config_id my-linode-config name=my-linode
        salt-cloud -f get_config_id my-linode-config linode_id=1234567
    '''
    if call == 'action':
        raise SaltCloudException(
            'The get_config_id function must be called with -f or --function.'
        )
    if kwargs is None:
        kwargs = {}
    name = kwargs.get('name', None)
    linode_id = kwargs.get('linode_id', None)
    if name is None and linode_id is None:
        raise SaltCloudSystemExit(
            'The get_config_id function requires either a \'name\' or a \'linode_id\' '
            'to be provided.'
        )
    if linode_id is None:
        linode_id = get_linode_id_from_name(name)
    # Only the first configuration profile of the Linode is returned.
    response = _query('linode', 'config.list', args={'LinodeID': linode_id})['DATA']
    config_id = {'config_id': response[0]['ConfigID']}
    return config_id
def get_datacenter_id(location):
    '''
    Returns the Linode Datacenter ID.
    location
        The location, or name, of the datacenter to get the ID from.
    Raises KeyError when the location is unknown (callers rely on this).
    '''
    datacenters = avail_locations()
    return datacenters[location]['DATACENTERID']
def get_disk_size(vm_, swap, linode_id):
    r'''
    Returns the size of the root disk in MB.
    vm\_
        The VM to get the disk size for.
    swap
        The swap size in MB, subtracted from the plan's total disk size
        when no explicit 'disk_size' is configured.
    linode_id
        The ID of the Linode whose total disk size ('TOTALHD') is queried.
    '''
    disk_size = get_linode(kwargs={'linode_id': linode_id})['TOTALHD']
    return config.get_cloud_config_value(
        'disk_size', vm_, __opts__, default=disk_size - swap
    )
def get_data_disk_size(vm_, swap, linode_id):
    '''
    Return the size of the data disk in MB: whatever is left of the plan's
    total disk after the root disk and swap are accounted for.
    .. versionadded:: 2016.3.0
    '''
    total_hd = get_linode(kwargs={'linode_id': linode_id})['TOTALHD']
    root_size = config.get_cloud_config_value(
        'disk_size', vm_, __opts__, default=total_hd - swap
    )
    return total_hd - root_size - swap
def get_distribution_id(vm_):
    r'''
    Returns the distribution ID for a VM
    vm\_
        The VM to get the distribution ID for
    '''
    image_label = config.get_cloud_config_value('image', vm_, __opts__)
    available = _query('avail', 'distributions')['DATA']
    # First distribution whose label matches the configured image wins.
    distro_id = next(
        (item['DISTRIBUTIONID'] for item in available
         if item['LABEL'] == image_label),
        None
    )
    if distro_id is not None:
        return distro_id
    raise SaltCloudNotFound(
        'The DistributionID for the \'{0}\' profile could not be found.\n'
        'The \'{1}\' instance could not be provisioned.'.format(
            image_label,
            vm_['name']
        )
    )
def get_ips(linode_id=None):
    '''
    Returns public and private IP addresses.
    linode_id
        Limits the IP addresses returned to the specified Linode ID; the
        result is then a flat {'public_ips': [...], 'private_ips': [...]}
        dict rather than one keyed by Linode ID.
    '''
    query_args = {'LinodeID': linode_id} if linode_id else None
    ip_records = _query('linode', 'ip.list', args=query_args)['DATA']
    ret = {}
    for record in ip_records:
        node_id = str(record['LINODEID'])
        bucket = ret.setdefault(node_id, {'public_ips': [], 'private_ips': []})
        field = 'public_ips' if record['ISPUBLIC'] == 1 else 'private_ips'
        bucket[field].append(record['IPADDRESS'])
    # If linode_id was specified, only return the ips, and not the
    # dictionary based on the linode ID as a key.
    if linode_id:
        flat = {'public_ips': [], 'private_ips': []}
        matching = ret.get(str(linode_id))
        if matching:
            flat['private_ips'] = matching['private_ips']
            flat['public_ips'] = matching['public_ips']
        ret = flat
    return ret
def get_linode(kwargs=None, call=None):
    '''
    Returns data for a single named Linode.
    name
        The name of the Linode for which to get data. Can be used instead of
        ``linode_id``. Note this will induce an additional API call
        compared to using ``linode_id``.
    linode_id
        The ID of the Linode for which to get data. Can be used instead of
        ``name``.
    CLI Example:
    .. code-block:: bash
        salt-cloud -f get_linode my-linode-config name=my-instance
        salt-cloud -f get_linode my-linode-config linode_id=1234567
    '''
    if call == 'action':
        raise SaltCloudSystemExit(
            'The get_linode function must be called with -f or --function.'
        )
    if kwargs is None:
        kwargs = {}
    name = kwargs.get('name', None)
    linode_id = kwargs.get('linode_id', None)
    if name is None and linode_id is None:
        raise SaltCloudSystemExit(
            'The get_linode function requires either a \'name\' or a \'linode_id\'.'
        )
    if linode_id is None:
        linode_id = get_linode_id_from_name(name)
    # linode.list with a LinodeID returns exactly one record.
    result = _query('linode', 'list', args={'LinodeID': linode_id})
    return result['DATA'][0]
def get_linode_id_from_name(name):
    '''
    Returns the Linode ID for a VM from the provided name.
    name
        The name of the Linode from which to get the Linode ID. Required.
    '''
    for node in _query('linode', 'list')['DATA']:
        # Return the first Linode whose label matches.
        if node['LABEL'] == name:
            return node['LINODEID']
    raise SaltCloudNotFound(
        'The specified name, {0}, could not be found.'.format(name)
    )
def get_password(vm_):
    r'''
    Return the password to use for a VM.
    vm\_
        The configuration to obtain the password from. Checks 'password'
        first, then falls back to the legacy 'passwd' option.
    '''
    legacy_password = config.get_cloud_config_value(
        'passwd', vm_, __opts__,
        search_global=False
    )
    return config.get_cloud_config_value(
        'password', vm_, __opts__,
        default=legacy_password,
        search_global=False
    )
def get_plan_id(kwargs=None, call=None):
    '''
    Returns the Linode Plan ID.
    label
        The label, or name, of the plan to get the ID from.
    CLI Example:
    .. code-block:: bash
        salt-cloud -f get_plan_id linode label="Linode 1024"
    '''
    if call == 'action':
        # Fixed copy-paste error: the message previously named
        # 'show_instance' instead of this function.
        raise SaltCloudException(
            'The get_plan_id function must be called with -f or --function.'
        )
    if kwargs is None:
        kwargs = {}
    label = kwargs.get('label', None)
    if label is None:
        raise SaltCloudException(
            'The get_plan_id function requires a \'label\'.'
        )
    return avail_sizes()[label]['PLANID']
def get_private_ip(vm_):
    '''
    Return True if a private ip address is requested
    '''
    # Legacy alias: migrate 'private_ip' to 'assign_private_ip' with a
    # deprecation warning.
    if 'private_ip' in vm_:
        warn_until(
            'Carbon',
            'The \'private_ip\' option is being deprecated in favor of the '
            '\'assign_private_ip\' option. Please convert your Linode configuration '
            'files to use \'assign_private_ip\'.'
        )
        vm_['assign_private_ip'] = vm_.pop('private_ip')
    return config.get_cloud_config_value(
        'assign_private_ip', vm_, __opts__, default=False
    )
def get_data_disk(vm_):
    '''
    Return True if a data disk is requested
    .. versionadded:: 2016.3.0
    '''
    # Driven by the 'allocate_data_disk' profile option; defaults to False.
    return config.get_cloud_config_value('allocate_data_disk', vm_, __opts__,
                                         default=False)
def get_pub_key(vm_):
    r'''
    Return the SSH pubkey.
    vm\_
        The configuration to obtain the public key from.
    '''
    # Profile/provider-level only; no global fallback.
    return config.get_cloud_config_value('ssh_pubkey', vm_, __opts__,
                                         search_global=False)
def get_swap_size(vm_):
    r'''
    Returns the amount of swap space to be used in MB.
    vm\_
        The VM profile to obtain the swap size from.
    '''
    # 128 MB is the fallback when the profile does not set 'swap'.
    return config.get_cloud_config_value('swap', vm_, __opts__, default=128)
def get_vm_size(vm_):
    r'''
    Returns the VM's size (the plan's RAM, in MB).
    vm\_
        The VM to get the size for.
    '''
    vm_size = config.get_cloud_config_value('size', vm_, __opts__)
    ram = avail_sizes()[vm_size]['RAM']
    # Size labels look like 'Linode 1024'; strip the prefix so the numeric
    # part can be compared to the plan's RAM value.
    if vm_size.startswith('Linode'):
        vm_size = vm_size.replace('Linode ', '')
    # NOTE(review): if the stripped size is not numeric, int() raises
    # ValueError rather than SaltCloudNotFound — confirm that is intended.
    if ram == int(vm_size):
        return ram
    else:
        raise SaltCloudNotFound(
            'The specified size, {0}, could not be found.'.format(vm_size)
        )
def list_nodes(call=None):
    '''
    Returns a list of linodes, keeping only a brief listing.
    CLI Example:
    .. code-block:: bash
        salt-cloud -Q
        salt-cloud --query
        salt-cloud -f list_nodes my-linode-config
    .. note::
        The ``image`` label only displays information about the VM's distribution vendor,
        such as "Debian" or "RHEL" and does not display the actual image name. This is
        due to a limitation of the Linode API.
    '''
    if call == 'action':
        raise SaltCloudException(
            'The list_nodes function must be called with -f or --function.'
        )
    # Brief listing: delegate to the shared helper without the raw records.
    return _list_linodes()
def list_nodes_full(call=None):
    '''
    List linodes, with all available information.
    CLI Example:
    .. code-block:: bash
        salt-cloud -F
        salt-cloud --full-query
        salt-cloud -f list_nodes_full my-linode-config
    .. note::
        The ``image`` label only displays information about the VM's distribution vendor,
        such as "Debian" or "RHEL" and does not display the actual image name. This is
        due to a limitation of the Linode API.
    '''
    if call == 'action':
        raise SaltCloudException(
            'The list_nodes_full function must be called with -f or --function.'
        )
    # Full listing: include the raw API record under each node's 'extra' key.
    return _list_linodes(full=True)
def list_nodes_min(call=None):
    '''
    Return a list of the VMs that are on the provider. Only a list of VM names and
    their state is returned. This is the minimum amount of information needed to
    check for existing VMs.
    .. versionadded:: 2015.8.0
    CLI Example:
    .. code-block:: bash
        salt-cloud -f list_nodes_min my-linode-config
        salt-cloud --function list_nodes_min my-linode-config
    '''
    if call == 'action':
        raise SaltCloudSystemExit(
            'The list_nodes_min function must be called with -f or --function.'
        )
    # Single API call; only id and state are extracted per node.
    return dict(
        (node['LABEL'], {
            'id': str(node['LINODEID']),
            'state': _get_status_descr_by_id(int(node['STATUS']))
        })
        for node in _query('linode', 'list')['DATA']
    )
def list_nodes_select(call=None):
    '''
    Return a list of the VMs that are on the provider, with select fields.
    The fields are chosen via the 'query.selection' option in __opts__.
    '''
    return salt.utils.cloud.list_nodes_select(
        list_nodes_full(), __opts__['query.selection'], call,
    )
def reboot(name, call=None):
    '''
    Reboot a linode.
    .. versionadded:: 2015.8.0
    name
        The name of the VM to reboot.
    CLI Example:
    .. code-block:: bash
        salt-cloud -a reboot vm_name
    '''
    if call != 'action':
        # Fixed copy-paste error: the message previously named
        # 'show_instance' instead of this action.
        raise SaltCloudException(
            'The reboot action must be called with -a or --action.'
        )
    node_id = get_linode_id_from_name(name)
    response = _query('linode', 'reboot', args={'LinodeID': node_id})
    data = _clean_data(response)
    reboot_jid = data['JobID']
    # Block until the reboot job reports success.
    if not _wait_for_job(node_id, reboot_jid):
        log.error('Reboot failed for {0}.'.format(name))
        return False
    return data
def show_instance(name, call=None):
    '''
    Displays details about a particular Linode VM. Either a name or a linode_id must
    be provided.
    .. versionadded:: 2015.8.0
    name
        The name of the VM for which to display details.
    CLI Example:
    .. code-block:: bash
        salt-cloud -a show_instance vm_name
    .. note::
        The ``image`` label only displays information about the VM's distribution vendor,
        such as "Debian" or "RHEL" and does not display the actual image name. This is
        due to a limitation of the Linode API.
    '''
    if call != 'action':
        raise SaltCloudException(
            'The show_instance action must be called with -a or --action.'
        )
    node_id = get_linode_id_from_name(name)
    node_data = get_linode(kwargs={'linode_id': node_id})
    ips = get_ips(node_id)
    # The API reports STATUS as a string/number; normalize to int for lookup.
    state = int(node_data['STATUS'])
    ret = {'id': node_data['LINODEID'],
           'image': node_data['DISTRIBUTIONVENDOR'],
           'name': node_data['LABEL'],
           'size': node_data['TOTALRAM'],
           'state': _get_status_descr_by_id(state),
           'private_ips': ips['private_ips'],
           'public_ips': ips['public_ips']}
    return ret
def show_pricing(kwargs=None, call=None):
    '''
    Show pricing for a particular profile. This is only an estimate, based on
    unofficial pricing sources.
    .. versionadded:: 2015.8.0
    CLI Example:
    .. code-block:: bash
        salt-cloud -f show_pricing my-linode-config profile=my-linode-profile
    '''
    if call != 'function':
        # Fixed copy-paste error: the message previously named
        # 'show_instance' instead of this function.
        raise SaltCloudException(
            'The show_pricing function must be called with -f or --function.'
        )
    profile = __opts__['profiles'].get(kwargs['profile'], {})
    if not profile:
        raise SaltCloudNotFound(
            'The requested profile was not found.'
        )
    # Make sure the profile belongs to Linode
    provider = profile.get('provider', '0:0')
    comps = provider.split(':')
    if len(comps) < 2 or comps[1] != 'linode':
        raise SaltCloudException(
            'The requested profile does not belong to Linode.'
        )
    plan_id = get_plan_id(kwargs={'label': profile['size']})
    response = _query('avail', 'linodeplans', args={'PlanID': plan_id})['DATA'][0]
    # Extrapolate day/week/year figures from the hourly and monthly rates.
    ret = {}
    ret['per_hour'] = response['HOURLY']
    ret['per_day'] = ret['per_hour'] * 24
    ret['per_week'] = ret['per_day'] * 7
    ret['per_month'] = response['PRICE']
    ret['per_year'] = ret['per_month'] * 12
    return {profile['profile']: ret}
def start(name, call=None):
    '''
    Start a VM in Linode.
    name
        The name of the VM to start.
    CLI Example:
    .. code-block:: bash
        salt-cloud -a start vm_name
    '''
    if call != 'action':
        raise SaltCloudException(
            'The start action must be called with -a or --action.'
        )
    node_id = get_linode_id_from_name(name)
    node = get_linode(kwargs={'linode_id': node_id})
    # Status code 1 == 'Running' (see LINODE_STATUS).
    if node['STATUS'] == 1:
        return {'success': True,
                'action': 'start',
                'state': 'Running',
                'msg': 'Machine already running'}
    response = _query('linode', 'boot', args={'LinodeID': node_id})['DATA']
    if _wait_for_job(node_id, response['JobID']):
        return {'state': 'Running',
                'action': 'start',
                'success': True}
    else:
        return {'action': 'start',
                'success': False}
def stop(name, call=None):
    '''
    Stop a VM in Linode.
    name
        The name of the VM to stop.
    CLI Example:
    .. code-block:: bash
        salt-cloud -a stop vm_name
    '''
    if call != 'action':
        raise SaltCloudException(
            'The stop action must be called with -a or --action.'
        )
    node_id = get_linode_id_from_name(name)
    node = get_linode(kwargs={'linode_id': node_id})
    # Status code 2 == 'Powered Off' (see LINODE_STATUS).
    if node['STATUS'] == 2:
        # Include 'action' for consistency with the start() return shape.
        return {'success': True,
                'action': 'stop',
                'state': 'Stopped',
                'msg': 'Machine already stopped'}
    response = _query('linode', 'shutdown', args={'LinodeID': node_id})['DATA']
    if _wait_for_job(node_id, response['JobID']):
        return {'state': 'Stopped',
                'action': 'stop',
                'success': True}
    else:
        return {'action': 'stop',
                'success': False}
def update_linode(linode_id, update_args=None):
    '''
    Updates a Linode's properties.
    linode_id
        The ID of the Linode to update. Required.
    update_args
        The args to update the Linode with. Must be in dictionary form.
    '''
    # Guard against the default: calling .update() on None raised
    # AttributeError when no update_args were supplied.
    if update_args is None:
        update_args = {}
    update_args.update({'LinodeID': linode_id})
    result = _query('linode', 'update', args=update_args)
    return _clean_data(result)
def _clean_data(api_response):
'''
Returns the DATA response from a Linode API query as a single pre-formatted dictionary
api_response
The query to be cleaned.
'''
data = {}
data.update(api_response['DATA'])
if not data:
response_data = api_response['DATA']
data.update(response_data)
return data
def _list_linodes(full=False):
    '''
    Helper function to format and parse linode data
    full
        When True, attach the raw API record under each node's 'extra' key.
    '''
    nodes = _query('linode', 'list')['DATA']
    ips = get_ips()
    ret = {}
    for node in nodes:
        this_node = {}
        linode_id = str(node['LINODEID'])
        this_node['id'] = linode_id
        this_node['image'] = node['DISTRIBUTIONVENDOR']
        this_node['name'] = node['LABEL']
        this_node['size'] = node['TOTALRAM']
        state = int(node['STATUS'])
        this_node['state'] = _get_status_descr_by_id(state)
        # get_ips() keys its results by Linode ID as a string.
        for key, val in six.iteritems(ips):
            if key == linode_id:
                this_node['private_ips'] = val['private_ips']
                this_node['public_ips'] = val['public_ips']
        if full:
            this_node['extra'] = node
        ret[node['LABEL']] = this_node
    return ret
def _query(action=None,
           command=None,
           args=None,
           method='GET',
           header_dict=None,
           data=None,
           url='https://api.linode.com/'):
    '''
    Make a web call to the Linode API.

    action
        API subject area (e.g. ``linode``); combined with ``command`` into
        the ``api_action`` query parameter.

    command
        API command within ``action`` (e.g. ``shutdown``).

    args
        Extra query parameters. The API key and ``api_action`` are injected
        here if not already present.

    method
        HTTP method to use. Non-POST requests get a JSON Accept header;
        DELETE responses are not decoded.

    header_dict
        Extra HTTP headers.

    data
        Request body, passed straight through to the HTTP layer.

    url
        Base URL of the Linode API endpoint.
    '''
    # LASTCALL holds the wall-clock second of the previous API call so we
    # can rate-limit back-to-back requests.
    global LASTCALL
    vm_ = get_configured_provider()
    ratelimit_sleep = config.get_cloud_config_value(
        'ratelimit_sleep', vm_, __opts__, search_global=False, default=0,
    )
    apikey = config.get_cloud_config_value(
        'apikey', vm_, __opts__, search_global=False
    )
    if not isinstance(args, dict):
        args = {}
    if 'api_key' not in args.keys():
        args['api_key'] = apikey
    if action and 'api_action' not in args.keys():
        args['api_action'] = '{0}.{1}'.format(action, command)
    if header_dict is None:
        header_dict = {}
    if method != 'POST':
        header_dict['Accept'] = 'application/json'
    decode = True
    if method == 'DELETE':
        decode = False
    now = int(time.mktime(datetime.datetime.now().timetuple()))
    # If this call lands in the same second as (or before) the previous one,
    # sleep the configured ratelimit before hitting the API again.
    if LASTCALL >= now:
        time.sleep(ratelimit_sleep)
    result = salt.utils.http.query(
        url,
        method,
        params=args,
        data=data,
        header_dict=header_dict,
        decode=decode,
        decode_type='json',
        text=True,
        status=True,
        hide_fields=['api_key', 'rootPass'],  # keep secrets out of logs
        opts=__opts__,
    )
    LASTCALL = int(time.mktime(datetime.datetime.now().timetuple()))
    log.debug(
        'Linode Response Status Code: {0}'.format(
            result['status']
        )
    )
    return result['dict']
def _wait_for_job(linode_id, job_id, timeout=300, quiet=True):
    '''
    Wait for a Linode job to complete.

    linode_id
        The ID of the Linode to wait on. Required.

    job_id
        The ID of the job to wait for.

    timeout
        The amount of time to wait for a status to update.

    quiet
        Log status updates to debug logs when True. Otherwise, logs to info.
    '''
    poll_interval = 5
    attempts = int(timeout / poll_interval)

    for _ in range(attempts):
        job_list = _query('linode',
                          'job.list',
                          args={'LinodeID': linode_id})['DATA']
        newest = job_list[0]
        # The job list is newest-first; success means the matching job
        # reports HOST_SUCCESS == 1.
        if newest['JOBID'] == job_id and newest['HOST_SUCCESS'] == 1:
            return True

        time.sleep(poll_interval)
        message = 'Still waiting on Job {0} for Linode {1}.'.format(
            job_id,
            linode_id)
        if quiet:
            log.debug(message)
        else:
            log.info(message)
    return False
def _wait_for_status(linode_id, status=None, timeout=300, quiet=True):
    '''
    Wait for a certain status from Linode.

    linode_id
        The ID of the Linode to wait on. Required.

    status
        The status to look for to update.

    timeout
        The amount of time to wait for a status to update.

    quiet
        Log status updates to debug logs when True. Otherwise, logs to info.
    '''
    if status is None:
        status = _get_status_id_by_name('brand_new')

    status_desc_waiting = _get_status_descr_by_id(status)

    interval = 5
    iterations = int(timeout / interval)

    for i in range(0, iterations):
        result = get_linode(kwargs={'linode_id': linode_id})

        if result['STATUS'] == status:
            return True

        status_desc_result = _get_status_descr_by_id(result['STATUS'])

        time.sleep(interval)
        # Match _wait_for_job's convention: quiet polling goes to the debug
        # log, verbose polling to info. The previous implementation had this
        # condition (and its docstring) inverted.
        if quiet:
            log.debug('Status for Linode {0} is \'{1}\', waiting for \'{2}\'.'.format(
                linode_id,
                status_desc_result,
                status_desc_waiting)
            )
        else:
            log.info('Status for Linode {0} is \'{1}\', waiting for \'{2}\'.'.format(
                linode_id,
                status_desc_result,
                status_desc_waiting)
            )
    return False
def _get_status_descr_by_id(status_id):
    '''
    Return the linode status description for a numeric status ID.

    status_id
        linode VM status ID
    '''
    # six.iteritems keeps this working on Python 2 and 3; the previous
    # dict.iteritems() call is Python-2 only and inconsistent with the rest
    # of this module (_list_linodes already uses six.iteritems).
    for status_name, status_data in six.iteritems(LINODE_STATUS):
        if status_data['code'] == int(status_id):
            return status_data['descr']
    # LINODE_STATUS is keyed by status *name*, so the old fallback lookup by
    # numeric ID could never match; return None explicitly for unknown IDs.
    return None
def _get_status_id_by_name(status_name):
    '''
    Return the numeric status code for an internal linode status name.

    status_name
        internal linode VM status name
    '''
    entry = LINODE_STATUS.get(status_name, {})
    return entry.get('code', None)
def _validate_name(name):
'''
Checks if the provided name fits Linode's labeling parameters.
.. versionadded:: 2015.5.6
name
The VM name to validate
'''
name = str(name)
name_length = len(name)
regex = re.compile(r'^[a-zA-Z0-9][A-Za-z0-9_-]*[a-zA-Z0-9]$')
if name_length < 3 or name_length > 48:
ret = False
elif not re.match(regex, name):
ret = False
else:
ret = True
if ret is False:
log.warning(
'A Linode label may only contain ASCII letters or numbers, dashes, and '
'underscores, must begin and end with letters or numbers, and be at least '
'three characters in length.'
)
return ret
def _get_ssh_interface(vm_):
    '''
    Return the ssh_interface type to connect to. Either 'public_ips' (default)
    or 'private_ips'.
    '''
    # Resolved only from the VM/provider config, never from global config.
    return config.get_cloud_config_value(
        'ssh_interface',
        vm_,
        __opts__,
        default='public_ips',
        search_global=False,
    )
| {
"content_hash": "9a74587190c167368a9e0b42c366e917",
"timestamp": "",
"source": "github",
"line_count": 1671,
"max_line_length": 111,
"avg_line_length": 26.590664272890486,
"alnum_prop": 0.5654355996669143,
"repo_name": "stephane-martin/salt-debian-packaging",
"id": "df77c62f6ccd0d094abaab5a3c30d1389f4025d4",
"size": "44457",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "salt-2016.3.3/salt/cloud/clouds/linode.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "13798"
},
{
"name": "C",
"bytes": "986"
},
{
"name": "Groff",
"bytes": "13634346"
},
{
"name": "HTML",
"bytes": "39558"
},
{
"name": "Makefile",
"bytes": "20902"
},
{
"name": "NSIS",
"bytes": "22316"
},
{
"name": "PowerShell",
"bytes": "38719"
},
{
"name": "Python",
"bytes": "40857506"
},
{
"name": "SaltStack",
"bytes": "58278"
},
{
"name": "Scheme",
"bytes": "1790"
},
{
"name": "Shell",
"bytes": "829927"
},
{
"name": "Tcl",
"bytes": "6532"
},
{
"name": "TeX",
"bytes": "11632"
}
],
"symlink_target": ""
} |
from lxml import etree
class elements:
    def __init__(self):
        pass

    def strip_empty_elements(self, doc):
        """Remove empty elements from the document.

        Solr date fields don't like to be empty - hence why this
        method exists. As it turns out, it can't hurt to ditch empty
        elements - less to submit. Hence why it's generic

        @params:
        doc: the XML document
        """
        # Snapshot the matches first: removing nodes from an lxml tree while
        # iterating doc.iter() can skip the sibling that follows each
        # removed element, leaving some empty fields behind.
        empty_fields = [elem for elem in doc.iter('field')
                        if elem.text is None]
        for elem in empty_fields:
            elem.getparent().remove(elem)
| {
"content_hash": "21b7ee41b2993e13806f4f033537271b",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 68,
"avg_line_length": 27.6,
"alnum_prop": 0.5778985507246377,
"repo_name": "MLR-au/esrc-indexer",
"id": "94af1c5bf988c956688644191ee7e506d3dac7e6",
"size": "1923",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "indexer/clean/empty.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "45421"
},
{
"name": "XSLT",
"bytes": "140523"
}
],
"symlink_target": ""
} |
from lxml import etree
from oslo.serialization import jsonutils
import webob
from nova.compute import flavors
from nova import test
from nova.tests.api.openstack import fakes
# Canned flavor fixtures served by the fake DB-layer stubs below.
# 'flavor 1' carries a concrete rxtx_factor while 'flavor 2' leaves it
# None, so the tests cover both rendered values.
FAKE_FLAVORS = {
    'flavor 1': {
        "flavorid": '1',
        "name": 'flavor 1',
        "memory_mb": '256',
        "root_gb": '10',
        "swap": '5',
        "disabled": False,
        "ephemeral_gb": '20',
        "rxtx_factor": '1.0',
        "vcpus": 1,
    },
    'flavor 2': {
        "flavorid": '2',
        "name": 'flavor 2',
        "memory_mb": '512',
        "root_gb": '10',
        "swap": '10',
        "ephemeral_gb": '25',
        "rxtx_factor": None,
        "disabled": False,
        "vcpus": 1,
    },
}
def fake_flavor_get_by_flavor_id(flavorid, ctxt=None):
    """Stub for the flavor DB lookup; returns the canned fixture."""
    key = 'flavor {0}'.format(flavorid)
    return FAKE_FLAVORS[key]
def fake_get_all_flavors_sorted_list(context=None, inactive=False,
                                     filters=None, sort_key='flavorid',
                                     sort_dir='asc', limit=None, marker=None):
    """Stub returning both canned flavors; all filter arguments are ignored."""
    return [fake_flavor_get_by_flavor_id(fid) for fid in (1, 2)]
class FlavorRxtxTestV21(test.NoDBTestCase):
    # Verifies that the os-flavor-rxtx extension exposes rxtx_factor in
    # flavor show/detail responses on the v2.1 API.
    content_type = 'application/json'
    _prefix = "/v2/fake"

    def setUp(self):
        """Enable the extension and stub network/flavor lookups with fixtures."""
        super(FlavorRxtxTestV21, self).setUp()
        ext = ('nova.api.openstack.compute.contrib'
               '.flavor_rxtx.Flavor_rxtx')
        self.flags(osapi_compute_extension=[ext])
        fakes.stub_out_nw_api(self.stubs)
        self.stubs.Set(flavors, "get_all_flavors_sorted_list",
                       fake_get_all_flavors_sorted_list)
        self.stubs.Set(flavors,
                       "get_flavor_by_flavor_id",
                       fake_flavor_get_by_flavor_id)

    def _make_request(self, url):
        """Issue a request against the test WSGI app and return the response."""
        req = webob.Request.blank(url)
        req.headers['Accept'] = self.content_type
        res = req.get_response(self._get_app())
        return res

    def _get_app(self):
        """Return the v2.1 WSGI app with only the needed controllers loaded."""
        return fakes.wsgi_app_v21(init_only=('servers',
                                             'flavors', 'os-flavor-rxtx'))

    def _get_flavor(self, body):
        """Extract the single flavor dict from a JSON response body."""
        return jsonutils.loads(body).get('flavor')

    def _get_flavors(self, body):
        """Extract the flavor list from a JSON response body."""
        return jsonutils.loads(body).get('flavors')

    def assertFlavorRxtx(self, flavor, rxtx):
        # Compare the stringified rxtx_factor so numeric and empty renderings
        # are both checked the same way.
        self.assertEqual(str(flavor.get('rxtx_factor')), rxtx)

    def test_show(self):
        """GET /flavors/1 includes the fixture's rxtx_factor of 1.0."""
        url = self._prefix + '/flavors/1'
        res = self._make_request(url)
        self.assertEqual(res.status_int, 200)
        self.assertFlavorRxtx(self._get_flavor(res.body), '1.0')

    def test_detail(self):
        """GET /flavors/detail renders rxtx_factor for every flavor."""
        url = self._prefix + '/flavors/detail'
        res = self._make_request(url)
        self.assertEqual(res.status_int, 200)
        flavors = self._get_flavors(res.body)
        self.assertFlavorRxtx(flavors[0], '1.0')
        # flavor 2 has rxtx_factor None in the fixture, rendered as ''.
        self.assertFlavorRxtx(flavors[1], '')
class FlavorRxtxTestV20(FlavorRxtxTestV21):
    # Same assertions as V21, run against the legacy v2.0 WSGI app.
    def _get_app(self):
        return fakes.wsgi_app()
class FlavorRxtxXmlTest(FlavorRxtxTestV20):
    # XML variant of the v2.0 tests; response bodies are parsed with lxml.
    content_type = 'application/xml'

    def _get_flavor(self, body):
        # The XML root element *is* the flavor; attribute access via .get().
        return etree.XML(body)

    def _get_flavors(self, body):
        return etree.XML(body).getchildren()
| {
"content_hash": "a4bb243fd21f19b9f893afa403147dc2",
"timestamp": "",
"source": "github",
"line_count": 113,
"max_line_length": 78,
"avg_line_length": 28.353982300884955,
"alnum_prop": 0.5749063670411985,
"repo_name": "maelnor/nova",
"id": "9b4a06cc1496584af575e3aded6a39c643cc5939",
"size": "3809",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "nova/tests/api/openstack/compute/contrib/test_flavor_rxtx.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "15078191"
},
{
"name": "Shell",
"bytes": "18352"
}
],
"symlink_target": ""
} |
from distutils.core import setup
setup(
name = 'dnsdiff',
packages = ['dnsdiff'], # this must be the same as the name above
scripts=['dnsdiff/dnsdiff'],
version = '1.3',
description = 'Utility to quickly suss out discrepancies between nameservers',
author = 'M Anzuoni',
author_email = 'me.anzuoni@gmail.com',
url = 'https://github.com/gawkermedia/dnsdiff', # use the URL to the github repo
download_url = 'https://github.com/gawkermedia/dnsdiff/archive/1.3.tar.gz', # I'll explain this in a second
keywords = ['testing', 'dns',], # arbitrary keywords
classifiers = [],
install_requires=['dnspython']
) | {
"content_hash": "865c7e05050134e00a4035fcf44c3c49",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 109,
"avg_line_length": 39.1875,
"alnum_prop": 0.6953748006379585,
"repo_name": "gawkermedia/dnsdiff",
"id": "1559d0060273b010bec6a35fdb4da7d7e5555f92",
"size": "627",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "2953"
}
],
"symlink_target": ""
} |
"""Tests for tensorflow.python.client.session.Session."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import random
import os
import sys
import threading
import time
import warnings
import numpy as np
import six
from six.moves import xrange # pylint: disable=redefined-builtin
from tensorflow.core.framework import attr_value_pb2
from tensorflow.core.lib.core import error_codes_pb2
from tensorflow.core.protobuf import config_pb2
from tensorflow.python.client import session
from tensorflow.python.framework import common_shapes
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import device as framework_device_lib
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import function
from tensorflow.python.framework import importer
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import tensor_util
from tensorflow.python.framework import test_util
from tensorflow.python.framework import versions
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import data_flow_ops
from tensorflow.python.ops import gen_control_flow_ops
# Import gradients to resolve circular imports
from tensorflow.python.ops import gradients # pylint: disable=unused-import
from tensorflow.python.ops import gradients_impl
from tensorflow.python.ops import math_ops
# Import resource_variable_ops for the variables-to-tensor implicit conversion.
from tensorflow.python.ops import resource_variable_ops # pylint: disable=unused-import
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import googletest
from tensorflow.python.training import server_lib
from tensorflow.python.util import compat
try:
import attr # pylint:disable=g-import-not-at-top
except ImportError:
attr = None
# NOTE(mrry): Dummy shape registration for ops used in the tests, since they
# don't have C++ op registrations on which to attach C++ shape fns.
# Registering unknown_shape lets graph construction proceed without shape
# inference for this test-only op.
ops.RegisterShape('ConstructionFails')(common_shapes.unknown_shape)
class SessionTest(test_util.TensorFlowTestCase):
def setUp(self):
    """Reset warning filters so each test observes every warning raised."""
    super(SessionTest, self).setUp()
    warnings.simplefilter('always')
def testUseExistingGraph(self):
    """A Session bound to an explicit graph evaluates that graph's ops."""
    with ops.Graph().as_default() as g, ops.device('/cpu:0'):
        a = constant_op.constant(6.0, shape=[1, 1])
        b = constant_op.constant(7.0, shape=[1, 1])
        c = math_ops.matmul(a, b, name='matmul')
    with session.Session(graph=g):
        result = c.eval()
        self.assertAllEqual(result, [[42.0]])
def testUseDefaultGraph(self):
    """A Session created without a graph argument uses the default graph."""
    with ops.Graph().as_default(), ops.device('/cpu:0'):
        a = constant_op.constant(6.0, shape=[1, 1])
        b = constant_op.constant(7.0, shape=[1, 1])
        c = math_ops.matmul(a, b, name='matmul')
        with session.Session():
            result = c.eval()
            self.assertAllEqual(result, [[42.0]])
def testCreate(self):
    """Tensor.eval works both with and without a feed dict."""
    with session.Session():
        inp = constant_op.constant(10.0, shape=[2, 3], name='W1')
        copy = array_ops.identity(inp)
        # Test with feed.
        # TODO(mrry): Investigate why order='F' didn't work.
        arr = np.asarray([[0, 1, 2], [3, 4, 5]], dtype=np.float32, order='C')
        copy_val = copy.eval({'W1:0': arr})
        self.assertAllEqual(arr, copy_val)
        # Test without feed.
        copy_val = copy.eval()
        self.assertAllEqual(
            np.asarray(
                [[10.0, 10.0, 10.0], [10.0, 10.0, 10.0]], dtype=np.float32),
            copy_val)
def testManyCPUs(self):
    """device_count config yields the requested CPU devices and no GPUs."""
    with session.Session(
        config=config_pb2.ConfigProto(device_count={
            'CPU': 2, 'GPU': 0
        })) as sess:
        inp = constant_op.constant(10.0, name='W1')
        self.assertAllEqual(inp.eval(), 10.0)

        # Count devices by type as reported by the session itself.
        num_cpu_devices = 0
        num_gpu_devices = 0
        for device in sess.list_devices():
            device_type = framework_device_lib.DeviceSpec.from_string(
                device.name).device_type
            if device_type == 'CPU':
                num_cpu_devices += 1
            elif device_type == 'GPU':
                num_gpu_devices += 1
        self.assertEqual(2, num_cpu_devices)
        self.assertEqual(0, num_gpu_devices)
def testPerSessionThreads(self):
    """A session configured with per-session threads still evaluates ops."""
    with session.Session(
        config=config_pb2.ConfigProto(use_per_session_threads=True)):
        inp = constant_op.constant(10.0, name='W1')
        self.assertAllEqual(inp.eval(), 10.0)
def testSessionInterOpThreadPool(self):
    """Sessions run correctly with default, sized, and named inter-op pools."""
    config = config_pb2.ConfigProto()
    # Pool 0: default settings.
    pool = config.session_inter_op_thread_pool.add()
    with session.Session(config=config) as s:
        inp = constant_op.constant(10.0, name='W1')
        results = s.run([inp])
        self.assertAllEqual([10.0], results)

    # Pool 1: explicitly single-threaded.
    pool = config.session_inter_op_thread_pool.add()
    pool.num_threads = 1
    with session.Session(config=config) as s:
        inp = constant_op.constant(20.0, name='W2')
        results = s.run([inp])
        self.assertAllEqual([20.0], results)

    # Pool 2: single-threaded and globally named; select it via RunOptions.
    pool = config.session_inter_op_thread_pool.add()
    pool.num_threads = 1
    pool.global_name = 't1'
    run_options = config_pb2.RunOptions()
    run_options.inter_op_thread_pool = (
        len(config.session_inter_op_thread_pool) - 1)
    with session.Session(config=config) as s:
        inp = constant_op.constant(30.0, name='W2')
        results = s.run([inp], options=run_options)
        self.assertAllEqual([30.0], results)
def testErrorsReported(self):
    """Fetching an unknown tensor name raises ValueError."""
    with session.Session() as s:
        constant_op.constant(10.0, name='W1')
        with self.assertRaises(ValueError):
            s.run('foo:0')
def testErrorPayload(self):
    """An error from evaluating an unfed placeholder carries the op."""
    with session.Session():
        a = array_ops.placeholder(dtypes.float32)
        with self.assertRaisesOpError(lambda e: e.op == a.op):
            a.eval()
def testErrorCodeWithNoNodeDef(self):
    """A bad partial_run handle reports INVALID_ARGUMENT with no node_def."""
    with session.Session() as s:
        a = array_ops.placeholder(dtypes.float32, shape=[])
        b = array_ops.placeholder(dtypes.float32, shape=[])
        r1 = math_ops.add(a, b)

        def exc_predicate(e):
            # The failure is not attributable to any graph node.
            return (e.op is None and e.node_def is None and
                    e.error_code == error_codes_pb2.INVALID_ARGUMENT)
        with self.assertRaisesOpError(exc_predicate):
            # Run with a bogus handle.
            s.partial_run('foo', r1, feed_dict={a: 1, b: 2})
def testErrorBasedOn(self):
    """Errors expose the _original_op chain of the failing op."""
    with session.Session() as sess:
        a = constant_op.constant(0.0, shape=[2, 3])
        # NOTE(mrry): The original_op is nonsense, but used here to test that the
        # errors are reported correctly.
        with sess.graph._original_op(a.op):
            b = array_ops.identity(a, name='id')
        with sess.graph._original_op(b.op):
            c = array_ops.placeholder(dtypes.float32)

        def exc_predicate(e):
            # Walk the chain c -> b -> a recorded via _original_op above.
            return (e.op == c.op and e.op._original_op == b.op and
                    e.op._original_op._original_op == a.op)
        with self.assertRaisesOpError(exc_predicate):
            c.eval()
def testFetchNone(self):
    """Fetching None — bare or inside any structure — raises TypeError."""
    with session.Session() as s:
        a = constant_op.constant(1.0)
        with self.assertRaises(TypeError):
            s.run(None)
        with self.assertRaises(TypeError):
            s.run([None])
        with self.assertRaises(TypeError):
            s.run({'b': None})
        with self.assertRaises(TypeError):
            s.run({'a': a, 'b': None})
def testFetchSingleton(self):
    """A single tensor fetch returns its value; an op fetch returns None."""
    with session.Session() as sess:
        a = constant_op.constant(42.0)
        res = sess.run(a)
        self.assertEqual(42.0, res)
        res = sess.run(a.op)  # An op, not a tensor.
        self.assertEqual(None, res)
        # make_callable must mirror run() for both tensors and ops.
        tensor_runner = sess.make_callable(a)
        res = tensor_runner()
        self.assertEqual(42.0, res)
        op_runner = sess.make_callable(a.op)
        res = op_runner()
        self.assertEqual(None, res)
def testFetchSingletonByName(self):
    """Fetching by tensor name string behaves like fetching the tensor."""
    with session.Session() as sess:
        a = constant_op.constant(42.0)
        res = sess.run(a.name)
        self.assertEqual(42.0, res)
        res = sess.run(a.op)  # An op, not a tensor.
        self.assertEqual(None, res)
def testFetchList(self):
    """A list fetch preserves order and maps ops to None."""
    with session.Session() as sess:
        a = constant_op.constant(42.0)
        b = control_flow_ops.no_op()  # An op, not a tensor.
        c = constant_op.constant(44.0)
        v = variables.Variable([54.0])
        assign = v.assign([63.0])
        res = sess.run([a, b, c, a.name, assign.op])
        self.assertTrue(isinstance(res, list))
        self.assertEqual([42.0, None, 44.0, 42.0, None], res)
        # make_callable must produce the same structure as run().
        list_runner = sess.make_callable([a, b, c, a.name, assign.op])
        res = list_runner()
        self.assertTrue(isinstance(res, list))
        self.assertEqual([42.0, None, 44.0, 42.0, None], res)
def testFetchTuple(self):
    """A tuple fetch returns a tuple with ops mapped to None."""
    with session.Session() as sess:
        a = constant_op.constant(42.0)
        b = control_flow_ops.no_op()  # An op, not a tensor.
        c = constant_op.constant(44.0)
        res = sess.run((a, b, c, a.name))
        self.assertTrue(isinstance(res, tuple))
        self.assertEqual((42.0, None, 44.0, 42.0), res)
        # make_callable must produce the same structure as run().
        tuple_runner = sess.make_callable((a, b, c, a.name))
        res = tuple_runner()
        self.assertTrue(isinstance(res, tuple))
        self.assertEqual((42.0, None, 44.0, 42.0), res)
def testFetchNamedTuple(self):
    """A namedtuple fetch returns the same namedtuple type, field by field."""
    # pylint: disable=invalid-name
    ABC = collections.namedtuple('ABC', ['a', 'b', 'c'])
    # pylint: enable=invalid-name
    with session.Session() as sess:
        a = constant_op.constant(42.0)
        b = control_flow_ops.no_op()  # An op, not a tensor.
        c = constant_op.constant(44.0)
        res = sess.run(ABC(a, b, c))
        self.assertTrue(isinstance(res, ABC))
        self.assertEqual(42.0, res.a)
        self.assertEqual(None, res.b)
        self.assertEqual(44.0, res.c)
        # make_callable must produce the same structure as run().
        namedtuple_runner = sess.make_callable(ABC(a, b, c))
        res = namedtuple_runner()
        self.assertTrue(isinstance(res, ABC))
        self.assertEqual(42.0, res.a)
        self.assertEqual(None, res.b)
        self.assertEqual(44.0, res.c)
def testFetchDict(self):
    """A dict fetch returns a dict keyed as given, with ops mapped to None."""
    with session.Session() as sess:
        a = constant_op.constant(42.0)
        b = control_flow_ops.no_op()  # An op, not a tensor.
        c = constant_op.constant(44.0)
        res = sess.run({'a': a, 'b': b, 'c': c})
        self.assertTrue(isinstance(res, dict))
        self.assertEqual(42.0, res['a'])
        self.assertEqual(None, res['b'])
        self.assertEqual(44.0, res['c'])
def testFetchOrderedDict(self):
    """An OrderedDict fetch preserves key order in the result."""
    with session.Session() as sess:
        a = constant_op.constant(42.0)
        b = control_flow_ops.no_op()  # An op, not a tensor.
        c = constant_op.constant(44.0)
        res = sess.run(collections.OrderedDict([(3, a), (2, b), (1, c)]))
        self.assertTrue(isinstance(res, collections.OrderedDict))
        self.assertEqual([3, 2, 1], list(res.keys()))
        self.assertEqual(42.0, res[3])
        self.assertEqual(None, res[2])
        self.assertEqual(44.0, res[1])
@test_util.run_v1_only('b/120545219')
def testFetchAttrs(self):
    """attrs-decorated classes can be fetched and used as feed keys."""
    if attr is None:
        self.skipTest('attr module is unavailable.')

    @attr.s
    class SampleAttr(object):
        field1 = attr.ib()
        field2 = attr.ib()

    val1 = np.array([1.2, 3.4, 5.6])
    val2 = np.array([[1, 2], [4, 3]])
    val3 = np.array([10, 20, 30])

    t1 = constant_op.constant(val1)
    t2 = constant_op.constant(val2)

    sample = SampleAttr(t1, t2)
    with session.Session() as sess:
        result = sess.run(sample)
        self.assertIsInstance(result, SampleAttr)
        self.assertAllEqual(val1, result.field1)
        self.assertAllEqual(val2, result.field2)

        # Feeding through an attrs field overrides that field's value.
        result = sess.run(sample, feed_dict={sample.field1: val3})
        self.assertIsInstance(result, SampleAttr)
        self.assertAllEqual(val3, result.field1)
        self.assertAllEqual(val2, result.field2)
@test_util.run_v1_only('b/120545219')
def testFetchNestedAttrs(self):
    """attrs instances nest with dicts and lists and are rebuilt on fetch."""
    if attr is None:
        self.skipTest('attr module is unavailable.')

    @attr.s
    class SampleAttr(object):
        field0 = attr.ib()
        field1 = attr.ib()

    v1 = 10
    v2 = 20
    v3 = np.float32(1.2)
    v4 = np.float32(3.4)
    v5 = np.float64(100.001)
    v6 = np.float64(-23.451)
    arr1 = np.array([1.2, 6.7, 3.4])
    arr2 = np.array([7, 11, 3])

    # Three levels of nesting: attrs of attrs, a dict of attrs, and a list
    # of attrs inside the dict.
    sample = SampleAttr(
        SampleAttr(
            SampleAttr(constant_op.constant(v1), constant_op.constant(v2)),
            SampleAttr(constant_op.constant(arr1), constant_op.constant(arr2))),
        {'A': SampleAttr(constant_op.constant(v3), constant_op.constant(v4)),
         'B': [SampleAttr(constant_op.constant(v5), constant_op.constant(v6))]})

    with session.Session() as sess:
        result = sess.run(sample)
        self.assertIsInstance(result, SampleAttr)
        self.assertIsInstance(result.field0, SampleAttr)
        self.assertIsInstance(result.field0.field0, SampleAttr)
        self.assertIsInstance(result.field0.field1, SampleAttr)
        self.assertIsInstance(result.field0.field1.field0, np.ndarray)
        self.assertAllEqual(arr1, result.field0.field1.field0)
        self.assertIsInstance(result.field0.field1.field1, np.ndarray)
        self.assertAllEqual(arr2, result.field0.field1.field1)
        self.assertIsInstance(result.field1, dict)
        self.assertIn('A', result.field1)
        self.assertIn('B', result.field1)
        self.assertIsInstance(result.field1['A'], SampleAttr)
        self.assertAllEqual(
            [v3, v4],
            [result.field1['A'].field0, result.field1['A'].field1])
        self.assertIsInstance(result.field1['B'], list)
        self.assertEqual(1, len(result.field1['B']))
        self.assertIsInstance(result.field1['B'][0], SampleAttr)
        self.assertAllEqual(
            [v5, v6],
            [result.field1['B'][0].field0, result.field1['B'][0].field1])
def testFetchNestingEmptyOneLevel(self):
    """Empty lists/tuples/dicts in a fetch structure come back empty.

    Also checks that empty containers can be mixed with a real fetch.
    """
    with session.Session() as sess:
        a_val = 11.0
        a = constant_op.constant(a_val)

        res = sess.run([[], tuple(), {}])
        self.assertTrue(isinstance(res, list))
        # assertEquals is a deprecated alias; use assertEqual, consistent
        # with every other assertion in this file.
        self.assertEqual(3, len(res))
        self.assertTrue(isinstance(res[0], list))
        self.assertEqual(0, len(res[0]))
        self.assertTrue(isinstance(res[1], tuple))
        self.assertEqual(0, len(res[1]))
        self.assertTrue(isinstance(res[2], dict))
        self.assertEqual(0, len(res[2]))

        res = sess.run([[], tuple(), {}, a])
        self.assertTrue(isinstance(res, list))
        self.assertEqual(4, len(res))
        self.assertTrue(isinstance(res[0], list))
        self.assertEqual(0, len(res[0]))
        self.assertTrue(isinstance(res[1], tuple))
        self.assertEqual(0, len(res[1]))
        self.assertTrue(isinstance(res[2], dict))
        self.assertEqual(0, len(res[2]))
        self.assertEqual(a_val, res[3])
def testFetchNestingOneLevel(self):
    """One level of nesting works for every outer container type.

    Covers list/tuple/namedtuple/dict outers, each containing a list, a
    tuple, a namedtuple, and a dict of tensors, ops, and tensor names.
    """
    with session.Session() as sess:
        # pylint: disable=invalid-name
        ABC = collections.namedtuple('ABC', ['a', 'b', 'c'])
        DEFG = collections.namedtuple('DEFG', ['d', 'e', 'f', 'g'])
        # pylint: enable=invalid-name
        a_val = 42.0
        b_val = None
        c_val = 44.0
        a = constant_op.constant(a_val)
        b = control_flow_ops.no_op()  # An op, not a tensor.
        c = constant_op.constant(c_val)
        # List of lists, tuples, namedtuple, and dict
        res = sess.run([[a, b, c], (a, b, c),
                        ABC(a=a, b=b, c=c), {
                            'a': a.name,
                            'c': c,
                            'b': b
                        }])
        self.assertTrue(isinstance(res, list))
        self.assertEqual(4, len(res))
        self.assertTrue(isinstance(res[0], list))
        self.assertEqual(3, len(res[0]))
        self.assertEqual(a_val, res[0][0])
        self.assertEqual(b_val, res[0][1])
        self.assertEqual(c_val, res[0][2])
        self.assertTrue(isinstance(res[1], tuple))
        self.assertEqual(3, len(res[1]))
        self.assertEqual(a_val, res[1][0])
        self.assertEqual(b_val, res[1][1])
        self.assertEqual(c_val, res[1][2])
        self.assertTrue(isinstance(res[2], ABC))
        self.assertEqual(a_val, res[2].a)
        self.assertEqual(b_val, res[2].b)
        self.assertEqual(c_val, res[2].c)
        self.assertTrue(isinstance(res[3], dict))
        self.assertEqual(3, len(res[3]))
        self.assertEqual(a_val, res[3]['a'])
        self.assertEqual(b_val, res[3]['b'])
        self.assertEqual(c_val, res[3]['c'])
        # Tuple of lists, tuples, namedtuple, and dict
        res = sess.run(([a, b, c], (a.name, b, c), ABC(a=a, b=b, c=c), {
            'a': a,
            'c': c,
            'b': b
        }))
        self.assertTrue(isinstance(res, tuple))
        self.assertEqual(4, len(res))
        self.assertTrue(isinstance(res[0], list))
        self.assertEqual(3, len(res[0]))
        self.assertEqual(a_val, res[0][0])
        self.assertEqual(b_val, res[0][1])
        self.assertEqual(c_val, res[0][2])
        self.assertTrue(isinstance(res[1], tuple))
        self.assertEqual(3, len(res[1]))
        self.assertEqual(a_val, res[1][0])
        self.assertEqual(b_val, res[1][1])
        self.assertEqual(c_val, res[1][2])
        self.assertTrue(isinstance(res[2], ABC))
        self.assertEqual(a_val, res[2].a)
        self.assertEqual(b_val, res[2].b)
        self.assertEqual(c_val, res[2].c)
        self.assertTrue(isinstance(res[3], dict))
        self.assertEqual(3, len(res[3]))
        self.assertEqual(a_val, res[3]['a'])
        self.assertEqual(b_val, res[3]['b'])
        self.assertEqual(c_val, res[3]['c'])
        # Namedtuple of lists, tuples, namedtuples, and dict
        res = sess.run(
            DEFG(
                d=[a, b, c],
                e=(a, b, c),
                f=ABC(a=a.name, b=b, c=c),
                g={
                    'a': a,
                    'c': c,
                    'b': b
                }))
        self.assertTrue(isinstance(res, DEFG))
        self.assertTrue(isinstance(res.d, list))
        self.assertEqual(3, len(res.d))
        self.assertEqual(a_val, res.d[0])
        self.assertEqual(b_val, res.d[1])
        self.assertEqual(c_val, res.d[2])
        self.assertTrue(isinstance(res.e, tuple))
        self.assertEqual(3, len(res.e))
        self.assertEqual(a_val, res.e[0])
        self.assertEqual(b_val, res.e[1])
        self.assertEqual(c_val, res.e[2])
        self.assertTrue(isinstance(res.f, ABC))
        self.assertEqual(a_val, res.f.a)
        self.assertEqual(b_val, res.f.b)
        self.assertEqual(c_val, res.f.c)
        self.assertTrue(isinstance(res.g, dict))
        self.assertEqual(3, len(res.g))
        self.assertEqual(a_val, res.g['a'])
        self.assertEqual(b_val, res.g['b'])
        self.assertEqual(c_val, res.g['c'])
        # Dict of lists, tuples, namedtuples, and dict
        res = sess.run({
            'd': [a, b, c],
            'e': (a, b, c),
            'f': ABC(a=a, b=b, c=c),
            'g': {
                'a': a.name,
                'c': c,
                'b': b
            }
        })
        self.assertTrue(isinstance(res, dict))
        self.assertEqual(4, len(res))
        self.assertTrue(isinstance(res['d'], list))
        self.assertEqual(3, len(res['d']))
        self.assertEqual(a_val, res['d'][0])
        self.assertEqual(b_val, res['d'][1])
        self.assertEqual(c_val, res['d'][2])
        self.assertTrue(isinstance(res['e'], tuple))
        self.assertEqual(3, len(res['e']))
        self.assertEqual(a_val, res['e'][0])
        self.assertEqual(b_val, res['e'][1])
        self.assertEqual(c_val, res['e'][2])
        self.assertTrue(isinstance(res['f'], ABC))
        self.assertEqual(a_val, res['f'].a)
        self.assertEqual(b_val, res['f'].b)
        self.assertEqual(c_val, res['f'].c)
        self.assertTrue(isinstance(res['g'], dict))
        self.assertEqual(3, len(res['g']))
        self.assertEqual(a_val, res['g']['a'])
        self.assertEqual(b_val, res['g']['b'])
        self.assertEqual(c_val, res['g']['c'])
def testFetchTensorObject(self):
    """Tensor objects can be fetched singly, in lists, dicts, and nested."""
    with session.Session() as s:
        a = constant_op.constant(1.0, shape=[1, 2])
        b = constant_op.constant(2.0, shape=[2, 3])
        c = math_ops.matmul(a, b)
        results_with_list = s.run([c])
        self.assertAllEqual([[4.0, 4.0, 4.0]], results_with_list[0])
        results_with_single = s.run(c)
        self.assertAllEqual([[4.0, 4.0, 4.0]], results_with_single)
        results_with_get = c.eval()
        self.assertAllEqual([[4.0, 4.0, 4.0]], results_with_get)
        a_val, b_val = s.run([a, b])  # Test multiple fetches.
        self.assertAllEqual([[1.0, 1.0]], a_val)
        self.assertAllEqual([[2.0, 2.0, 2.0], [2.0, 2.0, 2.0]], b_val)
        results_with_dict = s.run({'a': [a], 'b': b, 'z': [a, b]})
        self.assertAllEqual([[1.0, 1.0]], results_with_dict['a'][0])
        self.assertAllEqual([[2.0, 2.0, 2.0], [2.0, 2.0, 2.0]],
                            results_with_dict['b'])
        # A tensor fetched via two paths must yield equal values.
        self.assertAllEqual(results_with_dict['a'][0], results_with_dict['z'][0])
        self.assertAllEqual(results_with_dict['b'], results_with_dict['z'][1])

        # Test nested structures
        results_with_nested_list = s.run([[[a, b], b], a, [a, b]])
        self.assertAllEqual([[1.0, 1.0]], results_with_nested_list[0][0][0])
        self.assertAllEqual([[2.0, 2.0, 2.0], [2.0, 2.0, 2.0]],
                            results_with_nested_list[0][0][1])
        self.assertAllEqual(results_with_nested_list[0][0][0],
                            results_with_nested_list[1])
        self.assertAllEqual(results_with_nested_list[1],
                            results_with_nested_list[2][0])
        self.assertAllEqual(results_with_nested_list[0][0][1],
                            results_with_nested_list[0][1])
        self.assertAllEqual(results_with_nested_list[0][1],
                            results_with_nested_list[2][1])
def testFetchScalar(self):
    """Scalar fetches preserve the numpy scalar type across fetch shapes."""
    with session.Session() as s:
        for scalar in np.int32, np.int64, np.float16, np.float32, np.float64:
            x = scalar(7)
            y = scalar(8)
            tf_x = constant_op.constant(x, shape=[])
            tf_y = constant_op.constant(y)
            tf_xy = math_ops.add(tf_x, tf_y)
            # Single fetch
            xy = s.run(tf_xy)
            self.assertEqual(scalar, type(xy))
            self.assertEqual(x + y, xy)
            # List fetch
            xy, = s.run([tf_xy])
            self.assertEqual(scalar, type(xy))
            self.assertEqual(x + y, xy)
            # Dict fetch
            xy = s.run({'xy': tf_xy})['xy']
            self.assertEqual(scalar, type(xy))
            self.assertEqual(x + y, xy)
            # Nested list fetch
            xy = s.run([[[tf_xy]], tf_xy, [tf_xy]])
            self.assertAllEqual(xy, [[[x + y]], x + y, [x + y]])
            self.assertEqual(scalar, type(xy[0][0][0]))
            self.assertEqual(scalar, type(xy[1]))
            self.assertEqual(scalar, type(xy[2][0]))
def testFetchOperationObject(self):
    """Running a variable's initializer op then fetching the variable works."""
    with session.Session() as s:
        a = constant_op.constant(1.0, shape=[1, 2])
        v = variables.Variable(a, name='testFetchOperationObject_v')
        s.run(v.initializer)
        v_val = s.run(v)
        self.assertAllEqual([[1.0, 1.0]], v_val)
def testFetchSparseTensor(self):
    """SparseTensor fetches unpack as tuples and as SparseTensorValue.

    Exercises single, tuple, list, dict, and nested-list fetch shapes.
    """
    with session.Session() as s:
        indices = np.array([[3, 2, 0], [4, 5, 1]]).astype(np.int64)
        values = np.array([1.0, 2.0]).astype(np.float32)
        shape = np.array([7, 9, 2]).astype(np.int64)
        sp = sparse_tensor.SparseTensor(
            constant_op.constant(indices), constant_op.constant(values),
            constant_op.constant(shape))
        # Single fetch, use as tuple
        sp_out = s.run(sp)
        indices_out, values_out, shape_out = sp_out
        self.assertAllEqual(indices_out, indices)
        self.assertAllEqual(values_out, values)
        self.assertAllEqual(shape_out, shape)
        # Single fetch, use as SparseTensorValue
        sp_out = s.run(sp)
        self.assertAllEqual(sp_out.indices, indices)
        self.assertAllEqual(sp_out.values, values)
        self.assertAllEqual(sp_out.dense_shape, shape)
        # Tuple fetch, use as tuple
        indices_out, values_out, shape_out = s.run(sp)
        self.assertAllEqual(indices_out, indices)
        self.assertAllEqual(values_out, values)
        self.assertAllEqual(shape_out, shape)
        # List fetch, use as tuple
        (indices_out, values_out, shape_out), = s.run([sp])
        self.assertAllEqual(indices_out, indices)
        self.assertAllEqual(values_out, values)
        self.assertAllEqual(shape_out, shape)
        # List fetch, use as SparseTensorValue
        sp_out, = s.run([sp])
        self.assertAllEqual(sp_out.indices, indices)
        self.assertAllEqual(sp_out.values, values)
        self.assertAllEqual(sp_out.dense_shape, shape)
        # Dict fetch (single value), use as tuple
        indices_out, values_out, shape_out = s.run({'sp': sp})['sp']
        self.assertAllEqual(indices_out, indices)
        self.assertAllEqual(values_out, values)
        self.assertAllEqual(shape_out, shape)
        # Dict fetch (list value), use as tuple
        (indices_out, values_out, shape_out), = s.run({'sp': [sp]})['sp']
        self.assertAllEqual(indices_out, indices)
        self.assertAllEqual(values_out, values)
        self.assertAllEqual(shape_out, shape)
        # Dict fetch, use as SparseTensorValue
        sp_out = s.run({'sp': sp})['sp']
        self.assertAllEqual(sp_out.indices, indices)
        self.assertAllEqual(sp_out.values, values)
        self.assertAllEqual(sp_out.dense_shape, shape)
        # Nested list fetch use as tuple
        sp_out = s.run([[[sp]], sp])
        indices_out, values_out, shape_out = sp_out[0][0][0]
        self.assertAllEqual(indices_out, indices)
        self.assertAllEqual(values_out, values)
        self.assertAllEqual(shape_out, shape)
        indices_out, values_out, shape_out = sp_out[1]
        self.assertAllEqual(indices_out, indices)
        self.assertAllEqual(values_out, values)
        self.assertAllEqual(shape_out, shape)
        # Nested list fetch, use as SparseTensorValue
        sp_out = s.run([[[sp]], sp])
        self.assertAllEqual(sp_out[0][0][0].indices, indices)
        self.assertAllEqual(sp_out[0][0][0].values, values)
        self.assertAllEqual(sp_out[0][0][0].dense_shape, shape)
        self.assertAllEqual(sp_out[1].indices, indices)
        self.assertAllEqual(sp_out[1].values, values)
        self.assertAllEqual(sp_out[1].dense_shape, shape)
  def testFeedSparseTensor(self):
    """Feed a placeholder-backed SparseTensor as a tuple or SparseTensorValue.

    Covers fetching the component tensors, fetching the SparseTensor itself,
    and fetching a SparseTensor derived from the fed components.
    """
    with session.Session() as s:
      indices = np.array([[3, 2, 0], [4, 5, 1]]).astype(np.int64)
      values = np.array([1.0, 2.0]).astype(np.float32)
      shape = np.array([7, 9, 2]).astype(np.int64)
      # All three components are placeholders, so the composite SparseTensor
      # can be used as a single feed_dict key.
      sp = sparse_tensor.SparseTensor(
          array_ops.placeholder(dtype=np.int64, shape=(2, 3)),
          array_ops.placeholder(dtype=np.float32, shape=(2,)),
          array_ops.placeholder(dtype=np.int64, shape=(3,)),
      )
      sp_indices = array_ops.identity(sp.indices)
      sp_values = array_ops.identity(sp.values)
      sp_shape = array_ops.identity(sp.dense_shape)
      # sp2 depends on the fed components only through identity ops.
      sp2 = sparse_tensor.SparseTensor(sp_indices, sp_values, sp_shape)
      # Feed with tuple
      indices_out, values_out, shape_out = s.run(
          [sp_indices, sp_values, sp_shape], {
              sp: (indices, values, shape)
          })
      self.assertAllEqual(indices_out, indices)
      self.assertAllEqual(values_out, values)
      self.assertAllEqual(shape_out, shape)
      # Feed with tuple, fetch sp directly
      sp_out = s.run(sp, {sp: (indices, values, shape)})
      self.assertAllEqual(sp_out.indices, indices)
      self.assertAllEqual(sp_out.values, values)
      self.assertAllEqual(sp_out.dense_shape, shape)
      # Feed with SparseTensorValue
      indices_out, values_out, shape_out = s.run(
          [sp_indices, sp_values, sp_shape], {
              sp: sparse_tensor.SparseTensorValue(indices, values, shape)
          })
      self.assertAllEqual(indices_out, indices)
      self.assertAllEqual(values_out, values)
      self.assertAllEqual(shape_out, shape)
      # Feed with SparseTensorValue, fetch SparseTensorValue
      sp2_out = s.run(sp2, {
          sp: sparse_tensor.SparseTensorValue(indices, values, shape)
      })
      self.assertAllEqual(sp2_out.indices, indices)
      self.assertAllEqual(sp2_out.values, values)
      self.assertAllEqual(sp2_out.dense_shape, shape)
      # Feed SparseTensorValue and fetch sp directly.
      sp_out = s.run(sp, {
          sp: sparse_tensor.SparseTensorValue(indices, values, shape)
      })
      self.assertAllEqual(sp_out.indices, indices)
      self.assertAllEqual(sp_out.values, values)
      self.assertAllEqual(sp_out.dense_shape, shape)
  def testFeedSparsePlaceholder(self):
    """Feed a sparse_placeholder (unknown shape) as tuple or SparseTensorValue."""
    with session.Session() as s:
      indices = np.array([[3, 2, 0], [4, 5, 1]]).astype(np.int64)
      values = np.array([1.0, 2.0]).astype(np.float32)
      shape = np.array([7, 9, 2]).astype(np.int64)
      # sparse_placeholder with no shape argument: dense shape is also fed.
      sp = array_ops.sparse_placeholder(dtype=np.float32, name='placeholder1')
      sp_indices = array_ops.identity(sp.indices)
      sp_values = array_ops.identity(sp.values)
      sp_shape = array_ops.identity(sp.dense_shape)
      sp2 = sparse_tensor.SparseTensor(sp_indices, sp_values, sp_shape)
      # Feed with tuple
      indices_out, values_out, shape_out = s.run(
          [sp_indices, sp_values, sp_shape], {
              sp: (indices, values, shape)
          })
      self.assertAllEqual(indices_out, indices)
      self.assertAllEqual(values_out, values)
      self.assertAllEqual(shape_out, shape)
      # Feed with SparseTensorValue
      indices_out, values_out, shape_out = s.run(
          [sp_indices, sp_values, sp_shape], {
              sp: sparse_tensor.SparseTensorValue(indices, values, shape)
          })
      self.assertAllEqual(indices_out, indices)
      self.assertAllEqual(values_out, values)
      self.assertAllEqual(shape_out, shape)
      # Feed with SparseTensorValue, fetch SparseTensorValue
      sp2_out = s.run(sp2, {
          sp: sparse_tensor.SparseTensorValue(indices, values, shape)
      })
      self.assertAllEqual(sp2_out.indices, indices)
      self.assertAllEqual(sp2_out.values, values)
      self.assertAllEqual(sp2_out.dense_shape, shape)
  def testFeedSparsePlaceholderPartialShape(self):
    """Feed a sparse_placeholder with a partially-known dense shape."""
    with session.Session() as s:
      indices = np.array([[3, 2, 0], [4, 5, 1]]).astype(np.int64)
      values = np.array([1.0, 2.0]).astype(np.float32)
      shape = np.array([7, 9, 2]).astype(np.int64)
      # First dimension unknown (None); the fed shape supplies it at runtime.
      sp = array_ops.sparse_placeholder(
          shape=[None, 9, 2], dtype=np.float32, name='placeholder1')
      sp_indices = array_ops.identity(sp.indices)
      sp_values = array_ops.identity(sp.values)
      sp_shape = array_ops.identity(sp.dense_shape)
      sp2 = sparse_tensor.SparseTensor(sp_indices, sp_values, sp_shape)
      # Feed with tuple
      indices_out, values_out, shape_out = s.run(
          [sp_indices, sp_values, sp_shape], {
              sp: (indices, values, shape)
          })
      self.assertAllEqual(indices_out, indices)
      self.assertAllEqual(values_out, values)
      self.assertAllEqual(shape_out, shape)
      # Feed with SparseTensorValue
      indices_out, values_out, shape_out = s.run(
          [sp_indices, sp_values, sp_shape], {
              sp: sparse_tensor.SparseTensorValue(indices, values, shape)
          })
      self.assertAllEqual(indices_out, indices)
      self.assertAllEqual(values_out, values)
      self.assertAllEqual(shape_out, shape)
      # Feed with SparseTensorValue, fetch SparseTensorValue
      sp2_out = s.run(sp2, {
          sp: sparse_tensor.SparseTensorValue(indices, values, shape)
      })
      self.assertAllEqual(sp2_out.indices, indices)
      self.assertAllEqual(sp2_out.values, values)
      self.assertAllEqual(sp2_out.dense_shape, shape)
  def testFeedSparsePlaceholderConstantShape(self):
    """A fully-known shape makes the placeholder's dense_shape a constant.

    Only indices and values are fed; the shape is baked into the graph and
    must be recoverable both by eval and by constant folding.
    """
    with session.Session() as s:
      indices = np.array([[3, 2, 0], [4, 5, 1]]).astype(np.int64)
      values = np.array([1.0, 2.0]).astype(np.float32)
      shape = np.array([7, 9, 2]).astype(np.int64)
      sp = array_ops.sparse_placeholder(
          dtype=np.float32, shape=shape, name='placeholder1')
      # dense_shape is a constant tensor: evaluable and constant-foldable.
      self.assertAllEqual(sp.dense_shape.eval(session=s), shape)
      self.assertAllEqual(tensor_util.constant_value(sp.dense_shape), shape)
      sp_indices = array_ops.identity(sp.indices)
      sp_values = array_ops.identity(sp.values)
      sp_shape = array_ops.identity(sp.dense_shape)
      # Feed with tuple
      indices_out, values_out, shape_out = s.run(
          [sp_indices, sp_values, sp_shape], {
              sp: (indices, values)
          })
      self.assertAllEqual(indices_out, indices)
      self.assertAllEqual(values_out, values)
      self.assertAllEqual(shape_out, shape)
  def testFetchIndexedSlices(self):
    """Fetch an IndexedSlices singly, in a tuple, and in a list.

    The fetched value unpacks as (values, indices, dense_shape) and also
    behaves as an IndexedSlicesValue with named attributes.
    """
    with session.Session() as s:
      indices = np.array([[3, 2, 0], [4, 5, 1]]).astype(np.int64)
      values = np.array([1.0, 2.0]).astype(np.float32)
      dense_shape = np.array([7, 9, 2]).astype(np.int64)
      ind = ops.IndexedSlices(
          constant_op.constant(values), constant_op.constant(indices),
          constant_op.constant(dense_shape))
      # Single fetch, use as tuple
      ind_out = s.run(ind)
      values_out, indices_out, dense_shape_out = ind_out
      self.assertAllEqual(values_out, values)
      self.assertAllEqual(indices_out, indices)
      self.assertAllEqual(dense_shape_out, dense_shape)
      # Single fetch, use as IndexedSlicesValue
      ind_out = s.run(ind)
      self.assertAllEqual(ind_out.values, values)
      self.assertAllEqual(ind_out.indices, indices)
      self.assertAllEqual(ind_out.dense_shape, dense_shape)
      # Tuple fetch, use as tuple
      values_out, indices_out, dense_shape_out = s.run(ind)
      self.assertAllEqual(values_out, values)
      self.assertAllEqual(indices_out, indices)
      self.assertAllEqual(dense_shape_out, dense_shape)
      # List fetch, use as tuple
      (values_out, indices_out, dense_shape_out), = s.run([ind])
      self.assertAllEqual(values_out, values)
      self.assertAllEqual(indices_out, indices)
      self.assertAllEqual(dense_shape_out, dense_shape)
      # List fetch, use as IndexedSlicesValue
      ind_out, = s.run([ind])
      self.assertAllEqual(ind_out.values, values)
      self.assertAllEqual(ind_out.indices, indices)
      self.assertAllEqual(ind_out.dense_shape, dense_shape)
  def testFeedIndexedSlices(self):
    """Feed a placeholder-backed IndexedSlices as a tuple or IndexedSlicesValue."""
    with session.Session() as s:
      values = np.array([1.0, 2.0]).astype(np.float32)
      indices = np.array([[3, 2, 0], [4, 5, 1]]).astype(np.int64)
      dense_shape = np.array([7, 9, 2]).astype(np.int64)
      # Note IndexedSlices component order: (values, indices, dense_shape).
      ind = ops.IndexedSlices(
          array_ops.placeholder(dtype=np.float32, shape=(2,)),
          array_ops.placeholder(dtype=np.int64, shape=(2, 3)),
          array_ops.placeholder(dtype=np.int64, shape=(3,)),
      )
      ind_values = array_ops.identity(ind.values)
      ind_indices = array_ops.identity(ind.indices)
      ind_dense_shape = array_ops.identity(ind.dense_shape)
      ind2 = ops.IndexedSlices(ind_values, ind_indices, ind_dense_shape)
      # Feed with tuple
      values_out, indices_out, dense_shape_out = s.run(
          [ind_values, ind_indices, ind_dense_shape], {
              ind: (values, indices, dense_shape)
          })
      self.assertAllEqual(values_out, values)
      self.assertAllEqual(indices_out, indices)
      self.assertAllEqual(dense_shape_out, dense_shape)
      # Feed with IndexedSlicesValue
      values_out, indices_out, dense_shape_out = s.run(
          [ind_values, ind_indices, ind_dense_shape], {
              ind: ops.IndexedSlicesValue(values, indices, dense_shape)
          })
      self.assertAllEqual(values_out, values)
      self.assertAllEqual(indices_out, indices)
      self.assertAllEqual(dense_shape_out, dense_shape)
      # Feed with IndexedSlicesValue, fetch IndexedSlicesValue
      ind2_out = s.run(ind2, {
          ind: ops.IndexedSlicesValue(values, indices, dense_shape)
      })
      self.assertAllEqual(ind2_out.values, values)
      self.assertAllEqual(ind2_out.indices, indices)
      self.assertAllEqual(ind2_out.dense_shape, dense_shape)
  def testFetchIndexedSlicesWithoutDenseShape(self):
    """Fetch an IndexedSlices whose dense_shape is None.

    The None dense_shape must survive the round trip in every fetch form.
    """
    with session.Session() as s:
      indices = np.array([[3, 2, 0], [4, 5, 1]]).astype(np.int64)
      values = np.array([1.0, 2.0]).astype(np.float32)
      dense_shape = None
      ind = ops.IndexedSlices(
          constant_op.constant(values), constant_op.constant(indices), None)
      # Single fetch, use as tuple
      ind_out = s.run(ind)
      values_out, indices_out, dense_shape_out = ind_out
      self.assertAllEqual(values_out, values)
      self.assertAllEqual(indices_out, indices)
      self.assertAllEqual(dense_shape_out, dense_shape)
      # Single fetch, use as IndexedSlicesValue
      ind_out = s.run(ind)
      self.assertAllEqual(ind_out.values, values)
      self.assertAllEqual(ind_out.indices, indices)
      self.assertAllEqual(ind_out.dense_shape, dense_shape)
      # Tuple fetch, use as tuple
      values_out, indices_out, dense_shape_out = s.run(ind)
      self.assertAllEqual(values_out, values)
      self.assertAllEqual(indices_out, indices)
      self.assertAllEqual(dense_shape_out, dense_shape)
      # List fetch, use as tuple
      (values_out, indices_out, dense_shape_out), = s.run([ind])
      self.assertAllEqual(values_out, values)
      self.assertAllEqual(indices_out, indices)
      self.assertAllEqual(dense_shape_out, dense_shape)
      # List fetch, use as IndexedSlicesValue
      ind_out, = s.run([ind])
      self.assertAllEqual(ind_out.values, values)
      self.assertAllEqual(ind_out.indices, indices)
      self.assertAllEqual(ind_out.dense_shape, dense_shape)
  def testFeedIndexedSlicesWithoutDenseShape(self):
    """Feed an IndexedSlices that has no dense_shape component."""
    with session.Session() as s:
      values = np.array([1.0, 2.0]).astype(np.float32)
      indices = np.array([[3, 2, 0], [4, 5, 1]]).astype(np.int64)
      dense_shape = None
      ind = ops.IndexedSlices(
          array_ops.placeholder(dtype=np.float32, shape=(2,)),
          array_ops.placeholder(dtype=np.int64, shape=(2, 3)), None)
      ind_values = array_ops.identity(ind.values)
      ind_indices = array_ops.identity(ind.indices)
      # ind2 also omits dense_shape, mirroring ind.
      ind2 = ops.IndexedSlices(ind_values, ind_indices)
      # Feed with tuple
      values_out, indices_out = s.run([ind_values, ind_indices], {
          ind: (values, indices)
      })
      self.assertAllEqual(values_out, values)
      self.assertAllEqual(indices_out, indices)
      # Feed with IndexedSlicesValue
      values_out, indices_out = s.run([ind_values, ind_indices], {
          ind: ops.IndexedSlicesValue(values, indices, dense_shape)
      })
      self.assertAllEqual(values_out, values)
      self.assertAllEqual(indices_out, indices)
      # Feed with IndexedSlicesValue, fetch IndexedSlicesValue
      ind2_out = s.run(ind2, {
          ind: ops.IndexedSlicesValue(values, indices, dense_shape)
      })
      self.assertAllEqual(ind2_out.values, values)
      self.assertAllEqual(ind2_out.indices, indices)
      self.assertAllEqual(ind2_out.dense_shape, dense_shape)
def testExtendWithStatelessOperations(self):
with session.Session() as s:
a = constant_op.constant(1.0, shape=[1, 2])
b = constant_op.constant(2.0, shape=[2, 3])
c = math_ops.matmul(a, b)
c_val = s.run(c)
self.assertAllEqual([[4.0, 4.0, 4.0]], c_val)
d = constant_op.constant([1.0, 2.0, 3.0], shape=[3, 1])
e = math_ops.matmul(c, d)
# Extend will happen here.
e_val = s.run(e)
self.assertAllEqual([[24.0]], e_val)
  def testExtendWithStatefulOperations(self):
    """Graph extension with a variable: v keeps its value until assigned."""
    with session.Session() as s:
      a = constant_op.constant(1.0, shape=[1, 2])
      b = constant_op.constant(2.0, shape=[2, 3])
      c = math_ops.matmul(a, b)
      v = variables.Variable(c, name='testExtendWithStatefulOperations_v')
      v.initializer.run()
      v_val = v.eval()
      self.assertAllEqual([[4.0, 4.0, 4.0]], v_val)
      d = constant_op.constant(3.0, shape=[2, 3])
      e = math_ops.matmul(a, d)
      assign_e_to_v = state_ops.assign(v, e)
      # Extend will happen here.
      e_val = e.eval()
      self.assertAllEqual([[6.0, 6.0, 6.0]], e_val)
      # Evaluating e must not have run the assign; v is unchanged.
      v_val = v.eval()
      self.assertAllEqual([[4.0, 4.0, 4.0]], v_val)
      s.run(assign_e_to_v)
      v_val = v.eval()
      self.assertAllEqual([[6.0, 6.0, 6.0]], v_val)
  def testExtendWithGroupBy(self):
    """A group() op built after an Extend still initializes both variables."""
    with session.Session() as s:
      a = constant_op.constant(1.0, shape=[1, 2])
      p = variables.Variable(a, name='testExtendWithGroupBy_p')
      a_val = a.eval()  # Force an Extend after this op.
      self.assertAllEqual([[1.0, 1.0]], a_val)
      b = constant_op.constant(2.0, shape=[1, 2])
      q = variables.Variable(b, name='testExtendWithGroupBy_q')
      # Extend will happen here.
      init = control_flow_ops.group(p.initializer, q.initializer)
      s.run(init)
      p_val, q_val = s.run([p, q])
      self.assertAllEqual([[1.0, 1.0]], p_val)
      self.assertAllEqual([[2.0, 2.0]], q_val)
def testTensorGetMethod(self):
with session.Session():
a = constant_op.constant(1.0, shape=[1, 2])
b = constant_op.constant(2.0, shape=[2, 3])
c = math_ops.matmul(a, b)
c_val = c.eval()
self.assertAllEqual([[4.0, 4.0, 4.0]], c_val)
fed_c_val = c.eval(feed_dict={a.name: [[4.0, 4.0]]})
self.assertAllEqual([[16.0, 16.0, 16.0]], fed_c_val)
  @test_util.run_v1_only('b/120545219')
  def testOperationRunMethod(self):
    """Operation.run() executes assigns, including with a fed input."""
    with session.Session():
      a = constant_op.constant(1.0, shape=[1, 2])
      b = constant_op.constant(2.0, shape=[1, 2], name='b')
      v = variables.VariableV1(a, a.dtype)
      assign_a_to_v = state_ops.assign(v, a)
      assign_a_to_v.eval()
      v_val = v.eval()
      self.assertAllEqual([[1.0, 1.0]], v_val)
      assign_b_to_v = state_ops.assign(v, b)
      assign_b_to_v.eval()
      v_val = v.eval()
      self.assertAllEqual([[2.0, 2.0]], v_val)
      # Feeding 'b:0' overrides the constant b for this one run.
      assign_b_to_v.eval(feed_dict={'b:0': [[3.0, 3.0]]})
      v_val = v.eval()
      self.assertAllEqual([[3.0, 3.0]], v_val)
  def testDefaultGraph(self):
    """Entering a Session makes its graph the thread's default graph."""
    with session.Session() as s:
      self.assertEqual(ops.get_default_graph(), s.graph)
      a = constant_op.constant(1.0, shape=[1, 2])
      b = constant_op.constant(2.0, shape=[2, 3])
      # New ops land in the session's graph.
      self.assertEqual(ops.get_default_graph(), a.graph)
      self.assertEqual(ops.get_default_graph(), b.graph)
      c = math_ops.matmul(a, b)
      v = variables.Variable(c, name='testDefaultGraph_v')
      v.initializer.run()
      v_val = v.eval()
      self.assertAllEqual([[4.0, 4.0, 4.0]], v_val)
      d = constant_op.constant(3.0, shape=[2, 3])
      e = math_ops.matmul(a, d)
      assign_e_to_v = state_ops.assign(v, e)
      e_val = e.eval()
      self.assertAllEqual([[6.0, 6.0, 6.0]], e_val)
      # The assign has not run yet, so v is unchanged.
      v_val = v.eval()
      self.assertAllEqual([[4.0, 4.0, 4.0]], v_val)
      s.run(assign_e_to_v)
      v_val = v.eval()
      self.assertAllEqual([[6.0, 6.0, 6.0]], v_val)
      self.assertEqual(ops.get_default_graph(), s.graph)
  def _testDefaultGraphInThread(self, constructed_event, continue_event, i):
    """Worker for testDefaultGraphWithThreads.

    Args:
      constructed_event: set once this thread has built its graph.
      continue_event: waited on before running, so all threads build first.
      i: thread index, used to give the variable a unique name.
    """
    with session.Session() as s:
      self.assertEqual(ops.get_default_graph(), s.graph)
      a = constant_op.constant(1.0, shape=[1, 2])
      b = constant_op.constant(2.0, shape=[2, 3])
      c = math_ops.matmul(a, b)
      v = variables.Variable(c, name='var_%d' % i)
      # Block here until all threads have constructed their graph.
      constructed_event.set()
      continue_event.wait()
      assign_c_to_v = state_ops.assign(v, c)
      v.initializer.run()
      assign_c_to_v.eval()
      v_val = v.eval()
      self.assertAllEqual([[4.0, 4.0, 4.0]], v_val)
      d = constant_op.constant(3.0, shape=[2, 3])
      e = math_ops.matmul(a, d)
      assign_e_to_v = state_ops.assign(v, e)
      e_val = e.eval()
      self.assertAllEqual([[6.0, 6.0, 6.0]], e_val)
      v_val = v.eval()
      self.assertAllEqual([[4.0, 4.0, 4.0]], v_val)
      s.run(assign_e_to_v)
      v_val = v.eval()
      self.assertAllEqual([[6.0, 6.0, 6.0]], v_val)
      self.assertEqual(ops.get_default_graph(), s.graph)
def testDefaultGraphWithThreads(self):
# Fork ten threads that use their thread-local default graph.
threads = []
constructed_events = [threading.Event() for _ in range(10)]
continue_event = threading.Event()
for i, constructed_event in enumerate(constructed_events):
t = self.checkedThread(
target=self._testDefaultGraphInThread,
args=(constructed_event, continue_event, i))
threads.append(t)
for t in threads:
t.start()
for constructed_event in constructed_events:
constructed_event.wait()
continue_event.set()
for t in threads:
t.join()
def testParallelRun(self):
with session.Session() as sess:
c = constant_op.constant(5.0)
ev = threading.Event()
def run_step():
ev.wait()
val = c.eval(session=sess)
self.assertEqual(val, 5.0)
threads = [self.checkedThread(target=run_step) for _ in range(100)]
for t in threads:
t.start()
ev.set()
for t in threads:
t.join()
  @staticmethod
  def _build_graph():
    """Do assorted graph construction to stress concurrent build paths.

    Builds placeholders, colocation/device scopes, a while_loop, gradients,
    an attr scope, and a graph-def import, with a random initial delay so
    concurrent callers interleave differently each run.
    """
    time.sleep(random.random() * 0.1)
    # Do some graph construction. Try to exercise non-trivial paths.
    graph = ops.get_default_graph()
    gdef = None
    for _ in range(10):
      x = array_ops.placeholder(dtype=dtypes.float32)
      with ops.colocate_with(x):
        y = array_ops.placeholder(dtype=dtypes.float32)
      with ops.device('/cpu:0'):
        z = control_flow_ops.while_loop(
            lambda x, y: x < 10, lambda x, y: (x + 1, x * y), [x, y])
      with graph._attr_scope({'_a': attr_value_pb2.AttrValue(b=False)}):
        gradients_impl.gradients(z, [x, y])
      # First iteration snapshots the graph; later ones re-import it.
      if gdef is None:
        gdef = graph.as_graph_def()
      else:
        importer.import_graph_def(gdef, name='import')
  @test_util.run_v1_only('b/120545219')
  def testParallelRunAndSingleBuild(self):
    """Graph construction may proceed while other threads call run()."""
    with session.Session() as sess:
      c = constant_op.constant(5.0)
      stop = threading.Event()

      def run_loop():
        # Keep running the session until the builder thread is done.
        while not stop.is_set():
          time.sleep(random.random() * 0.1)
          self.assertEqual(sess.run(c), 5.0)

      threads = [self.checkedThread(target=run_loop) for _ in range(10)]
      for t in threads:
        t.start()
      SessionTest._build_graph()
      stop.set()
      for t in threads:
        t.join()
  @test_util.run_v1_only('b/120545219')
  def testParallelRunAndParallelBuild(self):
    """Concurrent graph builders and concurrent run() callers coexist."""
    with session.Session() as sess:
      c = constant_op.constant(5.0)
      stop = threading.Event()

      def run_loop():
        while not stop.is_set():
          time.sleep(random.random() * 0.1)
          self.assertEqual(sess.run(c), 5.0)

      run_threads = [self.checkedThread(target=run_loop) for _ in range(10)]
      for t in run_threads:
        t.start()
      build_threads = [self.checkedThread(target=SessionTest._build_graph)
                       for _ in range(10)]
      for t in build_threads:
        t.start()
      for t in build_threads:
        t.join()
      # Let the run_threads run until the build threads are finished.
      stop.set()
      for t in run_threads:
        t.join()
def testRunFeedDict(self):
with session.Session() as s:
x = array_ops.zeros([2])
y = s.run(2 * x, feed_dict={x: np.ones(2).astype(np.float32)})
self.assertAllEqual(y, 2 * np.ones(2))
y = s.run(2 * x, feed_dict={x.name: np.ones(2).astype(np.float32)})
self.assertAllEqual(y, 2 * np.ones(2))
y = s.run(2 * x, feed_dict={x: [1, 1]})
assert (y == 2 * np.ones(2)).all()
# Test nested tuple keys
z = (((array_ops.zeros([2]),),), array_ops.zeros([2]),
(array_ops.zeros([2]),))
result = [z[0][0][0] * 2, z[1] * 2, z[2][0] * 2]
values = (((np.array([1, 1]),),), np.array([2, 2]), (np.array([3, 3]),))
result_value = s.run(result, feed_dict={z: values})
self.assertAllEqual(result_value[0], 2 * np.ones(2))
self.assertAllEqual(result_value[1], 2 * np.array([2, 2]))
self.assertAllEqual(result_value[2], 2 * np.array([3, 3]))
def testGraphDef(self):
with session.Session() as sess:
self.assertProtoEquals('versions { producer: %d min_consumer: %d }' %
(versions.GRAPH_DEF_VERSION,
versions.GRAPH_DEF_VERSION_MIN_CONSUMER),
sess.graph_def)
c = constant_op.constant(5.0, name='c')
self.assertEquals(len(sess.graph_def.node), 1)
d = constant_op.constant(6.0, name='d')
self.assertEquals(len(sess.graph_def.node), 2)
self.assertAllEqual(c.eval(), 5.0)
self.assertAllEqual(d.eval(), 6.0)
e = constant_op.constant(7.0, name='e')
self.assertEquals(len(sess.graph_def.node), 3)
self.assertAllEqual(e.eval(), 7.0)
def testUseAfterClose(self):
with session.Session() as sess:
c = constant_op.constant(5.0)
self.assertAllEqual(sess.run(c), 5.0)
with self.assertRaisesWithPredicateMatch(
RuntimeError, lambda e: 'Attempted to use a closed Session.' in str(e)):
sess.run(c)
  def testUseAfterCloseConcurrent(self):
    """Closing a session while another thread runs it raises in that thread."""
    with session.Session() as sess:
      c = constant_op.constant(5.0)
      self.assertAllEqual(sess.run(c), 5.0)

      def update_thread():
        # Loop until close() in the main thread makes run() raise.
        with self.assertRaisesWithPredicateMatch(
            RuntimeError,
            lambda e: 'Attempted to use a closed Session.' in str(e)):
          while True:
            sess.run(c)

      t = threading.Thread(target=update_thread)
      t.start()
      time.sleep(0.1)
      sess.close()
      t.join()
def testUseEmptyGraph(self):
with session.Session() as sess:
with self.assertRaisesRegexp(RuntimeError, 'The Session graph is empty.'):
sess.run([])
with self.assertRaisesRegexp(RuntimeError, 'The Session graph is empty.'):
sess.run(())
with self.assertRaisesRegexp(RuntimeError, 'The Session graph is empty.'):
sess.run({})
  @test_util.run_v1_only('b/120545219')
  def testNotEntered(self):
    """A session that was never entered is not the default session."""
    # pylint: disable=protected-access
    self.assertEqual(ops._default_session_stack.get_default(), None)
    # pylint: enable=protected-access
    with ops.device('/cpu:0'):
      sess = session.Session()
      c_1 = constant_op.constant(5.0)
      with sess.graph.as_default():
        c_2 = constant_op.constant(5.0)
      self.assertEqual(c_1.graph, c_2.graph)
      # Explicit sess.run works without the session being default.
      self.assertEqual(sess.run(c_2), 5.0)
      # But Tensor.eval needs a default session, which was never set.
      with self.assertRaisesWithPredicateMatch(
          ValueError, lambda e: 'No default session is registered.' in str(e)):
        c_2.eval()
  @test_util.run_v1_only('b/120545219')
  def testInteractive(self):
    """InteractiveSession installs itself as default on construction."""
    with ops.device('/cpu:0'):
      sess = session.InteractiveSession()
      a = constant_op.constant(1.0, shape=[1, 2])
      b = constant_op.constant(2.0, shape=[2, 3])
      c = math_ops.matmul(a, b)
      # eval() works without a with-block because the session is default.
      self.assertAllEqual([[4.0, 4.0, 4.0]], c.eval())
      d = constant_op.constant([1.0, 2.0, 3.0], shape=[3, 1])
      e = math_ops.matmul(c, d)
      self.assertAllEqual([[24.0]], e.eval())
      sess.close()
  @test_util.run_v1_only('b/120545219')
  def testMultipleInteractiveSessionsWarning(self):
    """A second live InteractiveSession emits exactly one warning."""
    # Reinitialize the global state to ensure that the expected warnings will
    # be emitted.
    session.InteractiveSession._active_session_count = 0  # pylint: disable=protected-access

    sess = session.InteractiveSession()
    sess.run(constant_op.constant(4.0))  # Run so that the session is "opened".
    sess.close()
    # Opening and closing interactive sessions serially should not warn.
    with warnings.catch_warnings(record=True) as w:
      sess = session.InteractiveSession()
      sess.close()
    self.assertEqual(0, len(w))

    with warnings.catch_warnings(record=True) as w:
      sess = session.InteractiveSession()
    self.assertEqual(0, len(w))
    # Second session while the first is still open: warn once.
    with warnings.catch_warnings(record=True) as w:
      sess2 = session.InteractiveSession()
    self.assertEqual(1, len(w))
    self.assertTrue('An interactive session is already active. This can cause '
                    'out-of-memory errors in some cases. You must explicitly '
                    'call `InteractiveSession.close()` to release resources '
                    'held by the other session(s).' in str(w[0].message))
    sess2.close()
    sess.close()
  @test_util.run_v1_only('b/120545219')
  def testInteractivePlacePrunedGraph(self):
    """InteractiveSession only places the pruned subgraph it actually runs."""
    sess = session.InteractiveSession()
    # Build a graph that has a bad op in it (no kernel).
    #
    # This test currently does not link in any GPU kernels,
    # which is why placing this is invalid. If at some point
    # GPU kernels are added to this test, some other different
    # op / device combo should be chosen.
    with ops.device('/device:GPU:0'):
      a = constant_op.constant(1.0, shape=[1, 2])
    b = constant_op.constant(1.0, shape=[1, 2])
    # Only run the valid op, this should work.
    b.eval()
    # Running the unplaceable op itself still fails.
    with self.assertRaises(errors.InvalidArgumentError):
      a.eval()
    sess.close()
  @test_util.run_v1_only('b/120545219')
  def testDefaultSessionPlacePrunedGraph(self):
    """A regular Session places the whole graph, so a bad op fails any run."""
    sess = session.Session()
    # Build a graph that has a bad op in it (no kernel).
    #
    # This test currently does not link in any GPU kernels,
    # which is why placing this is invalid. If at some point
    # GPU kernels are added to this test, some other different
    # op / device combo should be chosen.
    with ops.device('/device:GPU:0'):
      _ = constant_op.constant(1.0, shape=[1, 2])
    b = constant_op.constant(1.0, shape=[1, 2])
    with self.assertRaises(errors.InvalidArgumentError):
      # Even though we don't run the bad op, we place the entire
      # graph, which should fail with a non-interactive session.
      sess.run(b)
    sess.close()
def testSharedGraph(self):
with ops.Graph().as_default() as g, ops.device('/cpu:0'):
a = constant_op.constant(1.0, shape=[1, 2])
b = constant_op.constant(2.0, shape=[2, 3])
c = math_ops.matmul(a, b)
with session.Session(graph=g) as sess1:
with session.Session(graph=g) as sess2:
self.assertAllEqual(sess1.run(c), sess2.run(c))
def testDuplicatedInputs(self):
with session.Session() as sess:
a = constant_op.constant(1.0, shape=[1, 2])
b = constant_op.constant(2.0, shape=[1, 3])
a_val, b_val, a2_val = sess.run([a, b, a])
self.assertAllEqual(a_val, [[1.0, 1.0]])
self.assertAllEqual(b_val, [[2.0, 2.0, 2.0]])
self.assertAllEqual(a2_val, [[1.0, 1.0]])
def testFeedAndFetch(self):
with session.Session() as sess:
for dtype in [
dtypes.float16, dtypes.float32, dtypes.float64, dtypes.int32,
dtypes.uint8, dtypes.int16, dtypes.int8, dtypes.int64, dtypes.bool,
dtypes.complex64, dtypes.complex128
]:
for shape in [(32, 4, 128), (37,), (2, 0, 6), (0, 0, 0)]:
np_dtype = dtype.as_numpy_dtype
feed_t = array_ops.placeholder(dtype=dtype, shape=shape)
out_t = array_ops.identity(feed_t)
np_array = np.random.randint(-10, 10, shape)
if dtype == dtypes.bool:
np_array = np_array > 0
elif dtype == dtypes.complex64:
np_array = np.sqrt(np_array.astype(np_dtype))
elif dtype == dtypes.complex64:
np_array = np.sqrt(np_array.astype(np_dtype))
else:
np_array = np_array.astype(np_dtype)
self.assertAllEqual(np_array,
sess.run(out_t, feed_dict={
feed_t: np_array
}))
# Check that we can also get the feed back.
self.assertAllEqual(np_array,
sess.run(feed_t, feed_dict={
feed_t: np_array
}))
# Also check that we can get both back.
out_v, feed_v = sess.run(
[out_t, feed_t], feed_dict={
feed_t: np_array
})
self.assertAllEqual(np_array, out_v)
self.assertAllEqual(np_array, feed_v)
feed_fetch_runner = sess.make_callable([out_t, feed_t], [feed_t])
out_v, feed_v = feed_fetch_runner(np_array)
self.assertAllEqual(np_array, out_v)
self.assertAllEqual(np_array, feed_v)
  def testMakeCallableOnTensorWithRunOptions(self):
    """make_callable(accept_options=True) honors RunOptions/RunMetadata."""
    with session.Session() as sess:
      a = constant_op.constant(42.0)
      tensor_runner = sess.make_callable(a, accept_options=True)
      run_options = config_pb2.RunOptions(
          trace_level=config_pb2.RunOptions.FULL_TRACE)
      run_metadata = config_pb2.RunMetadata()
      self.assertEqual(0, len(run_metadata.step_stats.dev_stats))
      res = tensor_runner(options=run_options, run_metadata=run_metadata)
      self.assertEqual(42.0, res)
      # FULL_TRACE must have populated per-device step stats.
      self.assertGreater(len(run_metadata.step_stats.dev_stats), 0)
  def testMakeCallableOnOperationWithRunOptions(self):
    """A callable built from an Operation runs it and records trace stats."""
    with session.Session() as sess:
      a = variables.Variable(42.0)
      b = state_ops.assign_add(a, 1.0)
      sess.run(a.initializer)
      # b.op: the callable is built from the operation, not the tensor.
      tensor_runner = sess.make_callable(b.op, accept_options=True)
      run_options = config_pb2.RunOptions(
          trace_level=config_pb2.RunOptions.FULL_TRACE)
      run_metadata = config_pb2.RunMetadata()
      self.assertEqual(0, len(run_metadata.step_stats.dev_stats))
      tensor_runner(options=run_options, run_metadata=run_metadata)
      # The assign_add ran as a side effect of the callable.
      self.assertEqual(43.0, sess.run(a))
      self.assertGreater(len(run_metadata.step_stats.dev_stats), 0)
  def testMakeCallableWithFeedListAndRunOptions(self):
    """make_callable with a feed_list accepts positional feed values."""
    with session.Session() as sess:
      ph = array_ops.placeholder(dtypes.float32)
      a = math_ops.add(ph, 1.0)
      # Feeds are declared by tensor name and supplied positionally.
      tensor_runner = sess.make_callable(
          a, feed_list=[ph.name], accept_options=True)
      run_options = config_pb2.RunOptions(
          trace_level=config_pb2.RunOptions.FULL_TRACE)
      run_metadata = config_pb2.RunMetadata()
      self.assertEqual(0, len(run_metadata.step_stats.dev_stats))
      self.assertAllClose(42.0,
                          tensor_runner(
                              41.0,
                              options=run_options,
                              run_metadata=run_metadata))
      self.assertGreater(len(run_metadata.step_stats.dev_stats), 0)
  def testOptimizedMakeCallable(self):
    """_make_callable_from_options callables are reusable and rebuildable."""
    with session.Session() as sess:
      ph = array_ops.placeholder(dtypes.float32)
      a = math_ops.add(ph, 1.0)
      callable_opts = config_pb2.CallableOptions()
      callable_opts.feed.append(ph.name)
      callable_opts.fetch.append(a.name)
      # Rebuild the callable several times and invoke each one repeatedly.
      for _ in range(3):
        callable_fn = sess._make_callable_from_options(callable_opts)
        for _ in range(5):
          self.assertEqual([2.0], callable_fn(np.array(1.0, dtype=np.float32)))
  def testOptimizedMakeCallableWithRunMetadata(self):
    """A FULL_TRACE callable fills in the RunMetadata passed at call time."""
    with session.Session() as sess:
      ph = array_ops.placeholder(dtypes.float32)
      a = math_ops.add(ph, 1.0)
      callable_opts = config_pb2.CallableOptions()
      callable_opts.feed.append(ph.name)
      callable_opts.fetch.append(a.name)
      # Tracing is baked into the callable's options, not per-call options.
      callable_opts.run_options.trace_level = config_pb2.RunOptions.FULL_TRACE
      callable_fn = sess._make_callable_from_options(callable_opts)
      run_metadata = config_pb2.RunMetadata()
      self.assertEqual([2.0], callable_fn(np.array(1.0, dtype=np.float32),
                                          run_metadata=run_metadata))
      self.assertGreater(len(run_metadata.step_stats.dev_stats), 0)
  def testFeedError(self):
    """Feeding a tf.Tensor as a feed value raises TypeError everywhere.

    Checks all three entry points: Session.run, Tensor.eval, Operation.run.
    """
    with session.Session() as sess:
      feed_t = array_ops.placeholder(dtype=dtypes.float32)
      out_t = array_ops.identity(feed_t)
      # Feed values must be numpy-convertible, not graph tensors.
      feed_val = constant_op.constant(5.0)
      with self.assertRaisesRegexp(TypeError, 'cannot be a tf.Tensor object'):
        sess.run(out_t, feed_dict={feed_t: feed_val})
      with self.assertRaisesRegexp(TypeError, 'cannot be a tf.Tensor object'):
        out_t.eval(feed_dict={feed_t: feed_val})
      with self.assertRaisesRegexp(TypeError, 'cannot be a tf.Tensor object'):
        out_t.op.run(feed_dict={feed_t: feed_val})
  def testFeedPrecisionLossError(self):
    """Feeding an int64 value into an int32 tensor raises rather than truncates."""
    with session.Session() as sess:
      largest_int64 = np.iinfo(np.int64).max
      feed_int_implicit_int32 = constant_op.constant(1)
      feed_int_explicit_int32 = constant_op.constant(1, dtype=dtypes.int32)
      out_t = constant_op.constant(1.0)
      # max int64 cannot be represented as int32; the feed must be rejected.
      with self.assertRaisesRegexp(TypeError,
                                   'is not compatible with Tensor type'):
        sess.run(out_t, feed_dict={feed_int_implicit_int32: largest_int64})
      with self.assertRaisesRegexp(TypeError,
                                   'is not compatible with Tensor type'):
        sess.run(out_t, feed_dict={feed_int_explicit_int32: largest_int64})
def testStringFetch(self):
with session.Session():
for shape in [(32, 4, 128), (37,), (2, 0, 6), (0, 0, 0)]:
size = 1
for s in shape:
size *= s
c_list = np.array(
[compat.as_bytes(str(i)) for i in xrange(size)],
dtype=np.object).reshape(shape) if size > 0 else []
c = constant_op.constant(c_list)
self.assertAllEqual(c.eval(), c_list)
def testStringFeed(self):
with session.Session() as sess:
for shape in [(32, 4, 128), (37,), (2, 0, 6), (0, 0, 0)]:
size = 1
for s in shape:
size *= s
c_list = np.array(
[compat.as_bytes(str(i)) for i in xrange(size)],
dtype=np.object).reshape(shape)
feed_t = array_ops.placeholder(dtype=dtypes.string, shape=shape)
c = array_ops.identity(feed_t)
self.assertAllEqual(sess.run(c, feed_dict={feed_t: c_list}), c_list)
self.assertAllEqual(
sess.run(feed_t, feed_dict={
feed_t: c_list
}), c_list)
c_v, feed_v = sess.run([c, feed_t], feed_dict={feed_t: c_list})
self.assertAllEqual(c_v, c_list)
self.assertAllEqual(feed_v, c_list)
  def testStringFeedWithNullCharacters(self):
    """Embedded NUL bytes in string feeds survive the round trip intact."""
    with session.Session():
      c_list = [b'\n\x01\x00', b'\n\x00\x01']
      feed_t = array_ops.placeholder(dtype=dtypes.string, shape=[2])
      c = array_ops.identity(feed_t)
      out = c.eval(feed_dict={feed_t: c_list})
      self.assertEqual(c_list[0], out[0])
      self.assertEqual(c_list[1], out[1])
def testStringFeedWithUnicode(self):
with session.Session():
c_list = [
u'\n\x01\x00', u'\n\x00\x01', u'\u26a3 unicode',
u'\U0001f60e deal with it'
]
feed_t = array_ops.placeholder(dtype=dtypes.string, shape=[len(c_list)])
c = array_ops.identity(feed_t)
out = c.eval(feed_dict={feed_t: c_list})
for i in range(len(c_list)):
self.assertEqual(c_list[i], out[i].decode('utf-8'))
out = c.eval(feed_dict={feed_t: np.array(c_list, dtype=np.object)})
for i in range(len(c_list)):
self.assertEqual(c_list[i], out[i].decode('utf-8'))
def testInvalidTargetFails(self):
with self.assertRaisesRegexp(
errors.NotFoundError,
'No session factory registered for the given session options'):
session.Session('INVALID_TARGET')
  def testFetchByNameDifferentStringTypes(self):
    """Tensors can be named and fetched with str, unicode, bytes, and raw strings."""
    with session.Session() as sess:
      c = constant_op.constant(42.0, name='c')
      d = constant_op.constant(43.0, name=u'd')
      e = constant_op.constant(44.0, name=b'e')
      f = constant_op.constant(45.0, name=r'f')
      # Regardless of the name argument's type, .name is always text.
      self.assertTrue(isinstance(c.name, six.text_type))
      self.assertTrue(isinstance(d.name, six.text_type))
      self.assertTrue(isinstance(e.name, six.text_type))
      self.assertTrue(isinstance(f.name, six.text_type))
      self.assertEqual(42.0, sess.run('c:0'))
      self.assertEqual(42.0, sess.run(u'c:0'))
      self.assertEqual(42.0, sess.run(b'c:0'))
      self.assertEqual(42.0, sess.run(r'c:0'))
      self.assertEqual(43.0, sess.run('d:0'))
      self.assertEqual(43.0, sess.run(u'd:0'))
      self.assertEqual(43.0, sess.run(b'd:0'))
      self.assertEqual(43.0, sess.run(r'd:0'))
      self.assertEqual(44.0, sess.run('e:0'))
      self.assertEqual(44.0, sess.run(u'e:0'))
      self.assertEqual(44.0, sess.run(b'e:0'))
      self.assertEqual(44.0, sess.run(r'e:0'))
      self.assertEqual(45.0, sess.run('f:0'))
      self.assertEqual(45.0, sess.run(u'f:0'))
      self.assertEqual(45.0, sess.run(b'f:0'))
      self.assertEqual(45.0, sess.run(r'f:0'))
def testIncorrectGraph(self):
with ops.Graph().as_default() as g_1:
c_1 = constant_op.constant(1.0, name='c')
with ops.Graph().as_default() as g_2:
c_2 = constant_op.constant(2.0, name='c')
self.assertEqual('c', c_1.op.name)
self.assertEqual('c', c_2.op.name)
with session.Session(graph=g_1) as sess_1:
self.assertEqual(1.0, sess_1.run(c_1))
with self.assertRaises(ValueError):
sess_1.run(c_2)
with self.assertRaises(ValueError):
sess_1.run(c_2.op)
with session.Session(graph=g_2) as sess_2:
with self.assertRaises(ValueError):
sess_2.run(c_1)
with self.assertRaises(ValueError):
sess_2.run(c_1.op)
self.assertEqual(2.0, sess_2.run(c_2))
def testFeedDictKeyException(self):
with session.Session() as sess:
a = constant_op.constant(1.0, dtypes.float32, name='a')
with self.assertRaisesRegexp(TypeError, 'Cannot interpret feed_dict'):
sess.run(a, feed_dict={'a': [2.0]})
def testPerStepTrace(self):
run_options = config_pb2.RunOptions(
trace_level=config_pb2.RunOptions.FULL_TRACE)
run_metadata = config_pb2.RunMetadata()
with ops.device('/cpu:0'):
with session.Session() as sess:
sess.run(constant_op.constant(1.0))
self.assertTrue(not run_metadata.HasField('step_stats'))
sess.run(constant_op.constant(1.0), run_metadata=run_metadata)
self.assertTrue(not run_metadata.HasField('step_stats'))
sess.run(
constant_op.constant(1.0),
options=run_options,
run_metadata=run_metadata)
self.assertTrue(run_metadata.HasField('step_stats'))
self.assertEquals(len(run_metadata.step_stats.dev_stats), 1)
def testRunOptionsRunMetadata(self):
run_options = config_pb2.RunOptions(
trace_level=config_pb2.RunOptions.FULL_TRACE)
run_metadata = config_pb2.RunMetadata()
with ops.device('/cpu:0'):
with session.Session() as sess:
# all combinations are valid
sess.run(constant_op.constant(1.0), options=None, run_metadata=None)
sess.run(
constant_op.constant(1.0), options=None, run_metadata=run_metadata)
self.assertTrue(not run_metadata.HasField('step_stats'))
sess.run(
constant_op.constant(1.0), options=run_options, run_metadata=None)
self.assertTrue(not run_metadata.HasField('step_stats'))
sess.run(
constant_op.constant(1.0),
options=run_options,
run_metadata=run_metadata)
self.assertTrue(run_metadata.HasField('step_stats'))
self.assertEquals(len(run_metadata.step_stats.dev_stats), 1)
def testFeedShapeCompatibility(self):
with session.Session() as sess:
some_tensor = constant_op.constant([2.0, 2.0, 2.0, 2.0])
new_shape = constant_op.constant([2, 2])
reshaped_tensor = array_ops.reshape(some_tensor, new_shape)
with self.assertRaisesRegexp(ValueError, 'Cannot feed value of shape'):
sess.run(reshaped_tensor, feed_dict={some_tensor: [1.0, 2.0, 3.0]})
with self.assertRaisesRegexp(
errors.InvalidArgumentError,
'Input to reshape is a tensor with 4 values, '
'but the requested shape has 21'):
sess.run(reshaped_tensor, feed_dict={new_shape: [3, 7]})
  def testInferShapesFalse(self):
    """By default (infer_shapes off) nodes carry no _output_shapes attr."""
    with ops.Graph().as_default(), ops.device('/cpu:0'):
      a = constant_op.constant([[1, 2]])
      sess = session.Session()
      self.assertFalse('_output_shapes' in sess.graph_def.node[0].attr)
      # Avoid lint error regarding 'unused' var a.
      self.assertTrue(a == a)
  def testInferShapesTrue(self):
    """With GraphOptions(infer_shapes=True), nodes carry _output_shapes."""
    config = config_pb2.ConfigProto(
        graph_options=config_pb2.GraphOptions(infer_shapes=True))
    with ops.Graph().as_default(), ops.device('/cpu:0'):
      a = constant_op.constant([[1, 2]])
      sess = session.Session(config=config)
      self.assertTrue('_output_shapes' in sess.graph_def.node[0].attr)
      # Avoid lint error regarding 'unused' var a.
      self.assertTrue(a == a)
  def testBuildCostModel(self):
    """With build_cost_model=100, a cost graph is emitted on the 100th step only."""
    run_options = config_pb2.RunOptions()
    config = config_pb2.ConfigProto(
        allow_soft_placement=True,
        graph_options=config_pb2.GraphOptions(build_cost_model=100))
    with session.Session(config=config) as sess:
      with ops.device('/device:GPU:0'):
        a = array_ops.placeholder(dtypes.float32, shape=[])
        b = math_ops.add(a, a)
        c = array_ops.identity(b)
        d = math_ops.multiply(c, c)
      for step in xrange(120):
        run_metadata = config_pb2.RunMetadata()
        sess.run(
            d,
            feed_dict={a: 1.0},
            options=run_options,
            run_metadata=run_metadata)
        if step == 99:
          # Steps are 0-based, so step 99 is the 100th run() call.
          self.assertTrue(run_metadata.HasField('cost_graph'))
        else:
          self.assertFalse(run_metadata.HasField('cost_graph'))
def runTestOutputPartitionGraphs(self, sess):
run_options = config_pb2.RunOptions(output_partition_graphs=True)
a = constant_op.constant(1)
run_metadata = config_pb2.RunMetadata()
sess.run(a, options=run_options, run_metadata=run_metadata)
self.assertGreater(len(run_metadata.partition_graphs), 0)
sess.run(a, run_metadata=run_metadata)
self.assertEqual(len(run_metadata.partition_graphs), 0)
  @test_util.run_v1_only('b/120545219')
  def testOutputPartitionGraphsDirect(self):
    """Partition-graph output works with a direct (in-process) session."""
    self.runTestOutputPartitionGraphs(session.Session())
  @test_util.run_v1_only('b/120545219')
  def testOutputPartitionGraphsDistributed(self):
    """Partition-graph output works through a local gRPC server session."""
    server = server_lib.Server.create_local_server()
    self.runTestOutputPartitionGraphs(session.Session(server.target))
  def testNonInteractiveSessionNesting(self):
    """Exiting session contexts out of order trips the nesting assertion."""
    sess1 = session.Session()
    sess1_controller = sess1.as_default()
    sess1_controller.__enter__()

    sess2 = session.Session()
    sess2_controller = sess2.as_default()
    sess2_controller.__enter__()

    # sess2 is on top of the default-session stack, so exiting sess1 first
    # violates the required LIFO nesting.
    with self.assertRaisesRegexp(AssertionError, 'Nesting violated'):
      sess1_controller.__exit__(None, None, None)

    # Clean up the global stack so later tests start from a clean state.
    ops._default_session_stack.reset()
  def testInteractiveSessionNesting(self):
    """Creating and discarding nested InteractiveSessions must not raise."""
    sess1 = session.InteractiveSession()
    sess2 = session.InteractiveSession()
    del sess1
    del sess2
  @test_util.run_v1_only('b/120545219')
  def testAsDefault(self):
    """Tensor.eval() works inside as_default(), even for an unbound session."""
    c = constant_op.constant(37)
    sess = session.Session()
    with sess.as_default():
      self.assertEqual(37, c.eval())

    # Ensure that the session remains valid even when it is not captured.
    with session.Session().as_default():
      self.assertEqual(37, c.eval())
  def testReentry(self):
    """Entering the same session's context manager twice raises RuntimeError."""
    sess = session.Session()
    with self.assertRaisesRegexp(RuntimeError, 'not re-entrant'):
      with sess:
        with sess:
          pass
def testInvalidArgument(self):
with self.assertRaisesRegexp(TypeError, 'target must be a string'):
session.Session(37)
with self.assertRaisesRegexp(TypeError, 'config must be a tf.ConfigProto'):
session.Session(config=37)
with self.assertRaisesRegexp(TypeError, 'graph must be a tf.Graph'):
session.Session(graph=37)
  @test_util.run_v1_only('b/120545219')
  def testTimeoutWithShortOperations(self):
    """Non-blocking ops complete well within a generous operation timeout."""
    num_epochs = 5
    q = data_flow_ops.FIFOQueue(capacity=50, dtypes=[dtypes.int32], shapes=[()])
    enqueue_op = q.enqueue_many(constant_op.constant([1, 2]))

    # Use a 10-second timeout, which should be longer than any
    # non-blocking enqueue_many op.
    config = config_pb2.ConfigProto(operation_timeout_in_ms=10000)
    with session.Session(config=config) as sess:
      for _ in range(num_epochs):
        sess.run(enqueue_op)
      # Each epoch enqueues two elements.
      self.assertEqual(sess.run(q.size()), num_epochs * 2)
  @test_util.run_v1_only('b/120545219')
  def testRegisterFetchAndFeedConversionFunctions(self):
    """Custom types can be registered for run()/partial_run() fetch and feed."""

    class SquaredTensor(object):
      # Wraps a tensor holding the square of the constructor argument.

      def __init__(self, tensor):
        self.sq = math_ops.square(tensor)

    # fetch_fn maps the wrapper to its underlying tensor(s) plus a function
    # that rebuilds the result from the fetched values; feed_fn1/feed_fn2 map
    # the wrapper to feedable tensors.
    fetch_fn = lambda squared_tensor: ([squared_tensor.sq], lambda val: val[0])
    feed_fn1 = lambda feed, feed_val: [(feed.sq, feed_val)]
    feed_fn2 = lambda feed: [feed.sq]

    session.register_session_run_conversion_functions(SquaredTensor, fetch_fn,
                                                      feed_fn1, feed_fn2)
    # Registration is global and one-shot: re-registering the same type fails.
    with self.assertRaises(ValueError):
      session.register_session_run_conversion_functions(SquaredTensor, fetch_fn,
                                                        feed_fn1, feed_fn2)
    with self.cached_session() as sess:
      np1 = np.array([1.0, 1.5, 2.0, 2.5])
      np2 = np.array([3.0, 3.5, 4.0, 4.5])
      squared_tensor = SquaredTensor(np2)
      # Fetching the wrapper evaluates its underlying squared tensor.
      squared_eval = sess.run(squared_tensor)
      self.assertAllClose(np2 * np2, squared_eval)
      # Feeding the wrapper overrides the underlying tensor's value.
      squared_eval = sess.run(
          squared_tensor, feed_dict={
              squared_tensor: np1 * np1
          })
      self.assertAllClose(np1 * np1, squared_eval)
      # The same conversion functions drive partial_run fetches.
      partial_run = sess.partial_run_setup([squared_tensor], [])
      squared_eval = sess.partial_run(partial_run, squared_tensor)
      self.assertAllClose(np2 * np2, squared_eval)
  @test_util.run_v1_only('b/120545219')
  def testDefaultLogDevicePlacement(self):
    """log_device_placement in the *server* config is honored by sessions."""

    class CaptureStderr(str):
      """Class to capture stderr from C++ shared library."""

      def __enter__(self):
        # Sentinel byte used to mark the end of the captured output.
        self._esc = compat.as_str('\b')
        self._output = compat.as_str('')
        self._stderr = sys.stderr
        self._fd = self._stderr.fileno()
        self._out_pipe, in_pipe = os.pipe()
        # Save the original io stream.
        self._dup_fd = os.dup(self._fd)
        # Replace the original io stream with in pipe.
        os.dup2(in_pipe, self._fd)
        return self

      def __exit__(self, *args):
        # Write the sentinel so read() knows when to stop draining the pipe.
        self._stderr.write(self._esc)
        self._stderr.flush()
        self.read()
        os.close(self._out_pipe)
        # Restore the original io stream.
        os.dup2(self._dup_fd, self._fd)

      def read(self):
        # Drain the pipe one byte at a time until the sentinel (or EOF).
        while True:
          data = os.read(self._out_pipe, 1)
          if not data or compat.as_str(data) == self._esc:
            break
          self._output += compat.as_str(data)

      def __str__(self):
        return self._output

    # Passing the config to the server, but not the session should still result
    # in logging device placement.
    config = config_pb2.ConfigProto(log_device_placement=True)
    server = server_lib.Server.create_local_server(config=config)
    a = constant_op.constant(1)
    b = constant_op.constant(2)
    c = a + b
    with session.Session(server.target) as sess:
      with CaptureStderr() as log:
        sess.run(c)
      # Ensure that we did log device placement.
      self.assertTrue('/job:local/replica:0/task:0/device:CPU:0' in str(log),
                      str(log))
@test_util.run_v1_only('b/120545219')
def testLocalMasterSessionTimeout(self):
# Test that the timeout passed in a config to the session works correctly.
config = config_pb2.ConfigProto(operation_timeout_in_ms=1000)
server = server_lib.Server.create_local_server()
q = data_flow_ops.FIFOQueue(1, dtypes.float32)
dequeued_t = q.dequeue()
with session.Session(server.target, config=config) as sess:
# Intentionally do not run any enqueue_ops so that dequeue will block
# until operation_timeout_in_ms.
with self.assertRaises(errors.DeadlineExceededError):
sess.run(dequeued_t)
  @test_util.run_v1_only('b/120545219')
  def testDefaultServerTimeout(self):
    """An operation timeout in the *server* config applies when the Session
    itself supplies no config."""
    # Test that the default server config timeout gets used when no Session
    # config is provided.
    config = config_pb2.ConfigProto(operation_timeout_in_ms=1000)
    server = server_lib.Server.create_local_server(config=config)
    q = data_flow_ops.FIFOQueue(1, dtypes.float32)
    dequeued_t = q.dequeue()

    with session.Session(server.target) as sess:
      # Intentionally do not run any enqueue_ops so that dequeue will block
      # until operation_timeout_in_ms.
      with self.assertRaises(errors.DeadlineExceededError):
        sess.run(dequeued_t)
  def runTestBuildGraphError(self, sess):
    """Errors raised while building the client graph surface through run()."""
    # Ensure that errors from building the graph get propagated.
    data = array_ops.placeholder(dtypes.float32, shape=[])
    # Two Enter ops with different frame names produce an invalid graph when
    # their outputs are combined.
    # pylint: disable=protected-access
    enter_1 = gen_control_flow_ops.enter(data, 'foo_1', False)
    enter_2 = gen_control_flow_ops.enter(data, 'foo_2', False)
    # pylint: enable=protected-access
    res = math_ops.add(enter_1, enter_2)
    with self.assertRaisesOpError('has inputs from different frames'):
      sess.run(res, feed_dict={data: 1.0})
  @test_util.run_v1_only('b/120545219')
  def testBuildGraphErrorDirect(self):
    """Graph-construction errors propagate from a direct session."""
    self.runTestBuildGraphError(session.Session())
  @test_util.run_v1_only('b/120545219')
  def testBuildGraphErrorDist(self):
    """Graph-construction errors propagate from a distributed session."""
    server = server_lib.Server.create_local_server()
    self.runTestBuildGraphError(session.Session(server.target))
def testDeviceAttributes(self):
attrs = session._DeviceAttributes(
'/job:worker/replica:0/task:3/device:CPU:2', 'TYPE', 1337, 1000000)
self.assertEqual(1337, attrs.memory_limit_bytes)
self.assertEqual('/job:worker/replica:0/task:3/device:CPU:2', attrs.name)
self.assertEqual('TYPE', attrs.device_type)
self.assertEqual(1000000, attrs.incarnation)
str_repr = '%s' % attrs
self.assertTrue(str_repr.startswith('_DeviceAttributes'), str_repr)
def testDeviceAttributesCanonicalization(self):
attrs = session._DeviceAttributes('/job:worker/replica:0/task:3/cpu:1',
'TYPE', 1337, 1000000)
self.assertEqual(1337, attrs.memory_limit_bytes)
self.assertEqual('/job:worker/replica:0/task:3/device:CPU:1', attrs.name)
self.assertEqual('TYPE', attrs.device_type)
self.assertEqual(1000000, attrs.incarnation)
str_repr = '%s' % attrs
self.assertTrue(str_repr.startswith('_DeviceAttributes'), str_repr)
  def runTestAddFunctionToSession(self, target=''):
    """Add a function to a session after the graph has already been run."""

    @function.Defun(dtypes.float32)
    def foo(x):
      return x + 1

    x = constant_op.constant(1.0)
    with session.Session(target=target) as sess:
      # First run the plain constant so the session graph is established,
      # then fetch the function call added afterwards.
      sess.run(x)
      f = foo(x)
      result = sess.run(f)
      self.assertEqual(result, 2.0)
  @test_util.run_v1_only('b/120545219')
  def testAddFunctionToSession(self):
    """Functions can be added after first run in a direct session."""
    self.runTestAddFunctionToSession()
  @test_util.run_v1_only('b/120545219')
  def testAddFunctionToGrpcSession(self):
    """Functions can be added after first run in a gRPC session."""
    server = server_lib.Server.create_local_server()
    self.runTestAddFunctionToSession(server.target)
  def testOpenAndCloseGrpcSession(self):
    """Opening and closing a gRPC session against a local server must not raise."""
    server = server_lib.Server.create_local_server()
    with session.Session(server.target):
      pass
  def testOpenAndCloseSession(self):
    """Opening and closing a direct session must not raise."""
    with session.Session():
      pass
@test_util.run_v1_only('b/120545219')
def testAutoConvertAndCheckData(self):
with self.cached_session() as sess:
a = array_ops.placeholder(dtype=dtypes.string)
with self.assertRaisesRegexp(
TypeError, 'Type of feed value 1 with type <(\w+) \'int\'> is not'):
sess.run(a, feed_dict={a: 1})
# Run the test suite via TensorFlow's googletest wrapper when executed directly.
if __name__ == '__main__':
  googletest.main()
| {
"content_hash": "05ade63c100ba4ee81f1ceb1ef5eb847",
"timestamp": "",
"source": "github",
"line_count": 2030,
"max_line_length": 92,
"avg_line_length": 39.214778325123156,
"alnum_prop": 0.6252669396779137,
"repo_name": "hfp/tensorflow-xsmm",
"id": "c4a118a41406afc52586553b1d3f0b446005c46d",
"size": "80295",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "tensorflow/python/client/session_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "4882"
},
{
"name": "Batchfile",
"bytes": "14734"
},
{
"name": "C",
"bytes": "523814"
},
{
"name": "C#",
"bytes": "8446"
},
{
"name": "C++",
"bytes": "53558932"
},
{
"name": "CMake",
"bytes": "207176"
},
{
"name": "Dockerfile",
"bytes": "39024"
},
{
"name": "Go",
"bytes": "1303624"
},
{
"name": "HTML",
"bytes": "4680032"
},
{
"name": "Java",
"bytes": "896901"
},
{
"name": "Jupyter Notebook",
"bytes": "2618412"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "Makefile",
"bytes": "75333"
},
{
"name": "Objective-C",
"bytes": "16140"
},
{
"name": "Objective-C++",
"bytes": "102889"
},
{
"name": "PHP",
"bytes": "12166"
},
{
"name": "Pascal",
"bytes": "221"
},
{
"name": "Perl",
"bytes": "7536"
},
{
"name": "PureBasic",
"bytes": "25356"
},
{
"name": "Python",
"bytes": "43811576"
},
{
"name": "RobotFramework",
"bytes": "891"
},
{
"name": "Ruby",
"bytes": "838"
},
{
"name": "Shell",
"bytes": "502374"
},
{
"name": "Smarty",
"bytes": "6976"
}
],
"symlink_target": ""
} |
"""Posterior/Prior predictive plot."""
import logging
import warnings
from numbers import Integral
import numpy as np
from ..labels import BaseLabeller
from ..sel_utils import xarray_var_iter
from ..rcparams import rcParams
from ..utils import _var_names
from .plot_utils import default_grid, filter_plotters_list, get_plotting_function
_log = logging.getLogger(__name__)
def plot_ppc(
    data,
    kind="kde",
    alpha=None,
    mean=True,
    observed=True,
    observed_rug=False,
    color=None,
    colors=None,
    grid=None,
    figsize=None,
    textsize=None,
    data_pairs=None,
    var_names=None,
    filter_vars=None,
    coords=None,
    flatten=None,
    flatten_pp=None,
    num_pp_samples=None,
    random_seed=None,
    jitter=None,
    animated=False,
    animation_kwargs=None,
    legend=True,
    labeller=None,
    ax=None,
    backend=None,
    backend_kwargs=None,
    group="posterior",
    show=None,
):
    """
    Plot for posterior/prior predictive checks.

    Parameters
    ----------
    data: az.InferenceData object
        :class:`arviz.InferenceData` object containing the observed and posterior/prior
        predictive data.
    kind: str
        Type of plot to display ("kde", "cumulative", or "scatter"). Defaults to `kde`.
    alpha: float
        Opacity of posterior/prior predictive density curves.
        Defaults to 0.2 for ``kind = kde`` and cumulative, for scatter defaults to 0.7.
    mean: bool
        Whether or not to plot the mean posterior/prior predictive distribution.
        Defaults to ``True``.
    observed: bool, default True
        Whether or not to plot the observed data.
    observed_rug: bool, default False
        Whether or not to plot a rug plot for the observed data. Only valid if `observed` is
        `True` and for kind `kde` or `cumulative`.
    color: str
        Valid matplotlib ``color``. Defaults to ``C0``. Deprecated in favor of ``colors``.
    colors: list
        List with valid matplotlib colors corresponding to the posterior/prior predictive
        distribution, observed data and mean of the posterior/prior predictive distribution.
        Defaults to ["C0", "k", "C1"].
    grid : tuple
        Number of rows and columns. Defaults to None, the rows and columns are
        automatically inferred.
    figsize: tuple
        Figure size. If None, it will be defined automatically.
    textsize: float
        Text size scaling factor for labels, titles and lines. If None, it will be
        autoscaled based on ``figsize``.
    data_pairs: dict
        Dictionary containing relations between observed data and posterior/prior predictive data.
        Dictionary structure:

        - key = data var_name
        - value = posterior/prior predictive var_name

        For example, ``data_pairs = {'y' : 'y_hat'}``
        If None, it will assume that the observed data and the posterior/prior
        predictive data have the same variable name.
    var_names: list of variable names
        Variables to be plotted, if `None` all variable are plotted. Prefix the
        variables by ``~`` when you want to exclude them from the plot.
    filter_vars: {None, "like", "regex"}, optional, default=None
        If `None` (default), interpret var_names as the real variables names. If "like",
        interpret var_names as substrings of the real variables names. If "regex",
        interpret var_names as regular expressions on the real variables names. A la
        ``pandas.filter``.
    coords: dict
        Dictionary mapping dimensions to selected coordinates to be plotted.
        Dimensions without a mapping specified will include all coordinates for
        that dimension. Defaults to including all coordinates for all
        dimensions if None.
    flatten: list
        List of dimensions to flatten in ``observed_data``. Only flattens across the coordinates
        specified in the ``coords`` argument. Defaults to flattening all of the dimensions.
    flatten_pp: list
        List of dimensions to flatten in posterior_predictive/prior_predictive. Only flattens
        across the coordinates specified in the ``coords`` argument. Defaults to flattening all
        of the dimensions. Dimensions should match flatten excluding dimensions for ``data_pairs``
        parameters. If ``flatten`` is defined and ``flatten_pp`` is None, then
        ``flatten_pp = flatten``.
    num_pp_samples: int
        The number of posterior/prior predictive samples to plot. For ``kind`` = 'scatter' and
        ``animation = False`` if defaults to a maximum of 5 samples and will set jitter to 0.7.
        unless defined. Otherwise it defaults to all provided samples.
    random_seed: int
        Random number generator seed passed to ``numpy.random.seed`` to allow
        reproducibility of the plot. By default, no seed will be provided
        and the plot will change each call if a random sample is specified
        by ``num_pp_samples``.
    jitter: float
        If ``kind`` is "scatter", jitter will add random uniform noise to the height
        of the ppc samples and observed data. By default 0.
    animated: bool
        Create an animation of one posterior/prior predictive sample per frame.
        Defaults to ``False``. Only works with matploblib backend.
        To run animations inside a notebook you have to use the `nbAgg` matplotlib's backend.
        Try with `%matplotlib notebook` or `%matplotlib nbAgg`. You can switch back to the
        default matplotlib's backend with `%matplotlib inline` or `%matplotlib auto`.
        If switching back and forth between matplotlib's backend, you may need to run twice the cell
        with the animation.
        If you experience problems rendering the animation try setting
        `animation_kwargs({'blit':False}`) or changing the matplotlib's backend (e.g. to TkAgg)
        If you run the animation from a script write `ax, ani = az.plot_ppc(.)`
    animation_kwargs : dict
        Keywords passed to :class:`matplotlib.animation.FuncAnimation`. Ignored with
        bokeh backend (animation is only supported with matplotlib).
    legend : bool
        Add legend to figure. By default ``True``.
    labeller : labeller instance, optional
        Class providing the method ``make_pp_label`` to generate the labels in the plot titles.
        Read the :ref:`label_guide` for more details and usage examples.
    ax: numpy array-like of matplotlib axes or bokeh figures, optional
        A 2D array of locations into which to plot the densities. If not supplied, Arviz will create
        its own array of plot areas (and return it).
    backend: str, optional
        Select plotting backend {"matplotlib","bokeh"}. Default to "matplotlib".
    backend_kwargs: bool, optional
        These are kwargs specific to the backend being used, passed to
        :func:`matplotlib.pyplot.subplots` or :func:`bokeh.plotting.figure`.
        For additional documentation check the plotting method of the backend.
    group: {"prior", "posterior"}, optional
        Specifies which InferenceData group should be plotted. Defaults to 'posterior'.
        Other value can be 'prior'.
    show: bool, optional
        Call backend show function.

    Returns
    -------
    axes: matplotlib axes or bokeh figures

    See Also
    --------
    plot_bpv: Plot Bayesian p-value for observed data and Posterior/Prior predictive.
    plot_lm: Posterior predictive and mean plots for regression-like data.
    plot_ppc: plot for posterior/prior predictive checks.
    plot_ts: Plot timeseries data.

    Examples
    --------
    Plot the observed data KDE overlaid on posterior predictive KDEs.

    .. plot::
        :context: close-figs

        >>> import arviz as az
        >>> data = az.load_arviz_data('radon')
        >>> az.plot_ppc(data, data_pairs={"y":"y"})

    Plot the overlay with empirical CDFs.

    .. plot::
        :context: close-figs

        >>> az.plot_ppc(data, kind='cumulative')

    Use the ``coords`` and ``flatten`` parameters to plot selected variable dimensions
    across multiple plots. We will now modify the dimension ``obs_id`` to contain
    indicate the name of the county where the measure was taken. The change has to
    be done on both ``posterior_predictive`` and ``observed_data`` groups, which is
    why we will use :meth:`~arviz.InferenceData.map` to apply the same function to
    both groups. Afterwards, we will select the counties to be plotted with the
    ``coords`` arg.

    .. plot::
        :context: close-figs

        >>> obs_county = data.posterior["County"][data.constant_data["county_idx"]]
        >>> data = data.assign_coords(obs_id=obs_county, groups="observed_vars")
        >>> az.plot_ppc(data, coords={'obs_id': ['ANOKA', 'BELTRAMI']}, flatten=[])

    Plot the overlay using a stacked scatter plot that is particularly useful
    when the sample sizes are small.

    .. plot::
        :context: close-figs

        >>> az.plot_ppc(data, kind='scatter', flatten=[],
        >>>             coords={'obs_id': ['AITKIN', 'BELTRAMI']})

    Plot random posterior predictive sub-samples.

    .. plot::
        :context: close-figs

        >>> az.plot_ppc(data, num_pp_samples=30, random_seed=7)
    """
    if group not in ("posterior", "prior"):
        raise TypeError("`group` argument must be either `posterior` or `prior`")

    for groups in (f"{group}_predictive", "observed_data"):
        if not hasattr(data, groups):
            raise TypeError(f'`data` argument must have the group "{groups}" for ppcplot')

    if kind.lower() not in ("kde", "cumulative", "scatter"):
        raise TypeError("`kind` argument must be either `kde`, `cumulative`, or `scatter`")

    if colors is None:
        colors = ["C0", "k", "C1"]

    if isinstance(colors, str):
        raise TypeError("colors should be a list with 3 items.")

    if len(colors) != 3:
        raise ValueError("colors should be a list with 3 items.")

    if color is not None:
        warnings.warn("color has been deprecated in favor of colors", FutureWarning)
        colors[0] = color

    if data_pairs is None:
        data_pairs = {}

    if backend is None:
        backend = rcParams["plot.backend"]
    backend = backend.lower()

    if backend == "bokeh" and animated:
        raise TypeError("Animation option is only supported with matplotlib backend.")

    observed_data = data.observed_data
    if group == "posterior":
        predictive_dataset = data.posterior_predictive
    elif group == "prior":
        predictive_dataset = data.prior_predictive

    if var_names is None:
        var_names = list(observed_data.data_vars)
    var_names = _var_names(var_names, observed_data, filter_vars)
    pp_var_names = [data_pairs.get(var, var) for var in var_names]
    pp_var_names = _var_names(pp_var_names, predictive_dataset, filter_vars)

    if flatten_pp is None:
        if flatten is None:
            flatten_pp = list(predictive_dataset.dims.keys())
        else:
            flatten_pp = flatten
    if flatten is None:
        flatten = list(observed_data.dims.keys())

    if coords is None:
        coords = {}
    else:
        # Copy so the caller's dict is not mutated by the index translation below.
        coords = coords.copy()

    if labeller is None:
        labeller = BaseLabeller()

    if random_seed is not None:
        np.random.seed(random_seed)

    total_pp_samples = predictive_dataset.sizes["chain"] * predictive_dataset.sizes["draw"]
    if num_pp_samples is None:
        if kind == "scatter" and not animated:
            num_pp_samples = min(5, total_pp_samples)
        else:
            num_pp_samples = total_pp_samples

    if (
        not isinstance(num_pp_samples, Integral)
        or num_pp_samples < 1
        or num_pp_samples > total_pp_samples
    ):
        raise TypeError(f"`num_pp_samples` must be an integer between 1 and {total_pp_samples}.")

    pp_sample_ix = np.random.choice(total_pp_samples, size=num_pp_samples, replace=False)

    # Translate coordinate labels into positional indices for isel().
    for key in coords.keys():
        coords[key] = np.where(np.in1d(observed_data[key], coords[key]))[0]

    obs_plotters = filter_plotters_list(
        list(
            xarray_var_iter(
                observed_data.isel(coords),
                skip_dims=set(flatten),
                var_names=var_names,
                combined=True,
            )
        ),
        "plot_ppc",
    )
    length_plotters = len(obs_plotters)
    pp_plotters = [
        tup
        for _, tup in zip(
            range(length_plotters),
            xarray_var_iter(
                predictive_dataset.isel(coords),
                var_names=pp_var_names,
                skip_dims=set(flatten_pp),
                combined=True,
            ),
        )
    ]
    rows, cols = default_grid(length_plotters, grid=grid)

    ppcplot_kwargs = dict(
        ax=ax,
        length_plotters=length_plotters,
        rows=rows,
        cols=cols,
        figsize=figsize,
        animated=animated,
        obs_plotters=obs_plotters,
        pp_plotters=pp_plotters,
        predictive_dataset=predictive_dataset,
        pp_sample_ix=pp_sample_ix,
        kind=kind,
        alpha=alpha,
        colors=colors,
        jitter=jitter,
        textsize=textsize,
        mean=mean,
        observed=observed,
        observed_rug=observed_rug,
        total_pp_samples=total_pp_samples,
        legend=legend,
        labeller=labeller,
        group=group,
        animation_kwargs=animation_kwargs,
        num_pp_samples=num_pp_samples,
        backend_kwargs=backend_kwargs,
        show=show,
    )

    # TODO: Add backend kwargs
    plot = get_plotting_function("plot_ppc", "ppcplot", backend)
    axes = plot(**ppcplot_kwargs)
    return axes
| {
"content_hash": "265b3c6848c8197948798ec38b52967e",
"timestamp": "",
"source": "github",
"line_count": 362,
"max_line_length": 100,
"avg_line_length": 37.574585635359114,
"alnum_prop": 0.6432877518012057,
"repo_name": "arviz-devs/arviz",
"id": "18480edbe747e78d7200cef14fbb326db6bb7613",
"size": "13602",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "arviz/plots/ppcplot.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "5900"
},
{
"name": "Dockerfile",
"bytes": "1771"
},
{
"name": "HTML",
"bytes": "1343"
},
{
"name": "Jupyter Notebook",
"bytes": "641262"
},
{
"name": "Makefile",
"bytes": "688"
},
{
"name": "PowerShell",
"bytes": "2668"
},
{
"name": "Python",
"bytes": "1634423"
},
{
"name": "R",
"bytes": "248"
},
{
"name": "Shell",
"bytes": "7276"
},
{
"name": "TeX",
"bytes": "24620"
}
],
"symlink_target": ""
} |
'''Package info module
'''
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
with_statement,
)
import os
__author__ = 'soutys <soutys@example.com>'

# Read the package version from the VERSION file shipped next to this module.
# A context manager closes the file handle promptly; the previous bare
# open(...).read() leaked the handle until garbage collection (and warns
# under ResourceWarning).
with open(
        os.path.join(os.path.dirname(__file__), 'VERSION'), 'r') as _version_file:
    __version__ = _version_file.read().strip()


# https://pypi.python.org/pypi?%3Aaction=list_classifiers
__classifiers__ = [
    'Development Status :: 1 - Planning',
    # 'Development Status :: 2 - Pre-Alpha',
    # 'Development Status :: 3 - Alpha',
    # 'Development Status :: 4 - Beta',
    # 'Development Status :: 5 - Production/Stable',
    # 'Development Status :: 6 - Mature',
    # 'Development Status :: 7 - Inactive',
    'Environment :: Other Environment',
    'Intended Audience :: Developers',
    'Intended Audience :: System Administrators',
    'License :: OSI Approved :: MIT License',
    'Operating System :: POSIX',
    'Operating System :: POSIX :: Linux',
    'Programming Language :: Python',
    'Programming Language :: Python :: 3 :: Only',
    'Topic :: Scientific/Engineering',
    'Topic :: Scientific/Engineering :: Information Analysis',
    'Topic :: System',
    'Topic :: System :: Benchmark',
    'Topic :: System :: Monitoring',
    'Topic :: System :: Networking :: Monitoring',
    'Topic :: System :: Systems Administration',
    'Topic :: Utilities',
]
# vim: ts=4:sw=4:et:fdm=indent:ff=unix
| {
"content_hash": "c26bb8c6d9c1c03fbada4be4e9a25296",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 75,
"avg_line_length": 30.977777777777778,
"alnum_prop": 0.6219512195121951,
"repo_name": "soutys/metricol",
"id": "743646168a03f7eb21f022b94c8a2e3272282c28",
"size": "1419",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "metricol/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "56685"
}
],
"symlink_target": ""
} |
from operator import not_
from docutils import nodes
from docutils.parsers.rst import directives, Directive
from pygments import highlight
from pygments.util import ClassNotFound
from pygments.lexers import get_lexer_by_name, guess_lexer, TextLexer
from pygments.formatters import HtmlFormatter
class CodeHighlight(Directive):
    """
    ReStructured Text Highlight Code Directive.

    Usage:

    In your textarea, input lines like this or using markitup editor to add
    markup lines into textarea.

    .. code:: python
        :linenos:

        from django.conf import settings

        class MyClass(object):
            def __init__():
                ...

    It will highlight code by pygments automatically.
    """
    # Directive argument spec. NOTE: docutils expects the attribute to be
    # spelled `required_arguments`; the previous `require_arguments` was a
    # typo that docutils silently ignored (falling back to the base-class
    # default). The single optional argument is the language name.
    required_arguments = 0
    optional_arguments = 1
    final_argument_whitespace = True
    # `not_` coerces the (possibly None) option value to a bool flag.
    option_spec = {
        'linenos' : not_,
        'style' : directives.unchanged,
        'noclasses' : not_,
    }

    # define default options
    defaults = {
        'linenos' : 'table',
        'style' : 'default',
        'noclasses' : False,
    }

    controller = None
    has_content = True

    def run(self) :
        """Highlight the directive content with pygments and emit raw HTML."""
        text = u'\n'.join(self.content)
        try :
            # Use the corresponding lexer, if a language was specified.
            lexer = get_lexer_by_name(self.arguments[0])
        except IndexError :
            try :
                # No language was specified, so take an educated guess.
                lexer = guess_lexer(text)
            except ClassNotFound :
                # That didn't work either, use a standard TextLexer; no
                # highlighting.
                lexer = TextLexer()

        # Get current state from controller, supplied options.
        overrides = {}
        # overrides.update(self.controller.state)
        overrides.update(self.options)

        highlighted_text = highlight(text, lexer, HtmlFormatter(**overrides))
        return [nodes.raw('', highlighted_text, format='html')]
| {
"content_hash": "87906bf9198ef984cb0755d2d8fe27bd",
"timestamp": "",
"source": "github",
"line_count": 70,
"max_line_length": 77,
"avg_line_length": 28.67142857142857,
"alnum_prop": 0.6163428001993024,
"repo_name": "indexofire/feincms-markup",
"id": "f001822895334f65487a107406ad48fcc7aef712",
"size": "2031",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "feincms_markup/extensions/restructuredtext/directives/code_highlight.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "58257"
},
{
"name": "Python",
"bytes": "12215"
}
],
"symlink_target": ""
} |
from telemetry.page import page
from telemetry.page import shared_page_state
from telemetry import story
class PluginPowerSaverPageSet(story.StorySet):
  """Story set with a small-plugin page for plugin power-saver benchmarks."""

  def __init__(self):
    super(PluginPowerSaverPageSet, self).__init__(
        archive_data_file='data/plugin_power_saver.json',
        cloud_storage_bucket=story.PUBLIC_BUCKET)

    small_plugin_page = page.Page(
        'http://a.tommycli.com/small_only.html',
        page_set=self,
        shared_page_state_class=shared_page_state.SharedDesktopPageState)
    self.AddStory(small_plugin_page)
| {
"content_hash": "f21917b7f7f5d5aced357ea82fdeda53",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 74,
"avg_line_length": 36.5,
"alnum_prop": 0.7201565557729941,
"repo_name": "hujiajie/chromium-crosswalk",
"id": "ec3e6faa5c2edd554792cf770abeda8ddc53d646",
"size": "673",
"binary": false,
"copies": "21",
"ref": "refs/heads/master",
"path": "tools/perf/page_sets/plugin_power_saver.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
from __future__ import absolute_import, unicode_literals
import os
import platform
import shutil
import tempfile
import unittest
import pykka
from mopidy import core
from mopidy.internal import deprecation
from mopidy.m3u.backend import M3UBackend
from mopidy.models import Playlist, Track
from tests import dummy_audio, path_to_data_dir
from tests.m3u import generate_song
class M3UPlaylistsProviderTest(unittest.TestCase):

    """Tests for the M3U backend's playlists provider.

    setUp points the backend at a fresh temporary playlists directory,
    starts the backend actor and wraps it in a core; tearDown stops all
    actors and removes the directory again.

    Consistency fix: the latin1/utf8 tests used the deprecated
    ``playlist.copy(...)`` while every other test uses
    ``playlist.replace(...)``; they now use ``replace`` too.
    """

    backend_class = M3UBackend
    config = {
        'm3u': {
            'enabled': True,
            'base_dir': None,
            'default_encoding': 'latin-1',
            'default_extension': '.m3u',
            'playlists_dir': path_to_data_dir(''),
        }
    }

    def setUp(self): # noqa: N802
        self.config['m3u']['playlists_dir'] = tempfile.mkdtemp()
        self.playlists_dir = self.config['m3u']['playlists_dir']
        self.base_dir = self.config['m3u']['base_dir'] or self.playlists_dir
        audio = dummy_audio.create_proxy()
        backend = M3UBackend.start(
            config=self.config, audio=audio).proxy()
        self.core = core.Core(backends=[backend])

    def tearDown(self): # noqa: N802
        pykka.ActorRegistry.stop_all()
        if os.path.exists(self.playlists_dir):
            shutil.rmtree(self.playlists_dir)

    def test_created_playlist_is_persisted(self):
        uri = 'm3u:test.m3u'
        path = os.path.join(self.playlists_dir, b'test.m3u')
        self.assertFalse(os.path.exists(path))
        playlist = self.core.playlists.create('test')
        self.assertEqual('test', playlist.name)
        self.assertEqual(uri, playlist.uri)
        self.assertTrue(os.path.exists(path))

    def test_create_sanitizes_playlist_name(self):
        playlist = self.core.playlists.create(' ../../test FOO baR ')
        self.assertEqual('..|..|test FOO baR', playlist.name)
        path = os.path.join(self.playlists_dir, b'..|..|test FOO baR.m3u')
        self.assertEqual(self.playlists_dir, os.path.dirname(path))
        self.assertTrue(os.path.exists(path))

    def test_saved_playlist_is_persisted(self):
        uri1 = 'm3u:test1.m3u'
        uri2 = 'm3u:test2.m3u'
        path1 = os.path.join(self.playlists_dir, b'test1.m3u')
        path2 = os.path.join(self.playlists_dir, b'test2.m3u')
        playlist = self.core.playlists.create('test1')
        self.assertEqual('test1', playlist.name)
        self.assertEqual(uri1, playlist.uri)
        self.assertTrue(os.path.exists(path1))
        self.assertFalse(os.path.exists(path2))
        # Renaming the playlist moves the file to the new uri/path.
        playlist = self.core.playlists.save(playlist.replace(name='test2'))
        self.assertEqual('test2', playlist.name)
        self.assertEqual(uri2, playlist.uri)
        self.assertFalse(os.path.exists(path1))
        self.assertTrue(os.path.exists(path2))

    def test_deleted_playlist_is_removed(self):
        uri = 'm3u:test.m3u'
        path = os.path.join(self.playlists_dir, b'test.m3u')
        self.assertFalse(os.path.exists(path))
        playlist = self.core.playlists.create('test')
        self.assertEqual('test', playlist.name)
        self.assertEqual(uri, playlist.uri)
        self.assertTrue(os.path.exists(path))
        self.core.playlists.delete(playlist.uri)
        self.assertFalse(os.path.exists(path))

    def test_playlist_contents_is_written_to_disk(self):
        track = Track(uri=generate_song(1))
        playlist = self.core.playlists.create('test')
        playlist = self.core.playlists.save(playlist.replace(tracks=[track]))
        path = os.path.join(self.playlists_dir, b'test.m3u')
        with open(path) as f:
            contents = f.read()
        self.assertEqual(track.uri, contents.strip())

    def test_extended_playlist_contents_is_written_to_disk(self):
        track = Track(uri=generate_song(1), name='Test', length=60000)
        playlist = self.core.playlists.create('test')
        playlist = self.core.playlists.save(playlist.replace(tracks=[track]))
        path = os.path.join(self.playlists_dir, b'test.m3u')
        with open(path) as f:
            m3u = f.read().splitlines()
        self.assertEqual(['#EXTM3U', '#EXTINF:-1,Test', track.uri], m3u)

    def test_latin1_playlist_contents_is_written_to_disk(self):
        track = Track(uri=generate_song(1), name='Test\x9f', length=60000)
        playlist = self.core.playlists.create('test')
        # Use replace() like the other tests; copy() is the deprecated alias.
        playlist = self.core.playlists.save(playlist.replace(tracks=[track]))
        path = os.path.join(self.playlists_dir, b'test.m3u')
        with open(path, 'rb') as f:
            m3u = f.read().splitlines()
        self.assertEqual([b'#EXTM3U', b'#EXTINF:-1,Test\x9f', track.uri], m3u)

    def test_utf8_playlist_contents_is_replaced_and_written_to_disk(self):
        track = Track(uri=generate_song(1), name='Test\u07b4', length=60000)
        playlist = self.core.playlists.create('test')
        # Use replace() like the other tests; copy() is the deprecated alias.
        playlist = self.core.playlists.save(playlist.replace(tracks=[track]))
        path = os.path.join(self.playlists_dir, b'test.m3u')
        with open(path, 'rb') as f:
            m3u = f.read().splitlines()
        # Characters outside latin-1 are replaced with '?' on write.
        self.assertEqual([b'#EXTM3U', b'#EXTINF:-1,Test?', track.uri], m3u)

    def test_playlists_are_loaded_at_startup(self):
        track = Track(uri='dummy:track:path2')
        playlist = self.core.playlists.create('test')
        playlist = playlist.replace(tracks=[track])
        playlist = self.core.playlists.save(playlist)
        self.assertEqual(len(self.core.playlists.as_list()), 1)
        result = self.core.playlists.lookup(playlist.uri)
        self.assertEqual(playlist.uri, result.uri)
        self.assertEqual(playlist.name, result.name)
        self.assertEqual(track.uri, result.tracks[0].uri)

    def test_load_playlist_with_nonfilesystem_encoding_of_filename(self):
        # A latin-1 encoded filename is not valid UTF-8 on disk.
        path = os.path.join(self.playlists_dir, 'øæå.m3u'.encode('latin-1'))
        with open(path, 'wb+') as f:
            f.write(b'#EXTM3U\n')
        self.core.playlists.refresh()
        self.assertEqual(len(self.core.playlists.as_list()), 1)
        result = self.core.playlists.as_list()
        if platform.system() == 'Darwin':
            self.assertEqual('%F8%E6%E5', result[0].name)
        else:
            self.assertEqual('\ufffd\ufffd\ufffd', result[0].name)

    @unittest.SkipTest
    def test_playlists_dir_is_created(self):
        pass

    def test_create_returns_playlist_with_name_set(self):
        playlist = self.core.playlists.create('test')
        self.assertEqual(playlist.name, 'test')

    def test_create_returns_playlist_with_uri_set(self):
        playlist = self.core.playlists.create('test')
        self.assert_(playlist.uri)

    def test_create_adds_playlist_to_playlists_collection(self):
        playlist = self.core.playlists.create('test')
        playlists = self.core.playlists.as_list()
        self.assertIn(playlist.uri, [ref.uri for ref in playlists])

    def test_as_list_empty_to_start_with(self):
        self.assertEqual(len(self.core.playlists.as_list()), 0)

    def test_delete_non_existant_playlist(self):
        self.core.playlists.delete('m3u:unknown')

    def test_delete_playlist_removes_it_from_the_collection(self):
        playlist = self.core.playlists.create('test')
        self.assertEqual(playlist, self.core.playlists.lookup(playlist.uri))
        self.core.playlists.delete(playlist.uri)
        self.assertIsNone(self.core.playlists.lookup(playlist.uri))

    def test_delete_playlist_without_file(self):
        playlist = self.core.playlists.create('test')
        self.assertEqual(playlist, self.core.playlists.lookup(playlist.uri))
        path = os.path.join(self.playlists_dir, b'test.m3u')
        self.assertTrue(os.path.exists(path))
        os.remove(path)
        self.assertFalse(os.path.exists(path))
        self.core.playlists.delete(playlist.uri)
        self.assertIsNone(self.core.playlists.lookup(playlist.uri))

    def test_lookup_finds_playlist_by_uri(self):
        original_playlist = self.core.playlists.create('test')
        looked_up_playlist = self.core.playlists.lookup(original_playlist.uri)
        self.assertEqual(original_playlist, looked_up_playlist)

    def test_refresh(self):
        playlist = self.core.playlists.create('test')
        self.assertEqual(playlist, self.core.playlists.lookup(playlist.uri))
        self.core.playlists.refresh()
        self.assertEqual(playlist, self.core.playlists.lookup(playlist.uri))

    def test_save_replaces_existing_playlist_with_updated_playlist(self):
        playlist1 = self.core.playlists.create('test1')
        self.assertEqual(playlist1, self.core.playlists.lookup(playlist1.uri))
        playlist2 = playlist1.replace(name='test2')
        playlist2 = self.core.playlists.save(playlist2)
        self.assertIsNone(self.core.playlists.lookup(playlist1.uri))
        self.assertEqual(playlist2, self.core.playlists.lookup(playlist2.uri))

    def test_create_replaces_existing_playlist_with_updated_playlist(self):
        track = Track(uri=generate_song(1))
        playlist1 = self.core.playlists.create('test')
        playlist1 = self.core.playlists.save(playlist1.replace(tracks=[track]))
        self.assertEqual(playlist1, self.core.playlists.lookup(playlist1.uri))
        playlist2 = self.core.playlists.create('test')
        self.assertEqual(playlist1.uri, playlist2.uri)
        self.assertNotEqual(
            playlist1, self.core.playlists.lookup(playlist1.uri))
        self.assertEqual(playlist2, self.core.playlists.lookup(playlist1.uri))

    def test_save_playlist_with_new_uri(self):
        uri = 'm3u:test.m3u'
        self.core.playlists.save(Playlist(uri=uri))
        path = os.path.join(self.playlists_dir, b'test.m3u')
        self.assertTrue(os.path.exists(path))

    def test_playlist_with_unknown_track(self):
        track = Track(uri='file:///dev/null')
        playlist = self.core.playlists.create('test')
        playlist = playlist.replace(tracks=[track])
        playlist = self.core.playlists.save(playlist)
        self.assertEqual(len(self.core.playlists.as_list()), 1)
        result = self.core.playlists.lookup('m3u:test.m3u')
        self.assertEqual('m3u:test.m3u', result.uri)
        self.assertEqual(playlist.name, result.name)
        self.assertEqual(track.uri, result.tracks[0].uri)

    def test_playlist_with_absolute_path(self):
        track = Track(uri='/tmp/test.mp3')
        filepath = b'/tmp/test.mp3'
        playlist = self.core.playlists.create('test')
        playlist = playlist.replace(tracks=[track])
        playlist = self.core.playlists.save(playlist)
        self.assertEqual(len(self.core.playlists.as_list()), 1)
        result = self.core.playlists.lookup('m3u:test.m3u')
        self.assertEqual('m3u:test.m3u', result.uri)
        self.assertEqual(playlist.name, result.name)
        self.assertEqual('file://' + filepath, result.tracks[0].uri)

    def test_playlist_with_relative_path(self):
        track = Track(uri='test.mp3')
        # Relative track paths resolve against base_dir (playlists_dir when
        # base_dir is None; see setUp).
        filepath = os.path.join(self.base_dir, b'test.mp3')
        playlist = self.core.playlists.create('test')
        playlist = playlist.replace(tracks=[track])
        playlist = self.core.playlists.save(playlist)
        self.assertEqual(len(self.core.playlists.as_list()), 1)
        result = self.core.playlists.lookup('m3u:test.m3u')
        self.assertEqual('m3u:test.m3u', result.uri)
        self.assertEqual(playlist.name, result.name)
        self.assertEqual('file://' + filepath, result.tracks[0].uri)

    def test_playlist_sort_order(self):
        def check_order(playlists, names):
            self.assertEqual(names, [playlist.name for playlist in playlists])
        self.core.playlists.create('c')
        self.core.playlists.create('a')
        self.core.playlists.create('b')
        check_order(self.core.playlists.as_list(), ['a', 'b', 'c'])
        self.core.playlists.refresh()
        check_order(self.core.playlists.as_list(), ['a', 'b', 'c'])
        playlist = self.core.playlists.lookup('m3u:a.m3u')
        playlist = playlist.replace(name='d')
        playlist = self.core.playlists.save(playlist)
        check_order(self.core.playlists.as_list(), ['b', 'c', 'd'])
        self.core.playlists.delete('m3u:c.m3u')
        check_order(self.core.playlists.as_list(), ['b', 'd'])

    def test_get_items_returns_item_refs(self):
        track = Track(uri='dummy:a', name='A', length=60000)
        playlist = self.core.playlists.create('test')
        playlist = self.core.playlists.save(playlist.replace(tracks=[track]))
        item_refs = self.core.playlists.get_items(playlist.uri)
        self.assertEqual(len(item_refs), 1)
        self.assertEqual(item_refs[0].type, 'track')
        self.assertEqual(item_refs[0].uri, 'dummy:a')
        self.assertEqual(item_refs[0].name, 'A')

    def test_get_items_of_unknown_playlist_returns_none(self):
        item_refs = self.core.playlists.get_items('dummy:unknown')
        self.assertIsNone(item_refs)
class M3UPlaylistsProviderBaseDirectoryTest(M3UPlaylistsProviderTest):

    """Repeats the provider tests with m3u/base_dir set to a real directory
    (instead of None), so relative track paths resolve against base_dir.

    Fix: the mkdtemp() directory created here was never removed — the parent
    tearDown only removes playlists_dir — leaking one temp dir per test.
    """

    def setUp(self): # noqa: N802
        self.config['m3u']['base_dir'] = tempfile.mkdtemp()
        super(M3UPlaylistsProviderBaseDirectoryTest, self).setUp()

    def tearDown(self): # noqa: N802
        super(M3UPlaylistsProviderBaseDirectoryTest, self).tearDown()
        # Clean up the base_dir temp directory created in setUp.
        if os.path.exists(self.base_dir):
            shutil.rmtree(self.base_dir)
class DeprecatedM3UPlaylistsProviderTest(M3UPlaylistsProviderTest):

    """Re-runs the whole provider suite plus extra tests for the deprecated
    core.playlists.filter()/get_playlists() APIs, with their deprecation
    warnings suppressed."""

    def run(self, result=None):
        # Suppress the listed deprecation warnings for every test in this
        # class, including the ones inherited from the parent class.
        with deprecation.ignore(ids=['core.playlists.filter',
                                     'core.playlists.filter:kwargs_criteria',
                                     'core.playlists.get_playlists']):
            return super(DeprecatedM3UPlaylistsProviderTest, self).run(result)

    def test_filter_without_criteria(self):
        # filter() with no criteria is equivalent to get_playlists().
        self.assertEqual(self.core.playlists.get_playlists(),
                         self.core.playlists.filter())

    def test_filter_with_wrong_criteria(self):
        self.assertEqual([], self.core.playlists.filter(name='foo'))

    def test_filter_with_right_criteria(self):
        playlist = self.core.playlists.create('test')
        playlists = self.core.playlists.filter(name='test')
        self.assertEqual([playlist], playlists)

    def test_filter_by_name_returns_single_match(self):
        self.core.playlists.create('a')
        playlist = self.core.playlists.create('b')
        self.assertEqual([playlist], self.core.playlists.filter(name='b'))

    def test_filter_by_name_returns_no_matches(self):
        self.core.playlists.create('a')
        self.core.playlists.create('b')
        self.assertEqual([], self.core.playlists.filter(name='c'))
| {
"content_hash": "5f692efa5a6c0baede7cd5dabb1ae5ba",
"timestamp": "",
"source": "github",
"line_count": 369,
"max_line_length": 79,
"avg_line_length": 39.333333333333336,
"alnum_prop": 0.651439988976161,
"repo_name": "vrs01/mopidy",
"id": "e0ea1ce473948aa4a746fbcba8db79c7f74cd05a",
"size": "14536",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "tests/m3u/test_playlists.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "610"
},
{
"name": "Groff",
"bytes": "573"
},
{
"name": "HTML",
"bytes": "805"
},
{
"name": "JavaScript",
"bytes": "82060"
},
{
"name": "Python",
"bytes": "1192583"
},
{
"name": "Shell",
"bytes": "556"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):

    """Auto-generated Django migration: make card.hp nullable
    (PositiveIntegerField(null=True)). Do not hand-edit generated
    migrations except to resolve conflicts."""

    dependencies = [
        ('api', '0013_auto_20150204_1642'),
    ]

    operations = [
        migrations.AlterField(
            model_name='card',
            name='hp',
            field=models.PositiveIntegerField(null=True),
            preserve_default=True,
        ),
    ]
| {
"content_hash": "6897262ce6ff6d145e062456bbe5b3f8",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 57,
"avg_line_length": 21.57894736842105,
"alnum_prop": 0.5853658536585366,
"repo_name": "SchoolIdolTomodachi/SchoolIdolAPI",
"id": "ec76bf85757ab001f4eaf0f9655b65bcf8a31ddf",
"size": "434",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "api/migrations/0014_auto_20150204_1643.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "67801"
},
{
"name": "HTML",
"bytes": "474730"
},
{
"name": "JavaScript",
"bytes": "93928"
},
{
"name": "Python",
"bytes": "748300"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
import unittest
import hello_world
class HelloWorldTests(unittest.TestCase):

    """Unit tests for the hello_world.hello() greeting function."""

    def test_hello_without_name(self):
        """No argument yields the default greeting."""
        self.assertEqual('Hello, World!', hello_world.hello())

    def test_hello_with_sample_name(self):
        """A plain name is echoed back in the greeting."""
        self.assertEqual('Hello, Alice!', hello_world.hello('Alice'))

    def test_hello_with_other_sample_name(self):
        """A second name, to rule out a hard-coded answer."""
        self.assertEqual('Hello, Bob!', hello_world.hello('Bob'))

    def test_hello_with_umlaut_name(self):
        """Non-ASCII names pass through unchanged."""
        self.assertEqual('Hello, Jürgen!', hello_world.hello('Jürgen'))

    def test_hello_with_blank_name(self):
        """An empty string falls back to the default greeting."""
        self.assertEqual('Hello, World!', hello_world.hello(''))

    def test_hello_with_none_name(self):
        """None falls back to the default greeting."""
        self.assertEqual('Hello, World!', hello_world.hello(None))
# Allow running this test module directly (python hello_world_test.py).
if __name__ == '__main__':
    unittest.main()
| {
"content_hash": "fb817ac22d58f6ebe3674d44f9a31147",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 48,
"avg_line_length": 22.106382978723403,
"alnum_prop": 0.534167468719923,
"repo_name": "christopher-demarco/exercism-exercises",
"id": "7fc4a4cf79ca78e62a6291345f3a0930c8617531",
"size": "1066",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "python/hello-world/hello_world_test.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "20219"
}
],
"symlink_target": ""
} |
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from resource_management import *
import status_params
# server configurations
# `config` is the command JSON that Ambari sends to the agent for this host;
# all cluster/host settings below are read from it.
config = Script.get_config()

hive_metastore_user_name = config['configurations']['hive-site']['javax.jdo.option.ConnectionUserName']
hive_server_conf_dir = "/etc/hive/conf.server"
hive_jdbc_connection_url = config['configurations']['hive-site']['javax.jdo.option.ConnectionURL']
hive_metastore_user_passwd = config['configurations']['hive-site']['javax.jdo.option.ConnectionPassword']
#users
hive_user = config['configurations']['global']['hive_user']
hive_lib = '/usr/lib/hive/lib/'
#JDBC driver jar name
# NOTE(review): other `default()` calls in this file use a '/configurations/...'
# style path — confirm 'hive_jdbc_driver' is the intended lookup key here.
hive_jdbc_driver = default('hive_jdbc_driver', 'com.mysql.jdbc.Driver')
if hive_jdbc_driver == "com.mysql.jdbc.Driver":
  jdbc_jar_name = "mysql-connector-java.jar"
elif hive_jdbc_driver == "oracle.jdbc.driver.OracleDriver":
  jdbc_jar_name = "ojdbc6.jar"
# NOTE(review): jdbc_jar_name stays undefined for any other driver value;
# driver_curl_target/target below would then raise NameError — confirm only
# mysql/oracle are supported by this stack definition.
check_db_connection_jar_name = "DBConnectionVerification.jar"
check_db_connection_jar = format("/usr/lib/ambari-agent/{check_db_connection_jar_name}")
#common
hive_metastore_port = config['configurations']['global']['hive_metastore_port']
hive_var_lib = '/var/lib/hive'
hive_server_host = config['clusterHostInfo']['hive_server_host']
hive_url = format("jdbc:hive2://{hive_server_host}:10000")
# Smoke-test (service check) parameters.
smokeuser = config['configurations']['global']['smokeuser']
smoke_test_sql = "/tmp/hiveserver2.sql"
smoke_test_path = "/tmp/hiveserver2Smoke.sh"
smoke_user_keytab = config['configurations']['global']['smokeuser_keytab']
security_enabled = config['configurations']['global']['security_enabled']
kinit_path_local = functions.get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
hive_metastore_keytab_path = config['configurations']['hive-site']['hive.metastore.kerberos.keytab.file']
#hive_env
hive_conf_dir = "/etc/hive/conf"
hive_dbroot = config['configurations']['global']['hive_dbroot']
hive_log_dir = config['configurations']['global']['hive_log_dir']
hive_pid_dir = status_params.hive_pid_dir
hive_pid = status_params.hive_pid
#hive-site
hive_database_name = config['configurations']['global']['hive_database_name']
#Starting hiveserver2
start_hiveserver2_script = 'startHiveserver2.sh'
hadoop_home = '/usr'
##Starting metastore
start_metastore_script = 'startMetastore.sh'
hive_metastore_pid = status_params.hive_metastore_pid
# JDBC driver jar is downloaded from the Ambari server (jdk_location) and
# placed both in /usr/share/java and in the Hive lib dir.
java_share_dir = '/usr/share/java'
driver_curl_target = format("{java_share_dir}/{jdbc_jar_name}")
hdfs_user = config['configurations']['global']['hdfs_user']
user_group = config['configurations']['global']['user_group']
artifact_dir = "/tmp/HDP-artifacts/"
target = format("{hive_lib}/{jdbc_jar_name}")
jdk_location = config['ambariLevelParams']['jdk_location']
driver_curl_source = format("{jdk_location}/{jdbc_jar_name}")
start_hiveserver2_path = "/tmp/start_hiveserver2_script"
start_metastore_path = "/tmp/start_metastore_script"
hive_aux_jars_path = config['configurations']['global']['hive_aux_jars_path']
hadoop_heapsize = config['configurations']['global']['hadoop_heapsize']
java64_home = config['ambariLevelParams']['java_home']
##### MYSQL
db_name = config['configurations']['global']['hive_database_name']
mysql_user = "mysql"
mysql_group = 'mysql'
mysql_host = config['clusterHostInfo']['hive_mysql_host']
mysql_adduser_path = "/tmp/addMysqlUser.sh"
########## HCAT
hcat_conf_dir = '/etc/hcatalog/conf'
# NOTE(review): 9933 differs from hive_metastore_port above — presumably an
# HCatalog-specific embedded metastore port; confirm against the stack docs.
metastore_port = 9933
hcat_lib = '/usr/lib/hcatalog/share/hcatalog'
hcat_dbroot = hcat_lib
hcat_user = config['configurations']['global']['hcat_user']
webhcat_user = config['configurations']['global']['webhcat_user']
hcat_pid_dir = status_params.hcat_pid_dir
hcat_log_dir = config['configurations']['global']['hcat_log_dir'] #hcat_log_dir
hadoop_conf_dir = '/etc/hadoop/conf'
| {
"content_hash": "32ca53a8fc41cedf5f728f8d4d930dd5",
"timestamp": "",
"source": "github",
"line_count": 122,
"max_line_length": 114,
"avg_line_length": 36.81967213114754,
"alnum_prop": 0.7424309884238647,
"repo_name": "sekikn/ambari",
"id": "c27f79bbcd93afa9b7d224aab8d5739a8fb2dd49",
"size": "4514",
"binary": false,
"copies": "4",
"ref": "refs/heads/trunk",
"path": "ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/params.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "22734"
},
{
"name": "C",
"bytes": "109499"
},
{
"name": "C#",
"bytes": "182799"
},
{
"name": "CSS",
"bytes": "616806"
},
{
"name": "CoffeeScript",
"bytes": "4323"
},
{
"name": "Dockerfile",
"bytes": "8117"
},
{
"name": "HTML",
"bytes": "3725781"
},
{
"name": "Handlebars",
"bytes": "1594385"
},
{
"name": "Java",
"bytes": "26670585"
},
{
"name": "JavaScript",
"bytes": "14647486"
},
{
"name": "Jinja",
"bytes": "147938"
},
{
"name": "Less",
"bytes": "303080"
},
{
"name": "Makefile",
"bytes": "2407"
},
{
"name": "PHP",
"bytes": "149648"
},
{
"name": "PLpgSQL",
"bytes": "298247"
},
{
"name": "PowerShell",
"bytes": "2047735"
},
{
"name": "Python",
"bytes": "7226684"
},
{
"name": "R",
"bytes": "1457"
},
{
"name": "Shell",
"bytes": "350773"
},
{
"name": "TSQL",
"bytes": "42351"
},
{
"name": "Vim Script",
"bytes": "5813"
},
{
"name": "sed",
"bytes": "1133"
}
],
"symlink_target": ""
} |
"""
PyChromecast: remote control your Chromecast
"""
from __future__ import print_function
import logging
import fnmatch
# pylint: disable=wildcard-import
import threading
from .config import * # noqa
from .error import * # noqa
from . import socket_client
from .discovery import discover_chromecasts
from .dial import get_device_status, reboot
from .controllers.media import STREAM_TYPE_BUFFERED # noqa
# App ID treated by Chromecast.is_idle as "no app running".
IDLE_APP_ID = 'E8C28D3C'
# fnmatch patterns of device friendly names whose CEC active-input data
# should be ignored (see Chromecast.ignore_cec).
IGNORE_CEC = []
def _get_all_chromecasts(tries=None, retry_wait=None):
    """
    Discover every Chromecast on the network and wrap each one in a
    PyChromecast object. Hosts that fail to connect are silently skipped.
    """
    casts = []
    for ip_address, _ in discover_chromecasts():
        try:
            cast = Chromecast(host=ip_address, tries=tries,
                              retry_wait=retry_wait)
        except ChromecastConnectionError:
            continue
        casts.append(cast)
    return casts
def get_chromecasts(tries=None, retry_wait=None, **filters):
    """
    Search the network and return a list of Chromecast objects matching
    the given filters; may be empty if nothing matches.

    Filter keys are DeviceStatus fields (friendly_name, model_name,
    manufacturer, api_version), AppStatus fields (app_id, description,
    state, service_url, service_protocols) or the special key ``ip``.
    Example: get_chromecasts(friendly_name="Living Room")

    ``tries`` limits how often the underlying socket retries connecting;
    ``retry_wait`` is the number of seconds between retries (default 5).
    """
    logger = logging.getLogger(__name__)
    all_casts = set(_get_all_chromecasts(tries, retry_wait))
    if not filters:
        return list(all_casts)
    rejected = set()
    if 'ip' in filters:
        rejected.update(
            cast for cast in all_casts if cast.host != filters['ip'])
        filters.pop('ip')
    for key, val in filters.items():
        for cast in all_casts:
            # A filter key may live on either the device or the app status.
            for source in (cast.device, cast.status):
                if hasattr(source, key) and getattr(source, key) != val:
                    rejected.add(cast)
    # Shut down the socket clients of everything we are not returning.
    for cast in rejected:
        logger.debug("Stopping excluded chromecast %s", cast)
        cast.socket_client.stop.set()
    return list(all_casts - rejected)
def get_chromecasts_as_dict(tries=None, retry_wait=None, **filters):
    """
    Return a dict mapping each discovered Chromecast's friendly name to
    the PyChromecast object itself.

    ``tries``, ``retry_wait`` and ``filters`` are passed straight through
    to get_chromecasts().
    """
    casts = {}
    for cast in get_chromecasts(tries=tries, retry_wait=retry_wait,
                                **filters):
        casts[cast.device.friendly_name] = cast
    return casts
def get_chromecast(strict=False, tries=None, retry_wait=None, **filters):
    """
    Like get_chromecasts() but returns a single Chromecast (or None).

    With ``strict``, exactly one device must match: zero matches raise
    NoChromecastFoundError and several matches raise
    MultipleChromecastsFoundError. Without ``strict``, the first match
    (or None) is returned.

    ``tries`` limits how often the underlying socket retries connecting;
    ``retry_wait`` is the number of seconds between retries (default 5).

    :type retry_wait: float or None
    """
    # Only run the filtering scan when needed; with no filters and not
    # strict, the first discovered device will do.
    if filters or strict:
        results = get_chromecasts(tries=tries, retry_wait=retry_wait,
                                  **filters)
    else:
        results = _get_all_chromecasts(tries, retry_wait)
    if not results:
        if strict:
            raise NoChromecastFoundError(
                'No Chromecasts matching filter critera were found:'
                ' {}'.format(filters))
        return None
    if strict and len(results) > 1:
        raise MultipleChromecastsFoundError(
            'More than one Chromecast was found specifying '
            'the filter criteria: {}'.format(filters))
    return results[0]
# pylint: disable=too-many-instance-attributes
class Chromecast(object):
    """
    Class to interface with a ChromeCast.

    Wraps a SocketClient plus the device/app status reported by the cast
    device, and forwards the most common receiver and media operations.

    :param host: Hostname or IP address of the cast device.
    :param tries: Number of retries to perform if the connection fails.
        None for infinite retries.
    :param retry_wait: A floating point number specifying how many seconds
        to wait between each retry. None means to use the default, which
        is 5 seconds.
    """

    def __init__(self, host, tries=None, retry_wait=None):
        self.logger = logging.getLogger(__name__)
        # Resolve host to IP address
        self.host = host
        self.logger.info("Querying device status")
        # DIAL device status; falsy means the device is unreachable.
        self.device = get_device_status(self.host)
        if not self.device:
            raise ChromecastConnectionError(
                "Could not connect to {}".format(self.host))
        # Latest cast status; stays None until the first status message
        # arrives (see new_cast_status / wait).
        self.status = None
        self.status_event = threading.Event()
        self.socket_client = socket_client.SocketClient(
            host, tries, retry_wait=retry_wait)
        receiver_controller = self.socket_client.receiver_controller
        # Register ourselves so new_cast_status() gets called on updates.
        receiver_controller.register_status_listener(self)
        # Forward these methods
        self.set_volume = receiver_controller.set_volume
        self.set_volume_muted = receiver_controller.set_volume_muted
        self.play_media = self.socket_client.media_controller.play_media
        self.register_handler = self.socket_client.register_handler
        self.register_status_listener = \
            receiver_controller.register_status_listener
        self.register_launch_error_listener = \
            receiver_controller.register_launch_error_listener
        self.register_connection_listener = \
            self.socket_client.register_connection_listener
        # Start the socket client's connection thread.
        self.socket_client.start()

    @property
    def ignore_cec(self):
        """ Returns whether the CEC data should be ignored. """
        # True when the friendly name matches any fnmatch pattern in the
        # module-level IGNORE_CEC list.
        return self.device is not None and \
            any([fnmatch.fnmatchcase(self.device.friendly_name, pattern)
                for pattern in IGNORE_CEC])

    @property
    def is_idle(self):
        """ Returns if there is currently an app running. """
        # Idle when: no status yet, no app (or only the idle app), or the
        # cast is not the TV's active input (unless CEC data is ignored).
        return (self.status is None or
                self.app_id in (None, IDLE_APP_ID) or
                (not self.status.is_active_input and not self.ignore_cec))

    @property
    def app_id(self):
        """ Returns the current app_id, or None before the first status. """
        return self.status.app_id if self.status else None

    @property
    def app_display_name(self):
        """ Returns the name of the current running app. """
        return self.status.display_name if self.status else None

    @property
    def media_controller(self):
        """ Returns the media controller. """
        return self.socket_client.media_controller

    def new_cast_status(self, status):
        """ Called when a new status received from the Chromecast. """
        self.status = status
        # Wake up wait() once a real status arrives; clear again on a
        # falsy status so wait() blocks until the next one.
        if status:
            self.status_event.set()
        else:
            self.status_event.clear()

    def start_app(self, app_id):
        """ Start an app on the Chromecast. """
        self.logger.info("Starting app %s", app_id)
        self.socket_client.receiver_controller.launch_app(app_id)

    def quit_app(self):
        """ Tells the Chromecast to quit current app_id. """
        self.logger.info("Quiting current app")
        self.socket_client.receiver_controller.stop_app()

    def reboot(self):
        """ Reboots the Chromecast (via the DIAL reboot endpoint). """
        reboot(self.host)

    def volume_up(self):
        """ Increment volume by 0.1 unless it is already maxed.
        Returns the new volume.
        """
        volume = round(self.status.volume_level, 1)
        return self.set_volume(volume + 0.1)

    def volume_down(self):
        """ Decrement the volume by 0.1 unless it is already 0.
        Returns the new volume.
        """
        volume = round(self.status.volume_level, 1)
        return self.set_volume(volume - 0.1)

    def wait(self, timeout=None):
        """
        Waits until the cast device is ready for communication. The device
        is ready as soon a status message has been received.

        If the status has already been received then the method returns
        immediately.

        :param timeout: a floating point number specifying a timeout for the
                        operation in seconds (or fractions thereof). Or None
                        to block forever.
        """
        self.status_event.wait(timeout=timeout)

    def disconnect(self, timeout=None, blocking=True):
        """
        Disconnects the chromecast and waits for it to terminate.

        :param timeout: a floating point number specifying a timeout for the
                        operation in seconds (or fractions thereof). Or None
                        to block forever.
        :param blocking: If True it will block until the disconnection is
                         complete, otherwise it will return immediately.
        """
        self.socket_client.disconnect()
        if blocking:
            self.join(timeout=timeout)

    def join(self, timeout=None):
        """
        Blocks the thread of the caller until the chromecast connection is
        stopped.

        :param timeout: a floating point number specifying a timeout for the
                        operation in seconds (or fractions thereof). Or None
                        to block forever.
        """
        self.socket_client.join(timeout=timeout)

    def __del__(self):
        # Ask the socket client thread to stop when we are garbage
        # collected.
        self.socket_client.stop.set()

    def __repr__(self):
        return "Chromecast({}, {}, {}, {}, api={}.{})".format(
            self.host, self.device.friendly_name, self.device.model_name,
            self.device.manufacturer, self.device.api_version[0],
            self.device.api_version[1])
| {
"content_hash": "fda225ecf1734d34df2eef7aa247055b",
"timestamp": "",
"source": "github",
"line_count": 318,
"max_line_length": 77,
"avg_line_length": 34.367924528301884,
"alnum_prop": 0.6331777838777565,
"repo_name": "kongseokhwan/kulcloud-prism-chromecast-agent",
"id": "c22f34f0a1c1d415d24ffd056aa0c052793a8cfb",
"size": "10929",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pychromecast/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Protocol Buffer",
"bytes": "7702"
},
{
"name": "Python",
"bytes": "71665"
}
],
"symlink_target": ""
} |
"""
Django settings for actio_control project.
For more information on this file, see
https://docs.djangoproject.com/en/dev/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/dev/ref/settings/
"""
from __future__ import absolute_import, unicode_literals
import environ
# Filesystem anchors: ROOT_DIR is the repository root, APPS_DIR the Django
# project package directory.
ROOT_DIR = environ.Path(__file__) - 3 # (actio_control/config/settings/common.py - 3 = actio_control/)
APPS_DIR = ROOT_DIR.path('actio_control')
# Single environment reader used for every env-driven setting below.
env = environ.Env()
# APP CONFIGURATION
# ------------------------------------------------------------------------------
DJANGO_APPS = (
    # Default Django apps:
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    # Useful template tags:
    # 'django.contrib.humanize',
    # Admin
    'django.contrib.admin',
)
THIRD_PARTY_APPS = (
    'crispy_forms', # Form layouts
    'allauth', # registration
    'allauth.account', # registration
    'allauth.socialaccount', # registration
    # 'bootstrap_themes', # Twitter bootstrap
    'rest_framework',
)
# Apps specific for this project go here.
LOCAL_APPS = (
    # custom users app
    'actio_control.users.apps.UsersConfig',
    # Your stuff: custom apps go here
    'portal.apps.PortalConfig',
    'api.apps.ApiConfig',
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#installed-apps
INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS
# MIDDLEWARE CONFIGURATION
# ------------------------------------------------------------------------------
MIDDLEWARE_CLASSES = (
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
# MIGRATIONS CONFIGURATION
# ------------------------------------------------------------------------------
# Point django.contrib.sites at a project-local migrations package.
MIGRATION_MODULES = {
    'sites': 'actio_control.contrib.sites.migrations'
}
# DEBUG
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#debug
# Defaults to False; set DJANGO_DEBUG=True in the environment to enable.
DEBUG = env.bool('DJANGO_DEBUG', False)
# FIXTURE CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-FIXTURE_DIRS
FIXTURE_DIRS = (
    str(APPS_DIR.path('fixtures')),
)
# EMAIL CONFIGURATION
# ------------------------------------------------------------------------------
EMAIL_BACKEND = env('DJANGO_EMAIL_BACKEND', default='django.core.mail.backends.smtp.EmailBackend')
# MANAGER CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#admins
ADMINS = (
    ("""Pedro Bazan""", 'pedrob@merxbp.com'),
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#managers
MANAGERS = ADMINS
# DATABASE CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#databases
DATABASES = {
    # Raises ImproperlyConfigured exception if DATABASE_URL not in os.environ
    'default': env.db('DATABASE_URL', default='postgres:///actio_control'),
}
# Wrap each HTTP request in a database transaction.
DATABASES['default']['ATOMIC_REQUESTS'] = True
# GENERAL CONFIGURATION
# ------------------------------------------------------------------------------
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'UTC'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#language-code
LANGUAGE_CODE = 'en-us'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#site-id
SITE_ID = 1
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-i18n
USE_I18N = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-l10n
USE_L10N = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-tz
USE_TZ = True
# TEMPLATE CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#templates
TEMPLATES = [
    {
        # See: https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-TEMPLATES-BACKEND
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        # See: https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs
        'DIRS': [
            str(APPS_DIR.path('templates')),
        ],
        'OPTIONS': {
            # See: https://docs.djangoproject.com/en/dev/ref/settings/#template-debug
            'debug': DEBUG,
            # See: https://docs.djangoproject.com/en/dev/ref/settings/#template-loaders
            # https://docs.djangoproject.com/en/dev/ref/templates/api/#loader-types
            'loaders': [
                'django.template.loaders.filesystem.Loader',
                'django.template.loaders.app_directories.Loader',
            ],
            # See: https://docs.djangoproject.com/en/dev/ref/settings/#template-context-processors
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.template.context_processors.i18n',
                'django.template.context_processors.media',
                'django.template.context_processors.static',
                'django.template.context_processors.tz',
                'django.contrib.messages.context_processors.messages',
                # Your stuff: custom template context processors go here
            ],
        },
    },
]
# See: http://django-crispy-forms.readthedocs.io/en/latest/install.html#template-packs
CRISPY_TEMPLATE_PACK = 'bootstrap3'
# STATIC FILE CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-root
STATIC_ROOT = str(ROOT_DIR('staticfiles'))
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-url
STATIC_URL = '/static/'
# See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#std:setting-STATICFILES_DIRS
STATICFILES_DIRS = (
    str(APPS_DIR.path('static')),
)
# See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#staticfiles-finders
STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
# MEDIA CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#media-root
MEDIA_ROOT = str(APPS_DIR('media'))
# See: https://docs.djangoproject.com/en/dev/ref/settings/#media-url
MEDIA_URL = '/media/'
# URL Configuration
# ------------------------------------------------------------------------------
ROOT_URLCONF = 'config.urls'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#wsgi-application
WSGI_APPLICATION = 'config.wsgi.application'
# AUTHENTICATION CONFIGURATION
# ------------------------------------------------------------------------------
# Default ModelBackend plus a project-specific backend; the allauth
# backend is currently disabled.
AUTHENTICATION_BACKENDS = (
    'django.contrib.auth.backends.ModelBackend',
    # 'allauth.account.auth_backends.AuthenticationBackend',
    'portal.backends.ActioBackend',
)
# # Some really nice defaults
# ACCOUNT_AUTHENTICATION_METHOD = 'username'
# ACCOUNT_EMAIL_REQUIRED = True
# ACCOUNT_EMAIL_VERIFICATION = 'mandatory'
# ACCOUNT_ALLOW_REGISTRATION = env.bool('DJANGO_ACCOUNT_ALLOW_REGISTRATION', True)
# ACCOUNT_ADAPTER = 'actio_control.users.adapters.AccountAdapter'
# SOCIALACCOUNT_ADAPTER = 'actio_control.users.adapters.SocialAccountAdapter'
# Custom user app defaults
# Select the correct user model
AUTH_USER_MODEL = 'users.User'
LOGIN_REDIRECT_URL = '/'
LOGIN_URL = 'portal:login'
# SLUGLIFIER
AUTOSLUG_SLUGIFY_FUNCTION = 'slugify.slugify'
# django-compressor
# ------------------------------------------------------------------------------
# Location of root django.contrib.admin URL, use {% url 'admin:index' %}
ADMIN_URL = r'^admin/'
# Your common stuff: Below this line define 3rd party library settings
| {
"content_hash": "5629fac629a12eaa712dce00e42aa111",
"timestamp": "",
"source": "github",
"line_count": 242,
"max_line_length": 103,
"avg_line_length": 35.888429752066116,
"alnum_prop": 0.6078295912492804,
"repo_name": "betobaz/django_docker",
"id": "4e996d4ac0037af7612845859f9d3eb98e8f58e3",
"size": "8709",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "config/settings/common.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "5723"
},
{
"name": "HTML",
"bytes": "21329"
},
{
"name": "JavaScript",
"bytes": "5429"
},
{
"name": "Nginx",
"bytes": "1105"
},
{
"name": "Python",
"bytes": "72647"
},
{
"name": "Shell",
"bytes": "7508"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import, unicode_literals
from django.contrib import admin
from .models import UrlconfRevision
@admin.register(UrlconfRevision)
class UrlconfAdmin(admin.ModelAdmin):
    """Admin for UrlconfRevision showing only the stored revision value."""

    list_display = ('revision',)
| {
"content_hash": "0271315a3c2bb04a732f8442b2e4d1e4",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 56,
"avg_line_length": 19.142857142857142,
"alnum_prop": 0.7425373134328358,
"repo_name": "aldryn/aldryn-apphook-reload",
"id": "da43ccab5d7aa09fe94a1101402f683e7b947824",
"size": "292",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "aldryn_apphook_reload/admin.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "9171"
}
],
"symlink_target": ""
} |
import warnings
from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
from google.api_core import gapic_v1
from google.api_core import grpc_helpers_async
from google.api_core import operations_v1
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
import grpc # type: ignore
from grpc.experimental import aio # type: ignore
from google.cloud.aiplatform_v1beta1.types import study
from google.cloud.aiplatform_v1beta1.types import study as gca_study
from google.cloud.aiplatform_v1beta1.types import vizier_service
from google.cloud.location import locations_pb2 # type: ignore
from google.iam.v1 import iam_policy_pb2 # type: ignore
from google.iam.v1 import policy_pb2 # type: ignore
from google.longrunning import operations_pb2
from google.longrunning import operations_pb2 # type: ignore
from google.protobuf import empty_pb2 # type: ignore
from .base import VizierServiceTransport, DEFAULT_CLIENT_INFO
from .grpc import VizierServiceGrpcTransport
class VizierServiceGrpcAsyncIOTransport(VizierServiceTransport):
"""gRPC AsyncIO backend transport for VizierService.
Vertex AI Vizier API.
Vertex AI Vizier is a service to solve blackbox optimization
problems, such as tuning machine learning hyperparameters and
searching over deep learning architectures.
This class defines the same methods as the primary client, so the
primary client can load the underlying transport implementation
and call it.
It sends protocol buffers over the wire using gRPC (which is built on
top of HTTP/2); the ``grpcio`` package must be installed.
"""
_grpc_channel: aio.Channel
_stubs: Dict[str, Callable] = {}
@classmethod
def create_channel(
cls,
host: str = "aiplatform.googleapis.com",
credentials: ga_credentials.Credentials = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
**kwargs,
) -> aio.Channel:
"""Create and return a gRPC AsyncIO channel object.
Args:
host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
kwargs (Optional[dict]): Keyword arguments, which are passed to the
channel creation.
Returns:
aio.Channel: A gRPC AsyncIO channel object.
"""
return grpc_helpers_async.create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
quota_project_id=quota_project_id,
default_scopes=cls.AUTH_SCOPES,
scopes=scopes,
default_host=cls.DEFAULT_HOST,
**kwargs,
)
    def __init__(
        self,
        *,
        host: str = "aiplatform.googleapis.com",
        credentials: ga_credentials.Credentials = None,
        credentials_file: Optional[str] = None,
        scopes: Optional[Sequence[str]] = None,
        channel: aio.Channel = None,
        api_mtls_endpoint: str = None,
        client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
        ssl_channel_credentials: grpc.ChannelCredentials = None,
        client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
        quota_project_id=None,
        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
        always_use_jwt_access: Optional[bool] = False,
        api_audience: Optional[str] = None,
    ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                The hostname to connect to.
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
                This argument is ignored if ``channel`` is provided.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is ignored if ``channel`` is provided.
            scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
                service. These are only used when credentials are not specified and
                are passed to :func:`google.auth.default`.
            channel (Optional[aio.Channel]): A ``Channel`` instance through
                which to make calls.
            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
                If provided, it overrides the ``host`` argument and tries to create
                a mutual TLS channel with client SSL credentials from
                ``client_cert_source`` or application default SSL credentials.
            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
                Deprecated. A callback to provide client SSL certificate bytes and
                private key bytes, both in PEM format. It is ignored if
                ``api_mtls_endpoint`` is None.
            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
                for the grpc channel. It is ignored if ``channel`` is provided.
            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
                A callback to provide client certificate bytes and private key bytes,
                both in PEM format. It is used to configure a mutual TLS channel. It is
                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.
            api_audience (Optional[str]): Optional audience value, forwarded
                to the base transport.

        Raises:
            google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
                creation failed for any reason.
            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
                and ``credentials_file`` are passed.
        """
        self._grpc_channel = None
        self._ssl_channel_credentials = ssl_channel_credentials
        # Per-RPC stub cache, populated lazily by the stub properties.
        self._stubs: Dict[str, Callable] = {}
        self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None
        if api_mtls_endpoint:
            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
        if client_cert_source:
            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
        # TLS resolution order: an explicit channel wins outright; otherwise
        # the deprecated mTLS arguments, then the mTLS cert callback, decide
        # which SSL credentials the new channel will use.
        if channel:
            # Ignore credentials if a channel was passed.
            credentials = False
            # If a channel was explicitly provided, set it.
            self._grpc_channel = channel
            self._ssl_channel_credentials = None
        else:
            if api_mtls_endpoint:
                host = api_mtls_endpoint
                # Create SSL credentials with client_cert_source or application
                # default SSL credentials.
                if client_cert_source:
                    cert, key = client_cert_source()
                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
                        certificate_chain=cert, private_key=key
                    )
                else:
                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
            else:
                if client_cert_source_for_mtls and not ssl_channel_credentials:
                    cert, key = client_cert_source_for_mtls()
                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
                        certificate_chain=cert, private_key=key
                    )
        # The base transport sets the host, credentials and scopes
        super().__init__(
            host=host,
            credentials=credentials,
            credentials_file=credentials_file,
            scopes=scopes,
            quota_project_id=quota_project_id,
            client_info=client_info,
            always_use_jwt_access=always_use_jwt_access,
            api_audience=api_audience,
        )
        # Only build a channel if the caller did not supply one above.
        if not self._grpc_channel:
            self._grpc_channel = type(self).create_channel(
                self._host,
                # use the credentials which are saved
                credentials=self._credentials,
                # Set ``credentials_file`` to ``None`` here as
                # the credentials that we saved earlier should be used.
                credentials_file=None,
                scopes=self._scopes,
                ssl_credentials=self._ssl_channel_credentials,
                quota_project_id=quota_project_id,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )
        # Wrap messages. This must be done after self._grpc_channel exists
        self._prep_wrapped_messages(client_info)
@property
def grpc_channel(self) -> aio.Channel:
"""Create the channel designed to connect to this service.
This property caches on the instance; repeated calls return
the same channel.
"""
# Return the channel from cache.
return self._grpc_channel
@property
def operations_client(self) -> operations_v1.OperationsAsyncClient:
"""Create the client designed to process long-running operations.
This property caches on the instance; repeated calls return the same
client.
"""
# Quick check: Only create a new client if we do not already have one.
if self._operations_client is None:
self._operations_client = operations_v1.OperationsAsyncClient(
self.grpc_channel
)
# Return the client from cache.
return self._operations_client
@property
def create_study(
self,
) -> Callable[[vizier_service.CreateStudyRequest], Awaitable[gca_study.Study]]:
r"""Return a callable for the create study method over gRPC.
Creates a Study. A resource name will be generated
after creation of the Study.
Returns:
Callable[[~.CreateStudyRequest],
Awaitable[~.Study]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_study" not in self._stubs:
self._stubs["create_study"] = self.grpc_channel.unary_unary(
"/google.cloud.aiplatform.v1beta1.VizierService/CreateStudy",
request_serializer=vizier_service.CreateStudyRequest.serialize,
response_deserializer=gca_study.Study.deserialize,
)
return self._stubs["create_study"]
@property
def get_study(
self,
) -> Callable[[vizier_service.GetStudyRequest], Awaitable[study.Study]]:
r"""Return a callable for the get study method over gRPC.
Gets a Study by name.
Returns:
Callable[[~.GetStudyRequest],
Awaitable[~.Study]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_study" not in self._stubs:
self._stubs["get_study"] = self.grpc_channel.unary_unary(
"/google.cloud.aiplatform.v1beta1.VizierService/GetStudy",
request_serializer=vizier_service.GetStudyRequest.serialize,
response_deserializer=study.Study.deserialize,
)
return self._stubs["get_study"]
@property
def list_studies(
self,
) -> Callable[
[vizier_service.ListStudiesRequest],
Awaitable[vizier_service.ListStudiesResponse],
]:
r"""Return a callable for the list studies method over gRPC.
Lists all the studies in a region for an associated
project.
Returns:
Callable[[~.ListStudiesRequest],
Awaitable[~.ListStudiesResponse]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_studies" not in self._stubs:
self._stubs["list_studies"] = self.grpc_channel.unary_unary(
"/google.cloud.aiplatform.v1beta1.VizierService/ListStudies",
request_serializer=vizier_service.ListStudiesRequest.serialize,
response_deserializer=vizier_service.ListStudiesResponse.deserialize,
)
return self._stubs["list_studies"]
@property
def delete_study(
self,
) -> Callable[[vizier_service.DeleteStudyRequest], Awaitable[empty_pb2.Empty]]:
r"""Return a callable for the delete study method over gRPC.
Deletes a Study.
Returns:
Callable[[~.DeleteStudyRequest],
Awaitable[~.Empty]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "delete_study" not in self._stubs:
self._stubs["delete_study"] = self.grpc_channel.unary_unary(
"/google.cloud.aiplatform.v1beta1.VizierService/DeleteStudy",
request_serializer=vizier_service.DeleteStudyRequest.serialize,
response_deserializer=empty_pb2.Empty.FromString,
)
return self._stubs["delete_study"]
@property
def lookup_study(
self,
) -> Callable[[vizier_service.LookupStudyRequest], Awaitable[study.Study]]:
r"""Return a callable for the lookup study method over gRPC.
Looks a study up using the user-defined display_name field
instead of the fully qualified resource name.
Returns:
Callable[[~.LookupStudyRequest],
Awaitable[~.Study]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "lookup_study" not in self._stubs:
self._stubs["lookup_study"] = self.grpc_channel.unary_unary(
"/google.cloud.aiplatform.v1beta1.VizierService/LookupStudy",
request_serializer=vizier_service.LookupStudyRequest.serialize,
response_deserializer=study.Study.deserialize,
)
return self._stubs["lookup_study"]
@property
def suggest_trials(
self,
) -> Callable[
[vizier_service.SuggestTrialsRequest], Awaitable[operations_pb2.Operation]
]:
r"""Return a callable for the suggest trials method over gRPC.
Adds one or more Trials to a Study, with parameter values
suggested by Vertex AI Vizier. Returns a long-running operation
associated with the generation of Trial suggestions. When this
long-running operation succeeds, it will contain a
[SuggestTrialsResponse][google.cloud.ml.v1.SuggestTrialsResponse].
Returns:
Callable[[~.SuggestTrialsRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "suggest_trials" not in self._stubs:
self._stubs["suggest_trials"] = self.grpc_channel.unary_unary(
"/google.cloud.aiplatform.v1beta1.VizierService/SuggestTrials",
request_serializer=vizier_service.SuggestTrialsRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["suggest_trials"]
@property
def create_trial(
self,
) -> Callable[[vizier_service.CreateTrialRequest], Awaitable[study.Trial]]:
r"""Return a callable for the create trial method over gRPC.
Adds a user provided Trial to a Study.
Returns:
Callable[[~.CreateTrialRequest],
Awaitable[~.Trial]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_trial" not in self._stubs:
self._stubs["create_trial"] = self.grpc_channel.unary_unary(
"/google.cloud.aiplatform.v1beta1.VizierService/CreateTrial",
request_serializer=vizier_service.CreateTrialRequest.serialize,
response_deserializer=study.Trial.deserialize,
)
return self._stubs["create_trial"]
@property
def get_trial(
self,
) -> Callable[[vizier_service.GetTrialRequest], Awaitable[study.Trial]]:
r"""Return a callable for the get trial method over gRPC.
Gets a Trial.
Returns:
Callable[[~.GetTrialRequest],
Awaitable[~.Trial]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_trial" not in self._stubs:
self._stubs["get_trial"] = self.grpc_channel.unary_unary(
"/google.cloud.aiplatform.v1beta1.VizierService/GetTrial",
request_serializer=vizier_service.GetTrialRequest.serialize,
response_deserializer=study.Trial.deserialize,
)
return self._stubs["get_trial"]
@property
def list_trials(
self,
) -> Callable[
[vizier_service.ListTrialsRequest], Awaitable[vizier_service.ListTrialsResponse]
]:
r"""Return a callable for the list trials method over gRPC.
Lists the Trials associated with a Study.
Returns:
Callable[[~.ListTrialsRequest],
Awaitable[~.ListTrialsResponse]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_trials" not in self._stubs:
self._stubs["list_trials"] = self.grpc_channel.unary_unary(
"/google.cloud.aiplatform.v1beta1.VizierService/ListTrials",
request_serializer=vizier_service.ListTrialsRequest.serialize,
response_deserializer=vizier_service.ListTrialsResponse.deserialize,
)
return self._stubs["list_trials"]
@property
def add_trial_measurement(
self,
) -> Callable[[vizier_service.AddTrialMeasurementRequest], Awaitable[study.Trial]]:
r"""Return a callable for the add trial measurement method over gRPC.
Adds a measurement of the objective metrics to a
Trial. This measurement is assumed to have been taken
before the Trial is complete.
Returns:
Callable[[~.AddTrialMeasurementRequest],
Awaitable[~.Trial]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "add_trial_measurement" not in self._stubs:
self._stubs["add_trial_measurement"] = self.grpc_channel.unary_unary(
"/google.cloud.aiplatform.v1beta1.VizierService/AddTrialMeasurement",
request_serializer=vizier_service.AddTrialMeasurementRequest.serialize,
response_deserializer=study.Trial.deserialize,
)
return self._stubs["add_trial_measurement"]
@property
def complete_trial(
self,
) -> Callable[[vizier_service.CompleteTrialRequest], Awaitable[study.Trial]]:
r"""Return a callable for the complete trial method over gRPC.
Marks a Trial as complete.
Returns:
Callable[[~.CompleteTrialRequest],
Awaitable[~.Trial]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "complete_trial" not in self._stubs:
self._stubs["complete_trial"] = self.grpc_channel.unary_unary(
"/google.cloud.aiplatform.v1beta1.VizierService/CompleteTrial",
request_serializer=vizier_service.CompleteTrialRequest.serialize,
response_deserializer=study.Trial.deserialize,
)
return self._stubs["complete_trial"]
@property
def delete_trial(
self,
) -> Callable[[vizier_service.DeleteTrialRequest], Awaitable[empty_pb2.Empty]]:
r"""Return a callable for the delete trial method over gRPC.
Deletes a Trial.
Returns:
Callable[[~.DeleteTrialRequest],
Awaitable[~.Empty]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "delete_trial" not in self._stubs:
self._stubs["delete_trial"] = self.grpc_channel.unary_unary(
"/google.cloud.aiplatform.v1beta1.VizierService/DeleteTrial",
request_serializer=vizier_service.DeleteTrialRequest.serialize,
response_deserializer=empty_pb2.Empty.FromString,
)
return self._stubs["delete_trial"]
@property
def check_trial_early_stopping_state(
self,
) -> Callable[
[vizier_service.CheckTrialEarlyStoppingStateRequest],
Awaitable[operations_pb2.Operation],
]:
r"""Return a callable for the check trial early stopping
state method over gRPC.
Checks whether a Trial should stop or not. Returns a
long-running operation. When the operation is successful, it
will contain a
[CheckTrialEarlyStoppingStateResponse][google.cloud.ml.v1.CheckTrialEarlyStoppingStateResponse].
Returns:
Callable[[~.CheckTrialEarlyStoppingStateRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "check_trial_early_stopping_state" not in self._stubs:
self._stubs[
"check_trial_early_stopping_state"
] = self.grpc_channel.unary_unary(
"/google.cloud.aiplatform.v1beta1.VizierService/CheckTrialEarlyStoppingState",
request_serializer=vizier_service.CheckTrialEarlyStoppingStateRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["check_trial_early_stopping_state"]
@property
def stop_trial(
self,
) -> Callable[[vizier_service.StopTrialRequest], Awaitable[study.Trial]]:
r"""Return a callable for the stop trial method over gRPC.
Stops a Trial.
Returns:
Callable[[~.StopTrialRequest],
Awaitable[~.Trial]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "stop_trial" not in self._stubs:
self._stubs["stop_trial"] = self.grpc_channel.unary_unary(
"/google.cloud.aiplatform.v1beta1.VizierService/StopTrial",
request_serializer=vizier_service.StopTrialRequest.serialize,
response_deserializer=study.Trial.deserialize,
)
return self._stubs["stop_trial"]
@property
def list_optimal_trials(
self,
) -> Callable[
[vizier_service.ListOptimalTrialsRequest],
Awaitable[vizier_service.ListOptimalTrialsResponse],
]:
r"""Return a callable for the list optimal trials method over gRPC.
Lists the pareto-optimal Trials for multi-objective Study or the
optimal Trials for single-objective Study. The definition of
pareto-optimal can be checked in wiki page.
https://en.wikipedia.org/wiki/Pareto_efficiency
Returns:
Callable[[~.ListOptimalTrialsRequest],
Awaitable[~.ListOptimalTrialsResponse]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_optimal_trials" not in self._stubs:
self._stubs["list_optimal_trials"] = self.grpc_channel.unary_unary(
"/google.cloud.aiplatform.v1beta1.VizierService/ListOptimalTrials",
request_serializer=vizier_service.ListOptimalTrialsRequest.serialize,
response_deserializer=vizier_service.ListOptimalTrialsResponse.deserialize,
)
return self._stubs["list_optimal_trials"]
def close(self):
    """Close the underlying gRPC channel, releasing its resources."""
    return self.grpc_channel.close()
@property
def delete_operation(
    self,
) -> Callable[[operations_pb2.DeleteOperationRequest], None]:
    r"""Return a callable for the delete_operation method over gRPC."""
    # Lazily build and memoize the stub; gRPC performs the
    # (de)serialization, so only the message codecs are wired up here.
    stub = self._stubs.get("delete_operation")
    if stub is None:
        stub = self.grpc_channel.unary_unary(
            "/google.longrunning.Operations/DeleteOperation",
            request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString,
            response_deserializer=None,
        )
        self._stubs["delete_operation"] = stub
    return stub
@property
def cancel_operation(
    self,
) -> Callable[[operations_pb2.CancelOperationRequest], None]:
    r"""Return a callable for the cancel_operation method over gRPC."""
    # Lazily build and memoize the stub; gRPC performs the
    # (de)serialization, so only the message codecs are wired up here.
    stub = self._stubs.get("cancel_operation")
    if stub is None:
        stub = self.grpc_channel.unary_unary(
            "/google.longrunning.Operations/CancelOperation",
            request_serializer=operations_pb2.CancelOperationRequest.SerializeToString,
            response_deserializer=None,
        )
        self._stubs["cancel_operation"] = stub
    return stub
@property
def wait_operation(
    self,
) -> Callable[[operations_pb2.WaitOperationRequest], None]:
    r"""Return a callable for the wait_operation method over gRPC."""
    # Generate a "stub function" on-the-fly which will actually make
    # the request.
    # gRPC handles serialization and deserialization, so we just need
    # to pass in the functions for each.
    #
    # Bug fix: the guard previously tested the "delete_operation" key,
    # so a new wait stub was built on every access and, once
    # delete_operation had been cached, "wait_operation" was never
    # created at all (KeyError on the return below).
    if "wait_operation" not in self._stubs:
        self._stubs["wait_operation"] = self.grpc_channel.unary_unary(
            "/google.longrunning.Operations/WaitOperation",
            request_serializer=operations_pb2.WaitOperationRequest.SerializeToString,
            response_deserializer=None,
        )
    return self._stubs["wait_operation"]
@property
def get_operation(
    self,
) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:
    r"""Return a callable for the get_operation method over gRPC."""
    # Lazily build and memoize the stub; gRPC performs the
    # (de)serialization, so only the message codecs are wired up here.
    stub = self._stubs.get("get_operation")
    if stub is None:
        stub = self.grpc_channel.unary_unary(
            "/google.longrunning.Operations/GetOperation",
            request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
            response_deserializer=operations_pb2.Operation.FromString,
        )
        self._stubs["get_operation"] = stub
    return stub
@property
def list_operations(
    self,
) -> Callable[
    [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse
]:
    r"""Return a callable for the list_operations method over gRPC."""
    # Lazily build and memoize the stub; gRPC performs the
    # (de)serialization, so only the message codecs are wired up here.
    stub = self._stubs.get("list_operations")
    if stub is None:
        stub = self.grpc_channel.unary_unary(
            "/google.longrunning.Operations/ListOperations",
            request_serializer=operations_pb2.ListOperationsRequest.SerializeToString,
            response_deserializer=operations_pb2.ListOperationsResponse.FromString,
        )
        self._stubs["list_operations"] = stub
    return stub
@property
def list_locations(
    self,
) -> Callable[
    [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse
]:
    r"""Return a callable for the list locations method over gRPC."""
    # Lazily build and memoize the stub; gRPC performs the
    # (de)serialization, so only the message codecs are wired up here.
    stub = self._stubs.get("list_locations")
    if stub is None:
        stub = self.grpc_channel.unary_unary(
            "/google.cloud.location.Locations/ListLocations",
            request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
            response_deserializer=locations_pb2.ListLocationsResponse.FromString,
        )
        self._stubs["list_locations"] = stub
    return stub
@property
def get_location(
    self,
) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
    r"""Return a callable for the get location method over gRPC."""
    # Generate a "stub function" on-the-fly which will actually make
    # the request.
    # gRPC handles serialization and deserialization, so we just need
    # to pass in the functions for each.
    if "get_location" not in self._stubs:
        self._stubs["get_location"] = self.grpc_channel.unary_unary(
            "/google.cloud.location.Locations/GetLocation",
            request_serializer=locations_pb2.GetLocationRequest.SerializeToString,
            response_deserializer=locations_pb2.Location.FromString,
        )
    return self._stubs["get_location"]
@property
def set_iam_policy(
    self,
) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]:
    r"""Return a callable for the set iam policy method over gRPC.

    Sets the IAM access control policy on the specified
    function. Replaces any existing policy.

    Returns:
        Callable[[~.SetIamPolicyRequest],
            ~.Policy]:
        A function that, when called, will call the underlying RPC
        on the server.
    """
    # Lazily build and memoize the stub; gRPC performs the
    # (de)serialization, so only the message codecs are wired up here.
    stub = self._stubs.get("set_iam_policy")
    if stub is None:
        stub = self.grpc_channel.unary_unary(
            "/google.iam.v1.IAMPolicy/SetIamPolicy",
            request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString,
            response_deserializer=policy_pb2.Policy.FromString,
        )
        self._stubs["set_iam_policy"] = stub
    return stub
@property
def get_iam_policy(
    self,
) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]:
    r"""Return a callable for the get iam policy method over gRPC.

    Gets the IAM access control policy for a function.
    Returns an empty policy if the function exists and does
    not have a policy set.

    Returns:
        Callable[[~.GetIamPolicyRequest],
            ~.Policy]:
        A function that, when called, will call the underlying RPC
        on the server.
    """
    # Lazily build and memoize the stub; gRPC performs the
    # (de)serialization, so only the message codecs are wired up here.
    stub = self._stubs.get("get_iam_policy")
    if stub is None:
        stub = self.grpc_channel.unary_unary(
            "/google.iam.v1.IAMPolicy/GetIamPolicy",
            request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString,
            response_deserializer=policy_pb2.Policy.FromString,
        )
        self._stubs["get_iam_policy"] = stub
    return stub
@property
def test_iam_permissions(
    self,
) -> Callable[
    [iam_policy_pb2.TestIamPermissionsRequest],
    iam_policy_pb2.TestIamPermissionsResponse,
]:
    r"""Return a callable for the test iam permissions method over gRPC.

    Tests the specified permissions against the IAM access control
    policy for a function. If the function does not exist, this will
    return an empty set of permissions, not a NOT_FOUND error.

    Returns:
        Callable[[~.TestIamPermissionsRequest],
            ~.TestIamPermissionsResponse]:
        A function that, when called, will call the underlying RPC
        on the server.
    """
    # Lazily build and memoize the stub; gRPC performs the
    # (de)serialization, so only the message codecs are wired up here.
    stub = self._stubs.get("test_iam_permissions")
    if stub is None:
        stub = self.grpc_channel.unary_unary(
            "/google.iam.v1.IAMPolicy/TestIamPermissions",
            request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString,
            response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString,
        )
        self._stubs["test_iam_permissions"] = stub
    return stub
__all__ = ("VizierServiceGrpcAsyncIOTransport",)
| {
"content_hash": "827981a7eb89b9a2b97941d3a99a3b29",
"timestamp": "",
"source": "github",
"line_count": 877,
"max_line_length": 104,
"avg_line_length": 44.14025085518814,
"alnum_prop": 0.6228462194208364,
"repo_name": "googleapis/python-aiplatform",
"id": "2b458b2d3d8051abff952ddc1bab8437273de003",
"size": "39311",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "google/cloud/aiplatform_v1beta1/services/vizier_service/transports/grpc_asyncio.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2050"
},
{
"name": "Python",
"bytes": "23977004"
},
{
"name": "Shell",
"bytes": "30668"
}
],
"symlink_target": ""
} |
"""
.. _tut_preprocessing_ica:
Compute ICA on MEG data and remove artifacts
============================================
ICA is fit to MEG raw data.
The sources matching the ECG and EOG are automatically found and displayed.
Subsequently, artifact detection and rejection quality are assessed.
"""
# Authors: Denis Engemann <denis.engemann@gmail.com>
# Alexandre Gramfort <alexandre.gramfort@telecom-paristech.fr>
#
# License: BSD (3-clause)
import numpy as np
import mne
from mne.preprocessing import ICA
from mne.preprocessing import create_ecg_epochs, create_eog_epochs
from mne.datasets import sample
###############################################################################
# Setup paths and prepare raw data
data_path = sample.data_path()
raw_fname = data_path + '/MEG/sample/sample_audvis_filt-0-40_raw.fif'
raw = mne.io.read_raw_fif(raw_fname, preload=True, add_eeg_ref=False)
raw.filter(1, 45, n_jobs=1, l_trans_bandwidth=0.5, h_trans_bandwidth=0.5,
filter_length='10s', phase='zero-double')
###############################################################################
# 1) Fit ICA model using the FastICA algorithm
# Other available choices are `infomax` or `extended-infomax`
# We pass a float value between 0 and 1 to select n_components based on the
# percentage of variance explained by the PCA components.
ica = ICA(n_components=0.95, method='fastica')
picks = mne.pick_types(raw.info, meg=True, eeg=False, eog=False,
stim=False, exclude='bads')
ica.fit(raw, picks=picks, decim=3, reject=dict(mag=4e-12, grad=4000e-13))
# maximum number of components to reject
n_max_ecg, n_max_eog = 3, 1 # here we don't expect horizontal EOG components
###############################################################################
# 2) identify bad components by analyzing latent sources.
title = 'Sources related to %s artifacts (red)'
# generate ECG epochs use detection via phase statistics
ecg_epochs = create_ecg_epochs(raw, tmin=-.5, tmax=.5, picks=picks)
ecg_inds, scores = ica.find_bads_ecg(ecg_epochs, method='ctps')
ica.plot_scores(scores, exclude=ecg_inds, title=title % 'ecg', labels='ecg')
show_picks = np.abs(scores).argsort()[::-1][:5]
ica.plot_sources(raw, show_picks, exclude=ecg_inds, title=title % 'ecg')
ica.plot_components(ecg_inds, title=title % 'ecg', colorbar=True)
ecg_inds = ecg_inds[:n_max_ecg]
ica.exclude += ecg_inds
# detect EOG by correlation
eog_inds, scores = ica.find_bads_eog(raw)
ica.plot_scores(scores, exclude=eog_inds, title=title % 'eog', labels='eog')
show_picks = np.abs(scores).argsort()[::-1][:5]
ica.plot_sources(raw, show_picks, exclude=eog_inds, title=title % 'eog')
ica.plot_components(eog_inds, title=title % 'eog', colorbar=True)
eog_inds = eog_inds[:n_max_eog]
ica.exclude += eog_inds
###############################################################################
# 3) Assess component selection and unmixing quality
# estimate average artifact
ecg_evoked = ecg_epochs.average()
ica.plot_sources(ecg_evoked, exclude=ecg_inds) # plot ECG sources + selection
ica.plot_overlay(ecg_evoked, exclude=ecg_inds) # plot ECG cleaning
eog_evoked = create_eog_epochs(raw, tmin=-.5, tmax=.5, picks=picks).average()
ica.plot_sources(eog_evoked, exclude=eog_inds) # plot EOG sources + selection
ica.plot_overlay(eog_evoked, exclude=eog_inds) # plot EOG cleaning
# check the amplitudes do not change
ica.plot_overlay(raw) # EOG artifacts remain
###############################################################################
# To save an ICA solution you can say:
# ica.save('my_ica.fif')
# You can later load the solution by saying:
# from mne.preprocessing import read_ica
# read_ica('my_ica.fif')
# Apply the solution to Raw, Epochs or Evoked like this:
# ica.apply(epochs)
| {
"content_hash": "9dbf0eaad648bd83d312cb9deb3fe241",
"timestamp": "",
"source": "github",
"line_count": 108,
"max_line_length": 79,
"avg_line_length": 35.25,
"alnum_prop": 0.6411872865773575,
"repo_name": "jmontoyam/mne-python",
"id": "4b466ad2e987dc8bdafb5db487ffc8a4f134842a",
"size": "3807",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "tutorials/plot_ica_from_raw.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Csound Document",
"bytes": "69806"
},
{
"name": "Makefile",
"bytes": "3679"
},
{
"name": "Python",
"bytes": "5539709"
},
{
"name": "Shell",
"bytes": "936"
}
],
"symlink_target": ""
} |
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from pants.base.payload import Payload
from pants.base.payload_field import PayloadField, PrimitiveField, combine_hashes, stable_json_sha1
from pants.build_graph.target import Target
# File copied from `pants.backend.docgen.targets.doc`
class WikiArtifact(object):
  """Binds a single documentation page to a wiki instance.

  This object allows you to specify which wiki a page should be published to, along with additional
  wiki-specific parameters, such as the title, parent page, etc.
  """

  def __init__(self, wiki, **kwargs):
    """
    :param wiki: target spec of a ``wiki``.
    :param kwargs: a dictionary that may contain configuration directives for your particular wiki.
      For example, the following keys are supported for Atlassian's Confluence:

      * ``space`` -- A wiki space in which to place the page (used in Confluence)
      * ``title`` -- A title for the wiki page
      * ``parent`` -- The title of a wiki page that will denote this page as a child.
    """
    self.wiki = wiki
    self.config = kwargs

  def fingerprint(self):
    """Stable hash combining the target wiki's fingerprint with this artifact's config."""
    components = [self.wiki.fingerprint(), stable_json_sha1(self.config)]
    return combine_hashes(components)
class Wiki(object):
  """Identifies a wiki where pages can be published."""

  def __init__(self, name, url_builder):
    """
    :param name: Name identifying this wiki.
    :param url_builder: Function that accepts a page target and an optional wiki config dict.
    """
    self.name = name
    self.url_builder = url_builder

  def fingerprint(self):
    """Stable hash of this wiki's name.

    TODO: url_builder is not a part of fingerprint.
    """
    return stable_json_sha1(self.name)
class Page(Target):
  """A documentation page.

  Here is an example, that shows a markdown page providing a wiki page on an Atlassian Confluence
  wiki: ::

     page(name='mypage',
       source='mypage.md',
       provides=[
         wiki_artifact(wiki=Wiki('foozle', <url builder>),
                       space='my_space',
                       title='my_page',
                       parent='my_parent'),
       ],
     )

  A ``page`` can have more than one ``wiki_artifact`` in its ``provides``
  (there might be more than one place to publish it).
  """

  class ProvidesTupleField(tuple, PayloadField):
    # Fingerprint combines every provided artifact's fingerprint, so a change
    # to any wiki_artifact invalidates the cached target.
    def _compute_fingerprint(self):
      return combine_hashes(artifact.fingerprint() for artifact in self)

  def __init__(self,
               address=None,
               payload=None,
               source=None,
               format=None,
               links=None,
               provides=None,
               **kwargs):
    """
    :param address: The Address of this target.
    :param payload: Optional pre-built Payload; a fresh one is created otherwise.
    :param source: Path to page source file.
    :param format: Page's format, ``md`` or ``rst``. By default, Pants infers from ``source`` file
       extension: ``.rst`` is ReStructured Text; anything else is Markdown.
    :param links: Other ``page`` targets that this `page` links to.
    :type links: List of target specs
    :param provides: Optional "Addresses" at which this page is published.
       E.g., a wiki location.
    :type provides: List of ``wiki_artifact``s
    """
    payload = payload or Payload()
    # Infer the format from the source file extension when not given explicitly.
    if not format:
      if source and source.lower().endswith('.rst'):
        format = 'rst'
      else:
        format = 'md'
    payload.add_fields({
      'sources': self.create_sources_field(sources=[source],
                                           sources_rel_path=address.spec_path,
                                           key_arg='sources'),
      'format': PrimitiveField(format),
      'links': PrimitiveField(links or []),
      'provides': self.ProvidesTupleField(provides or []),
    })
    super(Page, self).__init__(address=address, payload=payload, **kwargs)

    # NOTE(review): only the first element of `provides` is type-checked, and
    # only after super().__init__ has run -- confirm whether validating every
    # element (and before construction) was intended.
    if provides and not isinstance(provides[0], WikiArtifact):
      raise ValueError('Page must provide a wiki_artifact. Found instead: {}'.format(provides))

  @property
  def source(self):
    """The first (and only) source listed by this Page."""
    return list(self.payload.sources.source_paths)[0]

  @classmethod
  def compute_injectable_specs(cls, kwargs=None, payload=None):
    # Extend the base set of injectable specs with this page's `links`,
    # reading them from kwargs (BUILD-file path) or the payload (hydrated path).
    for spec in super(Page, cls).compute_injectable_specs(kwargs, payload):
      yield spec
    target_representation = kwargs or payload.as_dict()
    for spec in target_representation.get('links', []):
      yield spec

  @property
  def provides(self):
    """A tuple of WikiArtifact instances provided by this Page.

    Notably different from JvmTarget.provides, which has only a single Artifact rather than a
    list.
    """
    return self.payload.provides

  @property
  def format(self):
    """Returns this page's format, 'md' (Markdown) or 'rst' (ReStructured Text)."""
    return self.payload.format
| {
"content_hash": "64e8267cf5e5fe0b16f1a00317621039",
"timestamp": "",
"source": "github",
"line_count": 136,
"max_line_length": 99,
"avg_line_length": 35.029411764705884,
"alnum_prop": 0.6400083963056256,
"repo_name": "fkorotkov/pants",
"id": "193380bff5573c867ddf532aea244e5fbf59a95c",
"size": "4911",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "contrib/confluence/src/python/pants/contrib/confluence/targets/doc_page.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "781"
},
{
"name": "CSS",
"bytes": "9444"
},
{
"name": "GAP",
"bytes": "1283"
},
{
"name": "Gherkin",
"bytes": "919"
},
{
"name": "Go",
"bytes": "1805"
},
{
"name": "HTML",
"bytes": "79866"
},
{
"name": "Java",
"bytes": "481460"
},
{
"name": "JavaScript",
"bytes": "35417"
},
{
"name": "Python",
"bytes": "5931594"
},
{
"name": "Rust",
"bytes": "271643"
},
{
"name": "Scala",
"bytes": "76239"
},
{
"name": "Shell",
"bytes": "74734"
},
{
"name": "Thrift",
"bytes": "2795"
}
],
"symlink_target": ""
} |
import logging
import os
import subprocess
import time
from pathlib import Path
from pkg_resources import resource_filename
from mapdamage.version import __version__
def misincorporation_plot(options):
    """Render the fragmentation/misincorporation summary PDF via the bundled R script."""
    folder = options.folder.absolute()
    destination = folder / "Fragmisincorporation_plot.pdf"

    logger = logging.getLogger(__name__)
    logger.info("Saving misincorporation plot to '%s'", destination)

    return _rscript_call(
        Path("mapDamage.r"),
        COMP=folder / "dnacomp.txt",
        PDFOUT=destination,
        AROUND=options.refplot,
        MISINCORP=folder / "misincorporation.txt",
        LENGTH=options.readplot,
        YMAX=options.ymax,
        FOLDER=folder,
        TITLE=options.title,
        VERSION=__version__,
    )
def length_distribution_plot(options):
    """optional length distribution and cumulative C>T mutations plots, per strand"""
    folder = options.folder.absolute()
    destination = folder / "Length_plot.pdf"

    logger = logging.getLogger(__name__)
    logger.info("Saving length distribution plot to '%s'", destination)

    return _rscript_call(
        Path("lengths.r"),
        LGDIST=folder / "lgdistribution.txt",
        PDFOUT=destination,
        MISINCORP=folder / "misincorporation.txt",
        TITLE=options.title,
        VERSION=__version__,
    )
def check_r_libraries():
    """Checks if the necessary R libraries are here, signal otherwise"""
    logger = logging.getLogger(__name__)
    all_present = True

    for library in ("ggplot2", "gam", "Rcpp", "RcppGSL"):
        # Loading the library in a throwaway Rscript session is the check.
        if not _log_call(["Rscript", "-e", "library(%s)" % (library,)], quiet=True):
            logger.error("Required R library is missing: %r", library)
            all_present = False

    return all_present
def perform_bayesian_estimates(options):
    """Runs the Bayesian estimation program"""
    logger = logging.getLogger(__name__)
    logger.info("Performing Bayesian estimates")

    folder = options.folder.absolute()

    # Disable compile time warnings for Rcpp code; these are outside of our control
    env = dict(os.environ)
    env["PKG_CPPFLAGS"] = env.get("PKG_CPPFLAGS", "") + " -w"

    script_options = {
        "GRID_ITER": options.rand,
        "BURN_IN": options.burn,
        "ADJUST_ITER": options.adjust,
        "ITERATIONS": options.iter,
        "TERMINI": options.termini,
        "FIX_DISP": not options.var_disp,
        "SAME_OVERHANGS": not options.diff_hangs,
        "FIX_NU": bool(options.fix_nicks),
        "DS_PROTOCOL": not options.single_stranded,
        "SUB_LENGTH": options.seq_length,
        "PATH_TO_DAT": str(folder) + "/",
        "VERBOSE": bool(options.log_level == "DEBUG"),
        "QUIET": bool(options.log_level not in ("DEBUG", "INFO")),
        "JUKES_CANTOR": bool(options.jukes_cantor),
        "USE_RAW_NICK_FREQ": bool(options.use_raw_nick_freq),
        "USE_BW_THEME": bool(options.theme_bw),
    }

    return _rscript_call(Path("stats") / "runGeneral.r", env=env, **script_options)
def _rscript_call(filepath, env=None, **kwargs):
    """Invoke a bundled R script, passing keyword arguments as KEY=VALUE pairs."""
    # Scripts live inside the installed mapdamage package; run from their folder.
    script_dir = Path(resource_filename("mapdamage", "r")) / filepath.parent
    command = ["Rscript", filepath.name]
    command.extend("%s=%s" % pair for pair in sorted(kwargs.items()))
    return _log_call(command, cwd=script_dir, env=env)
def _log_call(command, quiet=False, cwd=None, env=None):
command = [str(value) for value in command]
logger = logging.getLogger(__name__)
logger.debug("Running command %r", " ".join(command))
loglevel = logging.DEBUG if quiet else logging.INFO
start = time.time()
proc = subprocess.Popen(
command,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
start_new_session=True,
cwd=cwd,
env=env,
)
try:
for line in proc.stdout:
logger.log(loglevel, "%s", line.decode("utf-8", errors="replace").rstrip())
returncode = proc.wait()
except:
proc.terminate()
proc.wait()
raise
logger.debug("Call completed in %.2fs with rc %i", time.time() - start, returncode)
if returncode and not quiet:
logger.error("Command returned error %i: %r", returncode, " ".join(command))
return not returncode
| {
"content_hash": "e708df53dee917973c23fc3dfadb9c53",
"timestamp": "",
"source": "github",
"line_count": 144,
"max_line_length": 87,
"avg_line_length": 29.5625,
"alnum_prop": 0.6295513272257458,
"repo_name": "ginolhac/mapDamage",
"id": "d8b931a4b8f203cd4e0771b9ba557517a3d22069",
"size": "4257",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "mapdamage/rscript.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "36799"
},
{
"name": "Makefile",
"bytes": "191"
},
{
"name": "Python",
"bytes": "69646"
},
{
"name": "R",
"bytes": "64885"
}
],
"symlink_target": ""
} |
from datetime import datetime
from mongoengine import (connect, Document, StringField,
IntField, DateTimeField, ListField)
from dateutil.relativedelta import relativedelta
connect('another-one')
class OneIssue(Document):
    """One published issue: its sequential number, derived date, and article list."""

    # Sequential issue number; issue 1 maps to 2012-06-11 (see gen_time).
    issue_number = IntField()
    create_time = DateTimeField()
    articles = ListField(StringField())

    @classmethod
    def create(cls, issue_number=None, articles=None):
        """Create or refresh the issue identified by *issue_number*.

        Silently does nothing when *articles* is empty/None. The stored
        create_time is derived from the issue number, not the wall clock.

        NOTE(review): when an issue with this number already exists, its
        `articles` field is NOT updated (only create_time is recomputed) --
        confirm this is intended.
        """
        if not articles:
            return
        one_issue = cls.get_issue_by_issue_number(issue_number=issue_number) or \
            cls(issue_number=issue_number, articles=articles)
        one_issue.create_time = one_issue.gen_time()
        one_issue.save()

    @classmethod
    def get_issue_by_issue_number(cls, issue_number=0):
        """Return the first stored issue matching *issue_number*, or None."""
        return cls.objects(issue_number=issue_number).first()

    def gen_time(self):
        """Map issue_number to its date: issue 1 -> 2012-06-11, advancing one day per issue."""
        return datetime(2012, 6, 11) + relativedelta(days=(self.issue_number-1))
| {
"content_hash": "312a95782db43f9ca55528dd49192d90",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 81,
"avg_line_length": 33.41379310344828,
"alnum_prop": 0.6470588235294118,
"repo_name": "zhy0216/another-one",
"id": "61f1395fe9bc0e5eac3f95806d2762041a29c21d",
"size": "993",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "model.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "35031"
},
{
"name": "Python",
"bytes": "3181"
}
],
"symlink_target": ""
} |
"""
Stakeholder engagement API
This API enables Intelligent Engagement for your Business. iEngage is a platform that combines process, augmented intelligence and rewards to help you intelligently engage customers.
OpenAPI spec version: 1.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import iengage_client
from iengage_client.rest import ApiException
from iengage_client.models.verve_response_wf_task_list import VerveResponseWFTaskList
class TestVerveResponseWFTaskList(unittest.TestCase):
    """ VerveResponseWFTaskList unit test stubs """

    def setUp(self):
        # No fixtures needed for these generated stubs.
        pass

    def tearDown(self):
        pass

    def testVerveResponseWFTaskList(self):
        """
        Test VerveResponseWFTaskList
        """
        # Smoke test only: verifies the generated model can be constructed.
        model = iengage_client.models.verve_response_wf_task_list.VerveResponseWFTaskList()
# Allow running this test module directly (python <file>) as well as via a runner.
if __name__ == '__main__':
    unittest.main()
| {
"content_hash": "0ecb76c941ee2f473d2345b70a09e2ed",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 186,
"avg_line_length": 24.8,
"alnum_prop": 0.7227822580645161,
"repo_name": "iEngage/python-sdk",
"id": "3394235d98ad21dc0231283387f91e04ee345e09",
"size": "1009",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/test_verve_response_wf_task_list.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "2373684"
},
{
"name": "Shell",
"bytes": "1664"
}
],
"symlink_target": ""
} |
"""
Recursive algorithm:
Keep finding the leftmost node, make it upside down and
then make its parent to be its rightmost subtree recursively.
GOTCHA: at the last step when you are making original root to be the right node
of the new tree make sure to create another node instead of copying.
"""
class Solution(object):
    def upsideDownBinaryTree(self, root):
        """
        Flip the tree upside down and return the new root.

        :type root: TreeNode
        :rtype: TreeNode
        """
        # Nothing to flip for an empty tree or a node without a left child.
        if root is None or root.left is None:
            return root

        # Flip the left subtree first; its root becomes the overall new root.
        new_root = self.upsideDownBinaryTree(root.left)

        # Walk to the rightmost node of the flipped subtree; the old root's
        # pieces are re-attached there.
        attach_point = new_root
        while attach_point.right is not None:
            attach_point = attach_point.right

        # Old right child becomes the new left child, and a *fresh* node
        # carrying the old root's value becomes the new right child --
        # copying (rather than reusing root) avoids creating a cycle.
        attach_point.left = root.right
        attach_point.right = TreeNode(root.val)

        return new_root
| {
"content_hash": "1e018298cbe91b5c2644c8922aeec6c8",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 91,
"avg_line_length": 35.15384615384615,
"alnum_prop": 0.661925601750547,
"repo_name": "young-geng/leet_code",
"id": "4993e2d5d4531075cbed6024a686f0ded1b4e7a9",
"size": "1247",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "problems/156_binary-tree-upside-down/main.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "599"
},
{
"name": "Python",
"bytes": "111519"
}
],
"symlink_target": ""
} |
# Prefer the fast C implementation on Python 2; fall back to the pure-Python
# module elsewhere.
try:
    import cPickle as pickle
except ImportError:  # was a bare `except:`; only a missing module is expected
    import pickle
from webhelpers.feedgenerator import Rss201rev2Feed as Feed
from datetime import datetime
import logging
import cgi
class Item(object):
    """One RSS feed entry; its publication time is fixed at construction."""

    def __init__(self, t, l, a, d):
        """
        :param t: entry title
        :param l: entry link URL
        :param a: author name
        :param d: plain-text description; newlines are rendered as ``<br/>``
        """
        # Feed descriptions are HTML, so encode line breaks explicitly.
        self.description = d.replace('\n', '<br/>')
        self.title = t
        self.link = l
        self.author_name = a
        self.pubdate = datetime.now()
def notify(header, body, cfg, link):
    """Prepend a new item to the RSS feed and persist both the XML and history.

    :param header: title of the new feed item
    :param body: description of the new item (newlines become ``<br/>`` via Item)
    :param cfg: configuration mapping with ``rss.file``, ``rss.title`` and
        ``rss.description`` keys
    :param link: URL of the new item
    """
    fn = cfg['rss.file']
    pickle_fn = fn + '.pickle'
    try:
        with open(pickle_fn, 'rb') as inp:
            previous = pickle.load(inp)
    except Exception as e:
        # Best-effort: a missing/corrupt history file starts a fresh feed.
        logging.error(e)
        previous = []

    # Keep only the 10 most recent items. (The previous expression,
    # previous[:max(10, len(previous))], never truncated anything.)
    previous = previous[:10]

    feed = Feed(
        title=cfg['rss.title'],
        link='',
        description=cfg['rss.description'],
        language=u"en",
    )

    previous.insert(0, Item(header, link, 'author', body))
    for item in previous:
        feed.add_item(title=item.title, description=item.description, link=item.link,
                      author_name=item.author_name, pubdate=item.pubdate)

    with open(fn, 'wb') as out:
        feed.write(out, 'utf-8')

    with open(pickle_fn, 'wb') as p:
        pickle.dump(previous, p)
"content_hash": "7383c8fe282bac847f743e79f00d855f",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 115,
"avg_line_length": 29.441860465116278,
"alnum_prop": 0.5813586097946287,
"repo_name": "renemilk/gitnotifs",
"id": "836d6c72e4c1994ac34adb26b224c5a2f61bfc13",
"size": "1266",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/gitnotifs/rss.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "9966"
}
],
"symlink_target": ""
} |
import json
import threading
import time
import uuid
from pika import BasicProperties
from pika.exceptions import ConnectionClosed, ChannelClosed
from ...helper import fill_in_the_blank, log
from .consumer import Consumer
from .exception import NoConnectionError, SubscriptionNotAllowedError
from .helper import active_connection, SHARED_DIRECT_EXCHANGE_NAME, SHARED_TOPIC_EXCHANGE_NAME, SHARED_SIGNAL_CONNECTION_LOSS
class Driver(object):
""" Driver for RabbitMQ
:param url: the URL to the server (``str`` for a single connection or ``list`` for rotation)
:param list consumer_classes: the list of :class:`.consumer.Consumer`-based classes
:param bool unlimited_retries: the flag to disable limited retry count.
:param callable on_connect: a callback function when the message consumption begins.
:param callable on_disconnect: a callback function when the message consumption is interrupted due to unexpected disconnection.
:param callable on_error: a callback function when the message consumption is interrupted due to exception raised from the main callback function.
:param dict default_publishing_options: the default options for publishing (normal)
:param dict default_broadcasting_options: the default options for publishing (broadcast)
:param dict default_consuming_shared_queue_options: the default options for consuming share queue
:param dict default_consuming_distributed_queue_options: the default options for consuming distributed queue
``default_publishing_options`` and ``default_broadcasting_options`` only take ``exchange``
to allow overriding the default exchange.
``default_consuming_shared_queue_options`` and ``default_consuming_distributed_queue_options``
will have the data structure like this::
{
'exchange': {
'name': str, # It is "exchange" in pika's exchange_declare.
'type': str, # It is "exchange_type" in pika's exchange_declare.
}
}
Here is an example for ``on_connect``.
.. code-block:: Python
def on_connect(consumer = None, controller_id = None, route = None, queue_name = None, summary = None):
...
Here is an example for ``on_disconnect``.
.. code-block:: Python
def on_disconnect(consumer = None, controller_id = None, route = None, queue_name = None, summary = None):
...
Here is an example for ``on_error``.
.. code-block:: Python
def on_error(exception, consumer = None, controller_id = None, route = None, queue_name = None, summary = None):
...
Where:
* ``exception`` is the (raised) exception object.
* ``consumer`` is the associate consumer object (optional).
* ``controller_id`` is the associate ID (optional).
* ``route`` is the affected route (optional).
* ``queue_name`` is the affected queue name (optional).
* ``summary`` is the summary of the event (optional).
"""
def __init__(self, url, consumer_classes = None, unlimited_retries = False, on_connect = None,
on_disconnect = None, on_error = None, default_publishing_options : dict = None,
default_broadcasting_options : dict = None, default_consuming_shared_queue_options : dict = None,
default_consuming_distributed_queue_options : dict = None, auto_acknowledge = False,
send_sigterm_on_disconnect = True):
for consumer_class in consumer_classes or []:
assert isinstance(consumer_class, Consumer), 'This ({}) needs to be a subclass of vireo.drivers.rabbitmq.Consumer.'.format(consumer_class)
self._url = url
self._consumer_classes = consumer_classes or []
self._async_listener = None
self._shared_stream = []
self._consumers = []
self._has_term_signal = False
self._active_routes = []
self._auto_acknowledge = auto_acknowledge
self._send_sigterm_on_disconnect = send_sigterm_on_disconnect
self._default_publishing_options = default_publishing_options or {}
self._default_broadcasting_options = default_broadcasting_options or {}
self._default_consuming_shared_queue_options = default_consuming_shared_queue_options or {}
self._default_consuming_distributed_queue_options = default_consuming_distributed_queue_options or {}
self._unlimited_retries = unlimited_retries
self._on_connect = on_connect
self._on_disconnect = on_disconnect
self._on_error = on_error
self._requested_url_counter = 0 # used for rotation
self._total_url_count = 1 if isinstance(self._url, str) else len(self._url)
@property
def url(self):
    """ The connection URL to use for the next connection attempt.

    A plain string is returned unchanged; a tuple/list of URLs is rotated
    through round-robin, one step per access.
    """
    if not isinstance(self._url, (tuple, list)):
        return self._url
    self._requested_url_counter += 1
    return self._url[self._requested_url_counter % self._total_url_count]
def set_on_connect(self, on_connect):
    """ Replace the on-connect event callback. """
    self._on_connect = on_connect
def set_on_disconnect(self, on_disconnect):
    """ Replace the on-disconnect event callback. """
    self._on_disconnect = on_disconnect
def set_on_error(self, on_error):
    """ Replace the on-error event callback. """
    self._on_error = on_error
def setup_async_cleanup(self):
    """ Prepare to cleanly join all consumers asynchronously.

    Spawns a background thread running :meth:`join`; refuses to start a
    second one while a previous listener thread is still alive.
    """
    if self._async_listener and self._async_listener.is_alive():
        raise SubscriptionNotAllowedError('Unable to consume messages as this driver is currently active.')
    listener = threading.Thread(target = self.join)
    self._async_listener = listener
    listener.start()
def stop_consuming(self):
    """ Send the signal to stop consumption.

    The flag is polled once per second by :meth:`join`.
    """
    self._has_term_signal = True
def join(self):
    """ Synchronously join all consumers.

    Blocks until stop_consuming() sets the term-signal flag, a consumer
    reports a connection loss on the shared stream, or a KeyboardInterrupt
    arrives; then asks every consumer to stop and joins it back.  Raises
    NoConnectionError if the loop ended because of a connection loss.
    """
    try:
        # Poll once per second for a shutdown request or a connection loss.
        while True:
            if self._has_term_signal:
                log('warning', 'Stopping all route listeners')
                break
            if SHARED_SIGNAL_CONNECTION_LOSS in self._shared_stream:
                log('error', 'Unexpected connection loss detected')
                log('warning', 'Terminating all route listeners')
                break
            time.sleep(1)
    except KeyboardInterrupt:
        log('warning', 'SIGTERM received')
        log('debug', 'Terminating all route listeners')
    connection_losed = SHARED_SIGNAL_CONNECTION_LOSS in self._shared_stream
    for consumer in self._consumers:
        # On a graceful shutdown, first ask each still-alive consumer to stop;
        # on connection loss this polite step is skipped.
        if not connection_losed:
            if not consumer.is_alive():
                log('info', 'Route {}: Already stopped listening (not alive).'.format(consumer.route))
                continue
            log('warning', 'Route {}: Sending the signal to stop listening.'.format(consumer.route))
            consumer.stop()
        try:
            # Force-terminate the listener thread (private Thread API).
            log('debug', 'Route {}: Terminating the listener.'.format(consumer.route))
            consumer._stop()
        except AssertionError:  # this is raised if the thread lock is still locked.
            log('warning', 'Route {}: Probably already stopped'.format(consumer.route))
        if not consumer.is_alive():
            log('info', 'Route {}: Termination confirmed (killed)'.format(consumer.route))
            continue
        log('debug', 'Route {}: Waiting the listener to join back to the parent thread.'.format(consumer.route))
        consumer.join()
        log('info', 'Route {}: Termination confirmed (joined).'.format(consumer.route))
    if connection_losed:
        raise NoConnectionError('Unexpectedly losed the connection during message consumption')
def publish(self, route, message, options = None, allowed_retry_count = 5):
    """ Synchronously publish a message.

    :param str route: the route
    :param str message: the message
    :param dict options: additional options for basic_publish
    :param int allowed_retry_count: how many reconnect-and-retry attempts
        remain on connection failure
    """
    defaults = self._generate_default_publish_options(
        self._default_publishing_options,
        SHARED_DIRECT_EXCHANGE_NAME,
        route,
        message,
    )
    merged_options = fill_in_the_blank(options or {}, defaults)
    self._do_publish(route, message, merged_options, allowed_retry_count)
def _do_publish(self, route, message, options, allowed_retry_count):
    # Internal worker for publish(): opens a connection/channel per attempt and
    # retries recursively on ConnectionClosed, decrementing allowed_retry_count.
    # The on_connect/on_disconnect hooks are only wired into the connection on
    # the final attempt (allowed_retry_count == 0); earlier attempts stay silent.
    with active_connection(self.url, self._on_connect if not allowed_retry_count else None, self._on_disconnect if not allowed_retry_count else None) as channel:
        try:
            log('debug', 'Publishing: route={} message={} options={}'.format(route, message, options))
            channel.basic_publish(**options)
            log('debug', 'Published: route={} message={} options={}'.format(route, message, options))
        except ConnectionClosed:
            if allowed_retry_count:
                # Retry with one fewer attempt remaining.
                log('warn', 'RETRY Publishing: route={} message={} options={}'.format(route, message, options))
                self._do_publish(route, message, options, allowed_retry_count = allowed_retry_count - 1)
                return
            # Out of retries: fire the disconnect hook on a daemon thread, then fail.
            if self._on_disconnect:
                async_callback = threading.Thread(target = self._on_disconnect, daemon = True)
                async_callback.start()
            raise NoConnectionError('Unexpectedly losed the connection while publishing a message')
def declare_queue_with_delegation(self, origin_queue_name, ttl, fallback_queue_name = None,
                                  common_queue_options = None, exchange_options = None):
    """ Declare a queue whose expired messages are delegated to a fallback
    queue through an internal dead-letter exchange.

    :param str origin_queue_name: the primary queue
    :param ttl: value for the queue's ``x-message-ttl`` argument
        (milliseconds per the RabbitMQ TTL extension -- TODO confirm callers pass ms)
    :param str fallback_queue_name: target queue for expired messages;
        defaults to ``"<origin>.delegated"``
    :param dict common_queue_options: options applied to both queues
    :param dict exchange_options: extra options for the internal exchange
    """
    actual_fallback_queue_name = fallback_queue_name or '{}.delegated'.format(origin_queue_name)
    exchange_name = 'fallback/{}/{}'.format(origin_queue_name, actual_fallback_queue_name)
    # The exchange is internal: only the broker (dead-lettering) publishes to it.
    exchange_options = fill_in_the_blank(exchange_options or {}, {'internal': True})
    common_queue_options = common_queue_options or {}
    default_fallback_queue_options = {'auto_delete': False}
    # Dead-letter expired messages from the origin queue into the exchange,
    # routed by the fallback queue's name.
    default_origin_queue_options = {
        'auto_delete' : False,
        'arguments' : {
            'x-dead-letter-exchange' : exchange_name,
            'x-dead-letter-routing-key' : actual_fallback_queue_name,
            'x-message-ttl' : ttl,
        }
    }
    fallback_queue_options = fill_in_the_blank(dict(common_queue_options), default_fallback_queue_options)
    origin_queue_options = fill_in_the_blank(dict(common_queue_options), default_origin_queue_options)
    fill_in_the_blank(exchange_options, {'exchange': exchange_name, 'exchange_type': 'direct'})
    with active_connection(self.url, self._on_connect, self._on_disconnect) as channel:
        try:
            # Order matters: exchange first, then the fallback queue bound to
            # it, and only then the origin queue that dead-letters into it.
            channel.exchange_declare(**exchange_options)
            self.declare_queue(actual_fallback_queue_name, fallback_queue_options)
            channel.queue_bind(
                queue = actual_fallback_queue_name,
                exchange = exchange_name,
            )
            self.declare_queue(origin_queue_name, origin_queue_options)
        except ConnectionClosed:
            # Fire the disconnect hook on a daemon thread, then fail.
            if self._on_disconnect:
                async_callback = threading.Thread(target = self._on_disconnect, daemon = True)
                async_callback.start()
            raise NoConnectionError('Unexpectedly losed the connection while orchestrating queues and exchange for delegation')
def broadcast(self, route, message, options = None, allowed_retry_count = 5):
    """ Broadcast a message to a particular route.

    :param str route: the route
    :param str message: the message
    :param dict options: additional options for basic_publish
    :param int allowed_retry_count: reconnect-and-retry attempts remaining
    """
    defaults = self._generate_default_publish_options(
        self._default_broadcasting_options,
        SHARED_TOPIC_EXCHANGE_NAME,
        route,
        message,
    )
    options = fill_in_the_blank(options or {}, defaults)
    # Fall back to the shared topic exchange when none was supplied.
    if not options.get('exchange'):
        options['exchange'] = SHARED_TOPIC_EXCHANGE_NAME
    self._do_broadcast(options['exchange'], route, message, options, allowed_retry_count)
def _do_broadcast(self, exchange_name, route, message, options, allowed_retry_count):
    # Internal worker for broadcast(): declares the (durable) topic exchange,
    # publishes, and retries recursively on ConnectionClosed.  As in
    # _do_publish, the connect/disconnect hooks are only active on the final
    # attempt (allowed_retry_count == 0).
    with active_connection(self.url, self._on_connect if not allowed_retry_count else None, self._on_disconnect if not allowed_retry_count else None) as channel:
        try:
            # Idempotent declaration: ensures the topic exchange exists.
            log('debug', 'Declaring a shared topic exchange')
            channel.exchange_declare(
                exchange = exchange_name,
                exchange_type = 'topic',
                passive = False,
                durable = True,
                auto_delete = False,
            )
            log('debug', 'Declared a shared topic exchange')
            log('debug', 'Broadcasting: route={} message={} options={}'.format(route, message, options))
            channel.basic_publish(**options)
            log('debug', 'Broadcasted: route={} message={} options={}'.format(route, message, options))
        except ConnectionClosed:
            if allowed_retry_count:
                # Retry with one fewer attempt remaining.
                log('warn', 'RETRY Broadcasting: route={} message={} options={}'.format(route, message, options))
                self._do_broadcast(exchange_name, route, message, options, allowed_retry_count = allowed_retry_count - 1)
                return
            # Out of retries: fire the disconnect hook on a daemon thread, then fail.
            if self._on_disconnect:
                async_callback = threading.Thread(target = self._on_disconnect, daemon = True)
                async_callback.start()
            raise NoConnectionError('Unexpectedly losed the connection while broadcasting an event')
def observe(self, route, callback, resumable, distributed, options = None,
            simple_handling = True, controller_id = None, delay_per_message = 0,
            max_retries = None, immediate_retry_limit = None, max_retry_timeout = None):
    """ Start a consumer listening on the given route and return it.

    :param str route: the route to observe
    :param callable callback: handler invoked for each message
    :param resumable: passed through to the consumer
    :param bool distributed: selects the distributed-queue option defaults
        instead of the shared-queue ones
    :param dict options: per-call overrides; may contain ``queue`` and
        ``exchange`` sub-dicts merged over the driver defaults
    :param simple_handling: passed through to the consumer
    :param str controller_id: consumer identifier; a UUID4 is generated
        when omitted
    :param delay_per_message / max_retries / immediate_retry_limit /
        max_retry_timeout: passed through to the consumer
    :returns: the started consumer instance
    """
    # The first registered consumer class claiming this route overrides the default.
    consumer_class = Consumer
    for overriding_consumer_class in self._consumer_classes:
        if overriding_consumer_class.can_handle_route(route):
            consumer_class = overriding_consumer_class
            break
    if not controller_id:
        controller_id = str(uuid.uuid4())
        log('info', 'Observer on {} will have the self-assigned controller ID {}'.format(route, controller_id))
    # Merge caller-supplied queue/exchange options over the driver-level
    # defaults for the selected queue flavour.
    default_options = self._default_consuming_distributed_queue_options if distributed else self._default_consuming_shared_queue_options
    given_options = options or {}
    queue_options = fill_in_the_blank(given_options.get('queue', {}), default_options.get('queue', {}))
    exchange_options = fill_in_the_blank(given_options.get('exchange', {}), default_options.get('exchange', {}))
    parameters = dict(
        url = self.url,
        route = route,
        callback = callback,
        shared_stream = self._shared_stream,
        resumable = resumable,
        distributed = distributed,
        queue_options = queue_options,
        simple_handling = simple_handling,
        unlimited_retries = self._unlimited_retries,
        on_connect = self._on_connect,
        on_disconnect = self._on_disconnect,
        on_error = self._on_error,
        controller_id = controller_id,
        exchange_options = exchange_options,
        auto_acknowledge = self._auto_acknowledge,
        send_sigterm_on_disconnect = self._send_sigterm_on_disconnect,
        delay_per_message = delay_per_message,
        max_retries = max_retries,
        immediate_retry_limit = immediate_retry_limit,
        max_retry_timeout = max_retry_timeout,
    )
    consumer = consumer_class(**parameters)
    self._consumers.append(consumer)
    consumer.start()
    return consumer
def _generate_default_publish_options(self, default_publishing_options, default_exchange_name,
                                      route, message):
    """ Build the default keyword arguments for ``basic_publish``. """
    exchange = default_publishing_options.get('exchange', default_exchange_name)
    return {
        'exchange'    : exchange,
        'routing_key' : route,
        'body'        : json.dumps(message),
        'properties'  : BasicProperties(content_type = 'application/json'),
    }
| {
"content_hash": "bb8c6cc509b9842f29813837efd92ddb",
"timestamp": "",
"source": "github",
"line_count": 385,
"max_line_length": 165,
"avg_line_length": 45.527272727272724,
"alnum_prop": 0.5917959835691465,
"repo_name": "shiroyuki/vireo",
"id": "a44ecf44cfa210d44940c9689f66f8f476ce2b70",
"size": "17528",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "vireo/drivers/rabbitmq/driver.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "978"
},
{
"name": "Python",
"bytes": "118263"
}
],
"symlink_target": ""
} |
import inspect
import os
import subprocess
import socket
import sys
import time
# From http://stackoverflow.com/questions/279237/python-import-a-module-from-a-folder
cmd_subfolder = os.path.realpath(os.path.abspath(os.path.join(os.path.split(inspect.getfile( inspect.currentframe() ))[0],"..")))
if cmd_subfolder not in sys.path:
sys.path.insert(0, cmd_subfolder)
import mosq_test
# Test fixture: act as a fake MQTT broker and verify that the client under
# test CONNECTs and then PUBLISHes one retained QoS-0 message.
# rc stays 1 (failure exit code) unless both expected packets arrive.
rc = 1
keepalive = 60
mid = 16  # NOTE(review): unused in this QoS-0 flow; kept for parity with sibling tests
connect_packet = mosq_test.gen_connect("retain-qos0-test", keepalive=keepalive)
connack_packet = mosq_test.gen_connack(rc=0)
publish_packet = mosq_test.gen_publish("retain/qos0/test", qos=0, payload="retained message", retain=True)

# Listen on port 1888 for the client under test.
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.settimeout(10)
sock.bind(('', 1888))
sock.listen(5)

# Launch the client binary (passed on the command line) with library/python
# paths pointing at the in-tree builds.
client_args = sys.argv[1:]
env = dict(os.environ)
env['LD_LIBRARY_PATH'] = '../../lib:../../lib/cpp'
try:
    pp = env['PYTHONPATH']
except KeyError:
    pp = ''
env['PYTHONPATH'] = '../../lib/python:'+pp
client = mosq_test.start_client(filename=sys.argv[1].replace('/', '-'), cmd=client_args, env=env)

try:
    (conn, address) = sock.accept()
    conn.settimeout(10)
    # Expect CONNECT, answer CONNACK, then expect the retained PUBLISH.
    if mosq_test.expect_packet(conn, "connect", connect_packet):
        conn.send(connack_packet)
        if mosq_test.expect_packet(conn, "publish", publish_packet):
            rc = 0
    conn.close()
finally:
    # Always tear the client process and listening socket down.
    client.terminate()
    client.wait()
    sock.close()
exit(rc)
| {
"content_hash": "412f3d3ba0745a52bff994435596959b",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 129,
"avg_line_length": 27.054545454545455,
"alnum_prop": 0.6854838709677419,
"repo_name": "krattai/noo-ebs",
"id": "8860c03620b698eb94605ce69bc5574c896811e4",
"size": "1592",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "ref_code/mosquitto-1.4.7/test/lib/04-retain-qos0.py",
"mode": "33261",
"license": "bsd-2-clause",
"language": [
{
"name": "ActionScript",
"bytes": "2384"
},
{
"name": "Assembly",
"bytes": "4590201"
},
{
"name": "Awk",
"bytes": "396"
},
{
"name": "Batchfile",
"bytes": "19241"
},
{
"name": "C",
"bytes": "15563482"
},
{
"name": "C#",
"bytes": "265955"
},
{
"name": "C++",
"bytes": "691846"
},
{
"name": "CMake",
"bytes": "104078"
},
{
"name": "CSS",
"bytes": "72772"
},
{
"name": "DTrace",
"bytes": "1258"
},
{
"name": "Erlang",
"bytes": "4424888"
},
{
"name": "GAP",
"bytes": "1517"
},
{
"name": "HTML",
"bytes": "65461"
},
{
"name": "Haxe",
"bytes": "6282"
},
{
"name": "Java",
"bytes": "6899"
},
{
"name": "JavaScript",
"bytes": "494026"
},
{
"name": "Lua",
"bytes": "274783"
},
{
"name": "M4",
"bytes": "107581"
},
{
"name": "Makefile",
"bytes": "143161"
},
{
"name": "NSIS",
"bytes": "27658"
},
{
"name": "Objective-C",
"bytes": "13321"
},
{
"name": "PHP",
"bytes": "43263"
},
{
"name": "PLpgSQL",
"bytes": "80625"
},
{
"name": "Perl",
"bytes": "344546"
},
{
"name": "Python",
"bytes": "500718"
},
{
"name": "QML",
"bytes": "150"
},
{
"name": "QMake",
"bytes": "3028"
},
{
"name": "Ragel",
"bytes": "46210"
},
{
"name": "Roff",
"bytes": "120721"
},
{
"name": "Ruby",
"bytes": "121530"
},
{
"name": "Shell",
"bytes": "293349"
},
{
"name": "TeX",
"bytes": "788237"
},
{
"name": "XSLT",
"bytes": "1459"
},
{
"name": "Yacc",
"bytes": "5139"
}
],
"symlink_target": ""
} |
from zope.interface import Interface, Attribute
class ICommand(Interface):
    """
    Generic Renamer command.

    Declarative zope.interface contract implemented by concrete commands.
    """
    # zope.interface attribute declarations; the strings passed to
    # Attribute() are the published contract documentation.
    name = Attribute("""
    Command name.
    """)

    description = Attribute("""
    Brief description of the command.
    """)

    def process(renamer, options):
        """
        Called once command line parsing is complete.

        @type renamer: L{renamer.application.Renamer}
        @type options: C{dict}
        """
class IRenamingCommand(ICommand):
    """
    Command that performs renaming on one argument at a time.
    """
    defaultNameTemplate = Attribute("""
    String template for the default name format to use if one is not supplied.
    """)

    defaultPrefixTemplate = Attribute("""
    String template for the default prefix format to use if one is not
    supplied.
    """)

    def processArgument(argument):
        """
        Process an argument.

        @rtype: C{dict} mapping C{unicode} to C{unicode}
        @return: Mapping of keys to values to substitute info the name
            template.
        """
class IRenamingAction(Interface):
    """
    An action that performs some renaming-related function and is undoable.

    Implementations must keep ``do`` and ``undo`` symmetric so an applied
    action can always be reversed.
    """
    src = Attribute("""
    L{twisted.python.filepath.FilePath} to the source file.
    """)

    dst = Attribute("""
    L{twisted.python.filepath.FilePath} to the destination file.
    """)

    def do(options):
        """
        Perform the action.

        @type options: C{dict}
        """

    def undo(options):
        """
        Perform the reverse action.

        @type options: C{dict}
        """
| {
"content_hash": "31469b0c34e92389baa2fb358317d604",
"timestamp": "",
"source": "github",
"line_count": 83,
"max_line_length": 78,
"avg_line_length": 19.602409638554217,
"alnum_prop": 0.594960049170252,
"repo_name": "jonathanj/renamer",
"id": "22e32b7b93d201bdd433d4724afc04be81d0f2cd",
"size": "1627",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "renamer/irenamer.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "63"
},
{
"name": "Python",
"bytes": "77981"
},
{
"name": "Shell",
"bytes": "562"
}
],
"symlink_target": ""
} |
import copy
import pytest
@pytest.mark.parametrize('model', [('kmeans_pca_topic_model'),
                                   ('base_topic_model'),
                                   ('custom_topic_model')])
def test_term_rank(model, request):
    """The term-rank figure carries one trace per non-outlier topic."""
    fitted_model = copy.deepcopy(request.getfixturevalue(model))
    figure = fitted_model.visualize_term_rank()
    expected_trace_count = len(set(fitted_model.topics_)) - fitted_model._outliers
    assert len(figure.to_dict()["data"]) == expected_trace_count
| {
"content_hash": "4ba24d2d57fd7e6e6906d597b05116cd",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 94,
"avg_line_length": 37.333333333333336,
"alnum_prop": 0.5915178571428571,
"repo_name": "MaartenGr/BERTopic",
"id": "6799efb25ebd33f36f49a7e5c8f3156b1811de22",
"size": "448",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_plotting/test_term_rank.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "365"
},
{
"name": "Python",
"bytes": "292635"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import migrations, models
import django.contrib.gis.db.models.fields
class Migration(migrations.Migration):
    """Create the ``Postcode`` model: a GeoDjango point location plus
    denormalized region/subregion/district names and an FK to Country."""

    dependencies = [
        ('cosmopolitan', '0002_auto_20160128_1936'),
    ]

    operations = [
        migrations.CreateModel(
            name='Postcode',
            fields=[
                # The postcode string itself is the primary key.
                ('id', models.CharField(primary_key=True, serialize=False, max_length=20)),
                # WGS84 (srid=4326) point coordinates.
                ('location', django.contrib.gis.db.models.fields.PointField(srid=4326)),
                ('region_name', models.CharField(max_length=100, db_index=True)),
                ('subregion_name', models.CharField(max_length=100, db_index=True)),
                ('district_name', models.CharField(max_length=100, db_index=True)),
                ('country', models.ForeignKey(to='cosmopolitan.Country', related_name='postal_codes')),
            ],
        ),
    ]
| {
"content_hash": "178dd6d9fadf49873c5133a03e930faa",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 103,
"avg_line_length": 36.68,
"alnum_prop": 0.604143947655398,
"repo_name": "openspending/cosmopolitan",
"id": "42034994c51f05a3187f5589c51c68c25d5527db",
"size": "941",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "cosmopolitan/migrations/0003_postcode.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "1293"
},
{
"name": "Python",
"bytes": "54701"
}
],
"symlink_target": ""
} |
"""
Perm Store
author: Kevin Jamieson, kevin.g.jamieson@gmail.com
last updated: 12/31/2014
Persistent storage solution. Database hierarchy is organized as: ::\n
PermStore[database_id][bucket_id][doc_uid] = {key1:value1,key2:value2,...}
Dependencies: next.constants to determine location of mongoDB server
Some common functions
###############################
Initialization::\n
db = PermStore()
Database functions::\n
exists,didSucceed,message = db.exists(database_id,bucket_id,doc_uid,key)
value,didSucceed,message = db.get(database_id,bucket_id,doc_uid,key)
doc,didSucceed,message = db.getDoc(database_id,bucket_id,doc_uid)
docs,didSucceed,message = db.getDocsByPattern(database_id,bucket_id,filter_dict)
didSucceed,message = db.set(database_id,bucket_id,doc_uid,key,value)
didSucceed,message = db.setDoc(database_id,bucket_id,doc_uid,doc)
didSucceed,message = db.delete(database_id,bucket_id,doc_uid,key)
didSucceed,message = db.deleteDoc(database_id,bucket_id,doc_uid)
didSucceed,message = db.deleteDocsByPattern(database_id,bucket_id,filter_dict)
didSucceed,message = db.deleteBucket(database_id,bucket_id)
didSucceed,message = db.deleteDatabase(database_id)
didSucceed,message = db.deleteAll()
Database inspection ::\n
docNames,didSucceed,message = db.getDocNames(database_id,bucket_id)
bucketNames,didSucceed,message = db.getBucketNames(database_id)
databaseNames,didSucceed,message = db.getDatabaseNames()
Some example usage
###############################
Let's first initialize the database ::\n
from next.database.PermStore import PermStore
db = PermStore()
And let's assume that the database is empty, which we can enforce by deleting everything ::\n
didSucceed,message = db.deleteAll()
Building up a document one key at a time ::\n
database_id = 'things'
bucket_id = 'animals'
doc_uid = 'cat'
didSucceed,message = db.set(database_id,bucket_id,doc_uid,'color','black')
didSucceed,message = db.set(database_id,bucket_id,doc_uid,'num_legs',4)
didSucceed,message = db.set(database_id,bucket_id,doc_uid,'age',7.5)
Inserting a document ::\n
database_id = 'things'
bucket_id = 'animals'
doc_uid = 'dog'
doc = {'color':'brown','num_legs':4,'age':9.5}
didSucceed,message = db.setDoc(database_id,bucket_id,doc_uid,doc)
doc_uid = 'human'
doc = {'color':'tan','num_legs':2,'age':28}
didSucceed,message = db.setDoc(database_id,bucket_id,doc_uid,doc)
Retrieving values ::\n
value,didSucceed,message = db.get('things','animals','dog','age')
print value
>>> 9.5
Retrieving docs ::\n
doc,didSucceed,message = db.getDoc('things','animals','cat')
print doc
>>> {u'color': u'black', u'age': 7.5, u'_id': u'cat', u'num_legs': 4}
doc,didSucceed,message = db.getDoc('things','animals','dog')
print doc
>>> {u'color': u'brown', u'age': 9.5, u'_id': u'dog', u'num_legs': 4}
Advanced doc retrieval ::\n
docs,didSucceed,message = db.getDocsByPattern('things','animals',{})
print docs
>>> [{u'color': u'black', u'age': 7.5, u'_id': 'cat', u'num_legs': 4}, {u'color': u'brown', u'age': 9.5, u'_id': 'dog', u'num_legs': 4}, {u'color': u'tan', u'age': 28, u'_id': 'human', u'num_legs': 2}]
docs,didSucceed,message = db.getDocsByPattern('things','animals',{'num_legs':4})
>>> [{u'color': u'black', u'age': 7.5, u'_id': 'cat', u'num_legs': 4}, {u'color': u'brown', u'age': 9.5, u'_id': 'dog', u'num_legs': 4}]
docs,didSucceed,message = db.getDocsByPattern('things','animals',{'age':{ '$gte':8,'$lt':10} })
>>> [{u'color': u'brown', u'age': 9.5, u'_id': 'dog', u'num_legs': 4}]
docs,didSucceed,message = db.getDocsByPattern('things','animals',{'age':{ '$gte':8 }, 'num_legs':2 })
>>> [{u'color': u'tan', u'age': 28, u'_id': 'human', u'num_legs': 2}]
Doc retrieval with time ::\n
from datetime import datetime,timedelta
t_0 = datetime.now()
t_1 = t_0 + timedelta(0,30)
t_2 = t_1 + timedelta(0,15)
t_3 = t_0 + timedelta(0,55)
# (if doc_uid=None, one is automatically generated)
didSucceed,message = db.setDoc('users','keys',None,{'user_id':'sd89w3hr292r','key':'a0jd103b2r','timestamp':t_0})
didSucceed,message = db.setDoc('users','keys',None,{'user_id':'sd89w3hr292r','key':'w8dh28232f','timestamp':t_1})
didSucceed,message = db.setDoc('users','keys',None,{'user_id':'sd89w3hr292r','key':'89yf9hgfwe','timestamp':t_2})
didSucceed,message = db.setDoc('users','keys',None,{'user_id':'sd89w3hr292r','key':'edhe2dqw9d','timestamp':t_3})
ts = t_1 - timedelta(0,1)
te = t_2 + timedelta(0,1)
docs,didSucceed,message = db.getDocsByPattern('users','keys',{'timestamp':{ '$gte':ts,'$lte':te } })
print docs
>>> [{u'timestamp': '2015-01-23 10:57:14.779000', u'_id': '54c2996c319da682ebb17576', u'user_id': u'sd89w3hr292r', u'key': u'w8dh28232f'}, {u'timestamp': '2015-01-23 10:57:29.779000', u'_id': '54c2996c319da682ebb17577', u'user_id': u'sd89w3hr292r', u'key': u'89yf9hgfwe'}]
"""
from pymongo import MongoClient
import next.constants as constants
from bson.binary import Binary
import cPickle
import traceback
from datetime import datetime
class PermStore(object):
"""
Acts as API to permanent store that can be passed around. Implements MongoDB
Attribtues:
client : MongoDB client
"""
def __init__(self):
    # Host/port come from next.constants; the client is shared by all methods.
    self.client = MongoClient(constants.MONGODB_HOST, constants.MONGODB_PORT)
    # self.client.write_concern = {'w':0}
def __del__(self):
    """ Best-effort close of the MongoDB client on garbage collection. """
    try:
        client = self.client
        if client is not None:
            client.close()
    except:
        # Never raise from a destructor.
        pass
def connectToMongoServer(self):
    """
    (Re)connect to the MongoDB server configured in next.constants.

    Outputs:
        (bool) didSucceed, (string) message

    BUG FIX: the original body contained unreachable statements after a
    bare ``raise`` -- including a ``return didSuccessfullyConnect,''`` that
    referenced an undefined variable.  The informative host/port error
    message those dead lines were meant to produce is now actually
    returned when the post-connect ping fails.
    """
    try:
        self.client = MongoClient(constants.MONGODB_HOST, constants.MONGODB_PORT)
        # This makes it so the write signal is fired off and and does not wait for acknowledgment
        # self.client.write_concern = {'w':0}
        if self.assertConnection():
            return True, ''
        error = 'Failed to connect to Mongodb server at %s:%s' % (constants.MONGODB_HOST, constants.MONGODB_PORT)
        return False, error
    except:
        return False, 'Failed to connect to MongoDB Server'
def assertConnection(self):
    """
    Checks that MongoDB is running

    Inputs:
        None
    Outputs:
        (boolean) isConnected

    Usage: ::\n
        db.assertConnection()
    """
    is_up = False
    try:
        is_up = bool(self.client.admin.command('ping')['ok'])
    except:
        pass
    return is_up
def makeProperDatabaseFormat(self,input_val):
    """
    Recursively convert a value into a MongoDB-storable form.  Dicts and
    lists are converted element-wise *in place*; strings, numbers and
    datetimes pass through unchanged; anything else is pickled into a BSON
    Binary blob.  Reversed by undoDatabaseFormat.

    Example of usage: ::\n
        >>> from next.database.PermStore import PermStore
        >>> db = PermStore()
        >>> import numpy
        >>> db_input = db.makeProperDatabaseFormat({'x': numpy.zeros(3), 'age': 4.5})
        # 'x' becomes a bson Binary blob; 'age' passes through untouched
        >>> db.undoDatabaseFormat(db_input)['age']
        4.5
    """
    if isinstance(input_val,dict):
        for key in input_val:
            input_val[key] = self.makeProperDatabaseFormat(input_val[key])
    elif isinstance(input_val,list):
        for idx in range(len(input_val)):
            input_val[idx] = self.makeProperDatabaseFormat(input_val[idx])
    elif isinstance(input_val, basestring):  # Python 2 only: matches str and unicode
        pass
    elif isinstance(input_val, (int, long, float) ):  # Python 2 'long'
        pass
    elif isinstance(input_val, datetime ):
        pass
    else:
        # pickle value so we can handle any python type
        pickled_input = cPickle.dumps(input_val, protocol=2)
        input_val = Binary(pickled_input)
    return input_val
def undoDatabaseFormat(self,input_val):
    """
    Inverse of makeProperDatabaseFormat: unpickles BSON Binary blobs and
    recurses into dicts and lists (mutating them in place).
    """
    if isinstance(input_val, dict):
        for key in input_val:
            input_val[key] = self.undoDatabaseFormat(input_val[key])
        return input_val
    if isinstance(input_val, list):
        for idx, item in enumerate(input_val):
            input_val[idx] = self.undoDatabaseFormat(item)
        return input_val
    if isinstance(input_val, Binary):
        return cPickle.loads(input_val)
    return input_val
def get_index_information(self,database_id,bucket_id):
    """
    Returns the description of all the indexes on the bucket

    Outputs:
        (dict) info, (bool) didSucceed, (string) message
    """
    bucket = self.client[database_id][bucket_id]
    return bucket.index_information(), True, ''
def create_index(self,database_id,bucket_id,index_dict):
    """
    Creates an index on the bucket defined by the keys in index_dict, e.g.

        db.create_index(database_id, bucket_id, {'num_eyes': 1})
    """
    try:
        # pymongo expects a list of (key, direction) pairs.
        index_spec = [(key, direction) for key, direction in index_dict.items()]
        message = self.client[database_id][bucket_id].create_index(index_spec)
        return True, message
    except:
        return False, 'unknown error'
def drop_index(self,database_id,bucket_id,index_name):
    """
    Deletes the index named index_name defined over the bucket_id

    Inputs:
        (string) database_id, (string) bucket_id, (string) index_name
    Outputs:
        (bool) didSucceed, (string) message

    BUG FIX: the original body was copy-pasted from create_index and called
    ``create_index(index_list)`` with an undefined ``index_list`` variable
    (a guaranteed NameError); it now actually drops the named index, with
    the same try/except error handling as its siblings.
    """
    try:
        message = self.client[database_id][bucket_id].drop_index(index_name)
        return True, message
    except:
        return False, 'unknown error'
def drop_all_indexes(self,database_id,bucket_id):
    """
    Deletes ALL of the indexes defined over the bucket_id.
    (The original docstring was copy-pasted from drop_index and wrongly
    mentioned an index_name parameter.)

    Inputs:
        (string) database_id, (string) bucket_id
    Outputs:
        (bool) didSucceed, (string) message
    """
    message = self.client[database_id][bucket_id].drop_indexes()
    return True,message
def exists(self,database_id,bucket_id,doc_uid,key):
    """
    Check existence of key

    Inputs:
        (string) database_id, (string) bucket_id, (string) doc_uid, (string) key
    Outputs:
        (bool) exists, (bool) didSucceed, (string) message

    Usage: ::\n
        exists,didSucceed,message = db.exists(database_id,bucket_id,doc_uid,key)
    """
    if self.client == None:
        didSucceed, message = self.connectToMongoServer()
        if not didSucceed:
            return None, False, message
    try:
        matching_doc = self.client[database_id][bucket_id].find_one({"_id": doc_uid, key: {'$exists': True}})
        return matching_doc is not None, True, ''
    except:
        return None, False, "MongoDB.exists Failed with unknown exception"
def get(self,database_id,bucket_id,doc_uid,key):
    """
    Get a value corresponding to key, returns None if no key exists

    Inputs:
        (string) database_id, (string) bucket_id, (string) doc_uid, (string) key
    Outputs:
        (string) value, (bool) didSucceed, (string) message

    Usage: ::\n
        value,didSucceed,message = db.get(database_id,bucket_id,doc_uid,key)
    """
    if self.client == None:
        didSucceed, message = self.connectToMongoServer()
        if not didSucceed:
            return None, False, message
    try:
        matching_doc = self.client[database_id][bucket_id].find_one({"_id": doc_uid, key: {'$exists': True}})
        if matching_doc is None:
            return None, True, 'MongoDB.get Key '+bucket_id+'.'+doc_uid+'.'+key+' does not exist'
        return self.undoDatabaseFormat(matching_doc[key]), True, 'From MongoDB'
    except:
        return None, False, 'MongoDB.get Failed with unknown exception'
def getDoc(self,database_id,bucket_id,doc_uid):
    """
    get a doc (dictionary of string values) corresponding to a doc_uid with {"doc_uid":doc_uid} (if none, returns None)

    Inputs:
        (string) database_id, (string) bucket_id, (string) doc_uid
    Outputs:
        ({ (string) key: (string) value, ... }) doc, (bool) didSucceed, (string) message

    Usage: ::\n
        doc,didSucceed,message = db.getDoc(database_id,bucket_id,doc_uid)

    BUG FIX: the except block contained a bare ``raise`` followed by
    unreachable statements; it now returns the (None, False, message)
    error triple promised above, consistent with getDocsByPattern.
    """
    if self.client == None:
        didSucceed,message = self.connectToMongoServer()
        if not didSucceed:
            return None,False,message
    try:
        doc = self.client[database_id][bucket_id].find_one({"_id":doc_uid})
        return_doc = self.undoDatabaseFormat(doc)
        return return_doc,True,''
    except:
        return None,False,"MongoDB.getDoc Failed with unknown exception"
def getDocsByPattern(self,database_id,bucket_id,filter_dict):
    """
    get all docs that contain {key1:value1,...} according to filter dict (if none, returns None)

    Inputs:
        (string) database_id, (string) bucket_id, (dict of key ,value strings)
    Outputs:
        ({ (string) key: (string) value, ... }) docs, (bool) didSucceed, (string) message

    Usage: ::\n
        docs,didSucceed,message = db.getDocsByPattern(database_id,bucket_id,filter_dict)
    """
    if self.client == None:
        didSucceed, message = self.connectToMongoServer()
        if not didSucceed:
            return None, False, message
    try:
        results = []
        for raw_doc in self.client[database_id][bucket_id].find(filter_dict):
            decoded = self.undoDatabaseFormat(raw_doc)
            # Stringify ObjectId and datetime fields when present.
            for field in ('_id', 'timestamp'):
                try:
                    decoded[field] = str(decoded[field])
                except:
                    pass
            results.append(decoded)
        return results, True, ''
    except:
        return None, False, "MongoDB.getDocs Failed with unknown exception"
def increment(self,database_id,bucket_id,doc_uid,key,value):
    """
    increments a key by amount value. If key does not exist, sets {key:value}

    Inputs:
        (string) database_id, (string) bucket_id, (string) doc_uid, (string) key, (int) value
    Outputs:
        (int) new_value, (bool) didSucceed, (string) message

    Usage: ::\n
        new_value,didSucceed,message = db.increment(database_id,bucket_id,doc_uid,key,value)

    BUG FIXES: the failure paths returned 2-tuples although the contract
    (and the success path) is a 3-tuple, which would break callers
    unpacking three values; unreachable statements after a bare ``raise``
    were removed so the documented error triple is actually returned; the
    copy-pasted "MongoDB.set" error message now names increment.
    """
    if self.client == None:
        didSucceed,message = self.connectToMongoServer()
        if not didSucceed:
            return None,False,message
    try:
        # Atomic upsert-and-increment; new=True returns the post-update doc.
        new_doc = self.client[database_id][bucket_id].find_and_modify(query={"_id":doc_uid} , update={ '$inc': {key:value} },upsert = True,new=True )
        new_value = new_doc[key]
        return new_value,True,'From Mongo'
    except:
        return None,False,"MongoDB.increment Failed with unknown exception"
def get_list(self,database_id,bucket_id,doc_uid,key):
    """
    gets the list saved under key. If key does not exist, returns None

    Inputs:
        (string) database_id, (string) bucket_id, (string) doc_uid, (string) key
    Outputs:
        (list) list_value, (bool) didSucceed, (string) message

    Usage: ::\n
        didSucceed,message = db.get_list(database_id,bucket_id,doc_uid,key,value)
    """
    # Lists are stored like any other value, so plain get() suffices.
    return self.get(database_id, bucket_id, doc_uid, key)
def append_list(self,database_id,bucket_id,doc_uid,key,value):
    """
    appends value to list saved by key. If key does not exist, sets {key:[value]}

    Inputs:
        (string) database_id, (string) bucket_id, (string) doc_uid, (string) key, (int) value
    Outputs:
        (bool) didSucceed, (string) message

    Usage: ::\n
        didSucceed,message = db.set(database_id,bucket_id,doc_uid,key,value)

    BUG FIX: removed the bare ``raise`` and the unreachable statements that
    followed it in the except block; the documented (didSucceed, message)
    error pair is now actually returned, and the copy-pasted "MongoDB.set"
    error message names append_list.
    """
    if self.client == None:
        didSucceed,message = self.connectToMongoServer()
        if not didSucceed:
            return False,message
    try:
        value = self.makeProperDatabaseFormat(value)
        message = self.client[database_id][bucket_id].update( {"_id":doc_uid} , { '$push': {key:value} },upsert = True )
        return True,message
    except:
        return False,"MongoDB.append_list Failed with unknown exception"
def set_list(self, database_id, bucket_id, doc_uid, key, value_list):
    """
    Sets {key: value_list}, replacing any existing value (upsert).

    Inputs:
        (string) database_id, (string) bucket_id, (string) doc_uid, (string) key, (list) value_list
    Outputs:
        (bool) didSucceed, (string) message
    Usage: ::
        didSucceed,message = db.set_list(database_id,bucket_id,doc_uid,key,value_list)
    """
    if self.client is None:
        didSucceed, message = self.connectToMongoServer()
        if not didSucceed:
            return False, message
    try:
        value_list = self.makeProperDatabaseFormat(value_list)
        bucket = self.client[database_id][bucket_id]
        # Drop any existing value first, then push every new element in
        # order; done as two updates because $unset and $push cannot
        # target the same field in one update document.
        bucket.update({"_id": doc_uid}, {'$unset': {key: ''}}, upsert=True)
        bucket.update({"_id": doc_uid}, {'$push': {key: {'$each': value_list}}}, upsert=True)
        return True, ''
    except Exception:
        # Was a bare ``raise`` plus dead code with an error message naming
        # ``set``; report through the documented contract.
        error = "MongoDB.set_list Failed with unknown exception"
        return False, error
def set(self, database_id, bucket_id, doc_uid, key, value):
    """
    Sets {key: value}, replacing any existing value (upsert).

    Inputs:
        (string) database_id, (string) bucket_id, (string) doc_uid, (string) key, (string) value
    Outputs:
        (bool) didSucceed, (string) message
    Usage: ::
        didSucceed,message = db.set(database_id,bucket_id,doc_uid,key,value)
    """
    if self.client is None:
        didSucceed, message = self.connectToMongoServer()
        if not didSucceed:
            return False, message
    try:
        value = self.makeProperDatabaseFormat(value)
        self.client[database_id][bucket_id].update(
            {"_id": doc_uid}, {'$set': {key: value}}, upsert=True)
        return True, ''
    except Exception:
        # The original had a bare ``raise`` making this path dead code;
        # report the failure via the documented (didSucceed, message)
        # contract instead.
        error = "MongoDB.set Failed with unknown exception"
        return False, error
def setDoc(self, database_id, bucket_id, doc_uid, doc):
    """
    Inserts a doc (dictionary of string values). If doc_uid is None, a
    uid is assigned automatically by Mongo.

    Inputs:
        (string) database_id, (string) bucket_id, (string) doc_uid, ({ (string) key: (string) value, ... }) doc
    Outputs:
        (bool) didSucceed, (string) message
    Usage: ::
        didSucceed,message = db.setDoc(database_id,bucket_id,doc_uid,doc)
    """
    if self.client == None:
        didSucceed,message = self.connectToMongoServer()
        if not didSucceed:
            return False,message
    try:
        if doc_uid != None:
            # An explicit uid becomes the Mongo primary key.
            doc['_id'] = doc_uid
        doc = self.makeProperDatabaseFormat(doc)
        # write_id is the Mongo-assigned _id; not surfaced to the caller.
        write_id = self.client[database_id][bucket_id].insert(doc)
        return True,''
    except:
        error = "MongoDB.insert Failed with unknown exception"
        return False,error
def delete(self, database_id, bucket_id, doc_uid, key):
    """
    Deletes the {key: value} pair stored under *key* in the given doc.

    Inputs:
        (string) database_id, (string) bucket_id, (string) doc_uid, (string) key
    Outputs:
        (bool) didSucceed, (string) message
    Usage: ::
        didSucceed,message = db.delete(database_id,bucket_id,doc_uid,key)
    """
    if self.client is None:
        didSucceed, message = self.connectToMongoServer()
        if not didSucceed:
            # Contract is a 2-tuple; the original returned a 3-tuple
            # (None, False, message) here, breaking unpacking at call sites.
            return False, message
    try:
        self.client[database_id][bucket_id].update(
            {"_id": doc_uid}, {'$unset': {key: 1}})
        return True, "MongoDB.delete"
    except Exception:
        # Error text previously (incorrectly) named deleteBucket.
        error = "MongoDB.delete Failed with unknown exception"
        return False, error
def deleteDoc(self, database_id, bucket_id, doc_uid):
    """
    Deletes the doc associated with the given doc_uid.

    Inputs:
        (string) database_id, (string) bucket_id, (string) doc_uid
    Outputs:
        (bool) didSucceed, (string) message
    Usage: ::
        didSucceed,message = db.deleteDoc(database_id,bucket_id,doc_uid)
    """
    if self.client is None:
        didSucceed, message = self.connectToMongoServer()
        if not didSucceed:
            # Contract is a 2-tuple; the original returned a 3-tuple here.
            return False, message
    try:
        self.client[database_id][bucket_id].remove({'_id': doc_uid})
        return True, ''
    except Exception:
        # Error text previously (incorrectly) named deleteBucket.
        error = "MongoDB.deleteDoc Failed with unknown exception"
        return False, error
def deleteDocsByPattern(self, database_id, bucket_id, filter_dict):
    """
    Deletes all docs that match filter_dict ({key1: value1, ...}).

    Inputs:
        (string) database_id, (string) bucket_id, (dict of key,value strings) filter_dict
    Outputs:
        (bool) didSucceed, (string) message
    Usage: ::
        didSucceed,message = db.deleteDocsByPattern(database_id,bucket_id,filter_dict)
    """
    if self.client is None:
        didSucceed, message = self.connectToMongoServer()
        if not didSucceed:
            return False, message
    try:
        dict_return = self.client[database_id][bucket_id].remove(filter_dict)
        return True, str(dict_return)
    except Exception:
        # Was Python-2-only ``except Exception, err`` with an unused
        # binding.  NOTE(review): assumes ``traceback`` is imported at the
        # module top, as the original code already relied on it — confirm.
        error = traceback.format_exc()
        return False, error
def deleteBucket(self, database_id, bucket_id):
    """
    Deletes the bucket (and all docs in it) associated with bucket_id.

    Inputs:
        (string) database_id, (string) bucket_id
    Outputs:
        (bool) didSucceed, (string) message
    Usage: ::
        didSucceed,message = db.deleteBucket(database_id,bucket_id)
    """
    if self.client is None:
        didSucceed, message = self.connectToMongoServer()
        if not didSucceed:
            # Contract is a 2-tuple; the original returned a 3-tuple here.
            return False, message
    try:
        self.client[database_id][bucket_id].drop()
        return True, ''
    except Exception:
        error = "MongoDB.deleteBucket Failed with unknown exception"
        return False, error
def deleteDatabase(self, database_id):
    """
    Deletes the database (and all buckets/docs in it) for database_id.

    Inputs:
        (string) database_id
    Outputs:
        (bool) didSucceed, (string) message
    Usage: ::
        didSucceed,message = db.deleteDatabase(database_id)
    """
    if self.client is None:
        didSucceed, message = self.connectToMongoServer()
        if not didSucceed:
            # Contract is a 2-tuple; the original returned a 3-tuple here.
            return False, message
    try:
        self.client.drop_database(database_id)
        return True, ''
    except Exception:
        error = "MongoDB.deleteDatabase Failed with unknown exception"
        return False, error
def deleteAll(self):
    """
    Deletes every application database (everything except Mongo's own
    'local' and 'admin' bookkeeping databases).

    Inputs:
        None
    Outputs:
        (bool) didSucceed, (string) message
    Usage: ::
        didSucceed,message = db.deleteAll()
    """
    if self.client is None:
        didSucceed, message = self.connectToMongoServer()
        if not didSucceed:
            # Contract is a 2-tuple; the original returned a 3-tuple here.
            return False, message
    try:
        for database_id in self.client.database_names():
            # Never drop Mongo's internal databases.
            if database_id not in ('local', 'admin'):
                didSucceed, message = self.deleteDatabase(database_id)
        return True, ''
    except Exception:
        # Error text previously (incorrectly) named deleteDatabase.
        error = "MongoDB.deleteAll Failed with unknown exception"
        return False, error
def getDocNames(self, database_id, bucket_id):
    """
    Returns the list of doc_uids for every doc in the bucket named by
    bucket_id.

    Inputs:
        (string) database_id, (string) bucket_id
    Outputs:
        ([(string) doc_uid, ... ]) docNames, (bool) didSucceed, (string) message
    Usage: ::
        docNames,didSucceed,message = db.getDocNames(database_id,bucket_id)
    """
    # Connect on demand if no client is held yet.
    if self.client is None:
        connected, message = self.connectToMongoServer()
        if not connected:
            return None, False, message
    try:
        names = []
        for document in self.client[database_id][bucket_id].find():
            names.append(document['_id'])
        return names, True, ''
    except:
        return None, False, "MongoDB.getDocNames Failed with unknown exception"
def getBucketNames(self, database_id):
    """
    Returns the list of bucket_ids in the database named by database_id.

    Inputs:
        (string) database_id
    Outputs:
        ([(string) bucket_id, ... ]) bucketNames, (bool) didSucceed, (string) message
    Usage: ::
        bucketNames,didSucceed,message = db.getBucketNames(database_id)
    """
    # Connect on demand if no client is held yet.
    if self.client is None:
        connected, message = self.connectToMongoServer()
        if not connected:
            return None, False, message
    try:
        # collection_names() already yields plain names; just materialize.
        return list(self.client[database_id].collection_names()), True, ''
    except:
        return None, False, "MongoDB.getBucketNames Failed with unknown exception"
def getDatabaseNames(self):
    """
    Returns the list of database names held by the server (typically
    app_data and app_logs, plus Mongo's own local/admin).

    Inputs:
        None
    Outputs:
        ([(string) database_id, ... ]) databaseNames, (bool) didSucceed, (string) message
    Usage:
        databaseNames,didSucceed,message = db.getDatabaseNames()
    """
    # Connect on demand if no client is held yet.
    if self.client is None:
        connected, message = self.connectToMongoServer()
        if not connected:
            return None, False, message
    try:
        return list(self.client.database_names()), True, ''
    except:
        return None, False, "MongoDB.getDatabaseNames Failed with unknown exception"
| {
"content_hash": "a2300deed553772a2f91999d8606fdbb",
"timestamp": "",
"source": "github",
"line_count": 823,
"max_line_length": 445,
"avg_line_length": 35.24908869987849,
"alnum_prop": 0.5640813512581868,
"repo_name": "lalitkumarj/NEXT-psych",
"id": "cad5a684fa88bc010501b8d3136204d10aadceba",
"size": "29010",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "next/database_client/PermStore/PermStore.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "197644"
},
{
"name": "HTML",
"bytes": "358541"
},
{
"name": "Java",
"bytes": "1496"
},
{
"name": "JavaScript",
"bytes": "2714780"
},
{
"name": "Makefile",
"bytes": "2880"
},
{
"name": "Perl",
"bytes": "5546"
},
{
"name": "Python",
"bytes": "782418"
},
{
"name": "Shell",
"bytes": "7340"
}
],
"symlink_target": ""
} |
from typing import List
from PyQt5.QtMultimedia import (QVideoFrame, QAbstractVideoBuffer, QAbstractVideoSurface,
QVideoSurfaceFormat)
from PyQt5.QtGui import QImage
from PyQt5.QtCore import QObject, pyqtSignal
from electrum_ltc.i18n import _
from electrum_ltc.logging import get_logger
_logger = get_logger(__name__)
class QrReaderVideoSurface(QAbstractVideoSurface):
    """
    Receives QVideoFrames from QCamera, converts them into a QImage, flips the X and Y axis if
    necessary and sends them to listeners via the frame_available event.
    """

    def __init__(self, parent: QObject = None):
        super().__init__(parent)

    def present(self, frame: QVideoFrame) -> bool:
        # Reject frames that cannot be converted to a QImage up front.
        if not frame.isValid():
            return False
        image_format = QVideoFrame.imageFormatFromPixelFormat(frame.pixelFormat())
        if image_format == QVideoFrame.Format_Invalid:
            _logger.info(_('QR code scanner for video frame with invalid pixel format'))
            return False

        # Map the frame buffer into CPU-addressable memory; it must be
        # unmapped again before we return (hence the try/finally below).
        if not frame.map(QAbstractVideoBuffer.ReadOnly):
            _logger.info(_('QR code scanner failed to map video frame'))
            return False

        try:
            # Constructs the QImage directly over the mapped frame bits
            # (no copy yet); the .copy() below detaches it from the buffer
            # so the image stays valid after unmap().
            img = QImage(int(frame.bits()), frame.width(), frame.height(), image_format)

            # Check whether we need to flip the image on any axis
            surface_format = self.surfaceFormat()
            flip_x = surface_format.isMirrored()
            flip_y = surface_format.scanLineDirection() == QVideoSurfaceFormat.BottomToTop

            # Mirror the image if needed
            if flip_x or flip_y:
                img = img.mirrored(flip_x, flip_y)

            # Create a copy of the image so the original frame data can be freed
            img = img.copy()
        finally:
            frame.unmap()

        self.frame_available.emit(img)
        return True

    def supportedPixelFormats(self, handle_type: QAbstractVideoBuffer.HandleType) -> List[QVideoFrame.PixelFormat]:
        if handle_type == QAbstractVideoBuffer.NoHandle:
            # We support all pixel formats that can be understood by QImage directly
            return [QVideoFrame.Format_ARGB32, QVideoFrame.Format_ARGB32_Premultiplied,
                    QVideoFrame.Format_RGB32, QVideoFrame.Format_RGB24, QVideoFrame.Format_RGB565,
                    QVideoFrame.Format_RGB555, QVideoFrame.Format_ARGB8565_Premultiplied]
        return []

    # Emitted with each successfully converted (and possibly mirrored) frame.
    frame_available = pyqtSignal(QImage)
| {
"content_hash": "ad06256ad79168ae3348acd244316678",
"timestamp": "",
"source": "github",
"line_count": 66,
"max_line_length": 115,
"avg_line_length": 37.57575757575758,
"alnum_prop": 0.6560483870967742,
"repo_name": "pooler/electrum-ltc",
"id": "7d2b00685d90585e354a859b923014b8622e2974",
"size": "3663",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "electrum_ltc/gui/qt/qrreader/qtmultimedia/video_surface.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "13024"
},
{
"name": "GLSL",
"bytes": "289"
},
{
"name": "Java",
"bytes": "2929"
},
{
"name": "Makefile",
"bytes": "2193"
},
{
"name": "NSIS",
"bytes": "7354"
},
{
"name": "Python",
"bytes": "5325268"
},
{
"name": "QML",
"bytes": "318745"
},
{
"name": "Ruby",
"bytes": "16856"
},
{
"name": "Shell",
"bytes": "105672"
},
{
"name": "kvlang",
"bytes": "70748"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Re-declares the two price fields as DecimalField(max_digits=8,
    # decimal_places=2) with their verbose names.

    dependencies = [
        # Must apply after the court-name migration in the same app.
        ('let_me_app', '0019_court_name'),
    ]

    operations = [
        migrations.AlterField(
            model_name='bookingpolicy',
            name='price',
            field=models.DecimalField(decimal_places=2, max_digits=8, verbose_name='estimated price'),
        ),
        migrations.AlterField(
            model_name='event',
            name='preliminary_price',
            field=models.DecimalField(decimal_places=2, max_digits=8, verbose_name='Preliminary price'),
        ),
    ]
| {
"content_hash": "24cfc12570b4d660c457c65cedf4a2c1",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 104,
"avg_line_length": 28.17391304347826,
"alnum_prop": 0.6049382716049383,
"repo_name": "oleg-chubin/let_me_play",
"id": "49f0ff996a9baaf28108c4b4b4e5592b23d196fd",
"size": "672",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "let_me_app/migrations/0020_auto_20170902_1854.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "18176"
},
{
"name": "HTML",
"bytes": "144720"
},
{
"name": "JavaScript",
"bytes": "158832"
},
{
"name": "Python",
"bytes": "265023"
},
{
"name": "Shell",
"bytes": "3061"
}
],
"symlink_target": ""
} |
"""SCons.Action
This encapsulates information about executing any sort of action that
can build one or more target Nodes (typically files) from one or more
source Nodes (also typically files) given a specific Environment.
The base class here is ActionBase. The base class supplies just a few
OO utility methods and some generic methods for displaying information
about an Action in response to the various commands that control printing.
A second-level base class is _ActionAction. This extends ActionBase
by providing the methods that can be used to show and perform an
action. True Action objects will subclass _ActionAction; Action
factory class objects will subclass ActionBase.
The heavy lifting is handled by subclasses for the different types of
actions we might execute:
CommandAction
CommandGeneratorAction
FunctionAction
ListAction
The subclasses supply the following public interface methods used by
other modules:
__call__()
THE public interface, "calling" an Action object executes the
command or Python function. This also takes care of printing
a pre-substitution command for debugging purposes.
get_contents()
Fetches the "contents" of an Action for signature calculation
plus the varlist. This is what gets MD5 checksummed to decide
if a target needs to be rebuilt because its action changed.
genstring()
Returns a string representation of the Action *without*
command substitution, but allows a CommandGeneratorAction to
generate the right action based on the specified target,
source and env. This is used by the Signature subsystem
(through the Executor) to obtain an (imprecise) representation
of the Action operation for informative purposes.
Subclasses also supply the following methods for internal use within
this module:
__str__()
Returns a string approximation of the Action; no variable
substitution is performed.
execute()
The internal method that really, truly, actually handles the
execution of a command or Python function. This is used so
that the __call__() methods can take care of displaying any
pre-substitution representations, and *then* execute an action
without worrying about the specific Actions involved.
get_presig()
Fetches the "contents" of a subclass for signature calculation.
The varlist is added to this to produce the Action's contents.
strfunction()
Returns a substituted string representation of the Action.
This is used by the _ActionAction.show() command to display the
command/function that will be executed to generate the target(s).
There is a related independent ActionCaller class that looks like a
regular Action, and which serves as a wrapper for arbitrary functions
that we want to let the user specify the arguments to now, but actually
execute later (when an out-of-date check determines that it's needed to
be executed, for example). Objects of this class are returned by an
ActionFactory class that provides a __call__() method as a convenient
way for wrapping up the functions.
"""
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
__revision__ = "src/engine/SCons/Action.py 4043 2009/02/23 09:06:45 scons"
import cPickle
import dis
import os
import re
import string
import sys
import subprocess
from SCons.Debug import logInstanceCreation
import SCons.Errors
import SCons.Executor
import SCons.Util
import SCons.Subst
# We use these type predicates a lot, so bind them to module-level names
# once (attribute lookup on SCons.Util is avoided in hot paths).
is_String = SCons.Util.is_String
is_List = SCons.Util.is_List

class _null:
    # Unique sentinel class: distinguishes "argument not supplied" from a
    # caller explicitly passing None.
    pass

# Module-level behavior flags; presumably toggled elsewhere by the build
# framework's command-line handling (e.g. dry-run / silent modes) — the
# toggling code is not in this module.
print_actions = 1
execute_actions = 1
print_actions_presub = 0
def rfile(n):
    """Return ``n.rfile()`` when *n* provides it, otherwise *n* unchanged.

    Objects without an ``rfile`` method (plain strings, etc.) are passed
    through untouched.
    """
    try:
        resolved = n.rfile()
    except AttributeError:
        resolved = n
    return resolved
def default_exitstatfunc(s):
    """Identity exit-status translator: report the status exactly as given."""
    return s
# Very old Pythons emitted SET_LINENO opcodes into bytecode; strip them so
# a function's signature contents do not change when the function merely
# moves within a file.  On Pythons without dis.SET_LINENO this is a no-op.
try:
    SET_LINENO = dis.SET_LINENO
    HAVE_ARGUMENT = dis.HAVE_ARGUMENT
except AttributeError:
    # No SET_LINENO opcode in this Python: nothing to remove.
    remove_set_lineno_codes = lambda x: x
else:
    def remove_set_lineno_codes(code):
        """Return bytecode string *code* with SET_LINENO opcodes removed."""
        result = []
        n = len(code)
        i = 0
        while i < n:
            c = code[i]
            op = ord(c)
            if op >= HAVE_ARGUMENT:
                # Opcode carries a two-byte argument: keep all three bytes
                # unless it is a SET_LINENO instruction.
                if op != SET_LINENO:
                    result.append(code[i:i+3])
                i = i+3
            else:
                result.append(c)
                i = i+1
        return string.join(result, '')
# Matches a string wrapped in one pair of quotes (single or double) and
# captures the unquoted interior in group 1.
strip_quotes = re.compile('^[\'"](.*)[\'"]$')
def _callable_contents(obj):
    """Return the signature contents of a callable Python object.

    Tries, in order: bound method (``im_func``), callable instance
    (``__call__.im_func``), raw code object, plain function.  The chain
    is driven by AttributeError, so order matters.  (``im_func`` is
    Python-2-only.)
    """
    try:
        # Test if obj is a method.
        return _function_contents(obj.im_func)
    except AttributeError:
        try:
            # Test if obj is a callable object.
            return _function_contents(obj.__call__.im_func)
        except AttributeError:
            try:
                # Test if obj is a code object.
                return _code_contents(obj)
            except AttributeError:
                # Test if obj is a function object.
                return _function_contents(obj)
def _object_contents(obj):
    """Return the signature contents of any Python object.

    We have to handle the case where object contains a code object
    since it can be pickled directly.  Same AttributeError-driven chain
    as _callable_contents, with a final pickle/str fallback for plain
    data objects.  (``im_func``/``cPickle`` are Python-2-only.)
    """
    try:
        # Test if obj is a method.
        return _function_contents(obj.im_func)
    except AttributeError:
        try:
            # Test if obj is a callable object.
            return _function_contents(obj.__call__.im_func)
        except AttributeError:
            try:
                # Test if obj is a code object.
                return _code_contents(obj)
            except AttributeError:
                try:
                    # Test if obj is a function object.
                    return _function_contents(obj)
                except AttributeError:
                    # Should be a pickable Python object.
                    try:
                        return cPickle.dumps(obj)
                    except (cPickle.PicklingError, TypeError):
                        # This is weird, but it seems that nested classes
                        # are unpickable.  The Python docs say it should
                        # always be a PicklingError, but some Python
                        # versions seem to return TypeError.  Just do
                        # the best we can.
                        return str(obj)
def _code_contents(code):
    """Return the signature contents of a code object.

    By providing direct access to the code object of the
    function, Python makes this extremely easy.  Hooray!

    Unfortunately, older versions of Python include line
    number indications in the compiled byte code.  Boo!
    So we remove the line number byte codes to prevent
    recompilations from moving a Python function.
    """
    contents = []

    # The code contents depends on the number of local variables
    # but not their actual names.
    contents.append("%s,%s" % (code.co_argcount, len(code.co_varnames)))
    try:
        contents.append(",%s,%s" % (len(code.co_cellvars), len(code.co_freevars)))
    except AttributeError:
        # Older versions of Python do not support closures.
        contents.append(",0,0")

    # The code contents depends on any constants accessed by the
    # function.  Note that we have to call _object_contents on each
    # constants because the code object of nested functions can
    # show-up among the constants.
    #
    # Note that we also always ignore the first entry of co_consts
    # which contains the function doc string.  We assume that the
    # function does not access its doc string.
    contents.append(',(' + string.join(map(_object_contents,code.co_consts[1:]),',') + ')')

    # The code contents depends on the variable names used to
    # accessed global variable, as changing the variable name changes
    # the variable actually accessed and therefore changes the
    # function result.
    contents.append(',(' + string.join(map(_object_contents,code.co_names),',') + ')')

    # The code contents depends on its actual code!!!
    contents.append(',(' + str(remove_set_lineno_codes(code.co_code)) + ')')

    return string.join(contents, '')
def _function_contents(func):
    """Return the signature contents of a function.

    Combines the function's code-object contents with its default
    argument values and captured closure cells, so any of these changing
    invalidates the signature.  (``func_code``/``func_defaults``/
    ``func_closure`` are Python-2-only attribute names.)
    """
    contents = [_code_contents(func.func_code)]

    # The function contents depends on the value of defaults arguments
    if func.func_defaults:
        contents.append(',(' + string.join(map(_object_contents,func.func_defaults),',') + ')')
    else:
        contents.append(',()')

    # The function contents depends on the closure captured cell values.
    try:
        closure = func.func_closure or []
    except AttributeError:
        # Older versions of Python do not support closures.
        closure = []

    #xxx = [_object_contents(x.cell_contents) for x in closure]
    try:
        xxx = map(lambda x: _object_contents(x.cell_contents), closure)
    except AttributeError:
        xxx = []
    contents.append(',(' + string.join(xxx, ',') + ')')

    return string.join(contents, '')
def _actionAppend(act1, act2):
    """Slap two actions together into one ListAction.

    Each argument is first coerced through the Action() factory; existing
    ListActions are flattened so the result is a single, flat ListAction.
    Raises TypeError when either argument cannot be converted.
    """
    a1 = Action(act1)
    a2 = Action(act2)
    if a1 is None or a2 is None:
        raise TypeError("Cannot append %s to %s" % (type(act1), type(act2)))
    # Flatten each side to a plain list of actions, then concatenate.
    left = a1.list if isinstance(a1, ListAction) else [a1]
    right = a2.list if isinstance(a2, ListAction) else [a2]
    return ListAction(left + right)
def _do_create_keywords(args, kw):
    """Fold positional Action() arguments into the keyword dict *kw*.

    args[0], when present, is the command-display spec: None or a string
    becomes ``cmdstr``; a callable becomes ``strfunction``; anything else
    raises UserError.  Any further positionals are prepended to
    ``varlist``.  Supplying both strfunction and cmdstr is an error.
    """
    varlist = kw.get('varlist', ())
    # prevent varlist="FOO" from being interpreted as ['F', 'O', 'O']
    if is_String(varlist):
        varlist = (varlist,)
    kw['varlist'] = tuple(varlist)

    if args:
        # turn positional args into equivalent keywords
        display = args[0]
        if display is None or is_String(display):
            kw['cmdstr'] = display
        elif callable(display):
            kw['strfunction'] = display
        else:
            raise SCons.Errors.UserError(
                'Invalid command display variable type. '
                'You must either pass a string or a callback which '
                'accepts (target, source, env) as parameters.')
        if len(args) > 1:
            kw['varlist'] = args[1:] + kw['varlist']

    have_strfunction = kw.get('strfunction', _null) is not _null
    have_cmdstr = kw.get('cmdstr', _null) is not _null
    if have_strfunction and have_cmdstr:
        raise SCons.Errors.UserError(
            'Cannot have both strfunction and cmdstr args to Action()')
def _do_create_action(act, kw):
    """This is the actual "implementation" for the
    Action factory method, below.  This handles the
    fact that passing lists to Action() itself has
    different semantics than passing lists as elements
    of lists.

    The former will create a ListAction, the latter
    will create a CommandAction by converting the inner
    list elements to strings.

    Dispatch order: already-an-Action, list (one command's words),
    callable (function or generator), string (env-var reference, single
    command, or newline-separated command list); anything else -> None.
    """
    if isinstance(act, ActionBase):
        # Already wrapped; pass through untouched.
        return act
    if is_List(act):
        #TODO(1.5) return CommandAction(act, **kw)
        return apply(CommandAction, (act,), kw)
    if callable(act):
        try:
            gen = kw['generator']
            del kw['generator']
        except KeyError:
            gen = 0
        if gen:
            action_type = CommandGeneratorAction
        else:
            action_type = FunctionAction
        return action_type(act, kw)
    if is_String(act):
        var = SCons.Util.get_environment_var(act)
        if var:
            # This looks like a string that is purely an Environment
            # variable reference, like "$FOO" or "${FOO}".  We do
            # something special here...we lazily evaluate the contents
            # of that Environment variable, so a user could put something
            # like a function or a CommandGenerator in that variable
            # instead of a string.
            return LazyAction(var, kw)
        commands = string.split(str(act), '\n')
        if len(commands) == 1:
            #TODO(1.5) return CommandAction(commands[0], **kw)
            return apply(CommandAction, (commands[0],), kw)
        # The list of string commands may include a LazyAction, so we
        # reprocess them via _do_create_list_action.
        return _do_create_list_action(commands, kw)
    return None
def _do_create_list_action(act, kw):
    """A factory for list actions.

    Converts each element of *act* through _do_create_action (dropping
    elements that convert to None) and wraps the result in a ListAction;
    a single surviving action is returned unwrapped.
    """
    converted = []
    for element in act:
        action = _do_create_action(element, kw)
        if action is not None:
            converted.append(action)
    # Exactly one action: no wrapper needed.  Zero or many: wrap (an empty
    # ListAction is a valid no-op action).
    if len(converted) == 1:
        return converted[0]
    return ListAction(converted)
def Action(act, *args, **kw):
    """A factory for action objects.

    Normalizes positional arguments into keywords, then delegates to the
    list or scalar creation helper depending on the type of *act*.
    """
    # Really simple: the _do_create_* routines do the heavy lifting.
    _do_create_keywords(args, kw)
    create = _do_create_list_action if is_List(act) else _do_create_action
    return create(act, kw)
class ActionBase:
    """Base class for all types of action objects that can be held by
    other objects (Builders, Executors, etc.)  This provides the
    common methods for manipulating and combining those actions."""

    def __cmp__(self, other):
        # Python-2 comparison: actions compare by their instance dicts.
        return cmp(self.__dict__, other)

    def no_batch_key(self, env, target, source):
        # Default: this action does not participate in batched builds.
        return None

    batch_key = no_batch_key

    def genstring(self, target, source, env):
        # Default representation ignores target/source/env; subclasses
        # that substitute per-build values override this.
        return str(self)

    def get_contents(self, target, source, env):
        # Signature contents = subclass presig plus each substituted
        # varlist variable, concatenated.
        result = [ self.get_presig(target, source, env) ]
        # This should never happen, as the Action() factory should wrap
        # the varlist, but just in case an action is created directly,
        # we duplicate this check here.
        vl = self.varlist
        if is_String(vl): vl = (vl,)
        for v in vl:
            result.append(env.subst('${'+v+'}'))
        return string.join(result, '')

    def __add__(self, other):
        return _actionAppend(self, other)

    def __radd__(self, other):
        return _actionAppend(other, self)

    def presub_lines(self, env):
        # CommandGeneratorAction needs a real environment
        # in order to return the proper string here, since
        # it may call LazyAction, which looks up a key
        # in that env.  So we temporarily remember the env here,
        # and CommandGeneratorAction will use this env
        # when it calls its _generate method.
        self.presub_env = env
        lines = string.split(str(self), '\n')
        self.presub_env = None      # don't need this any more
        return lines

    def get_targets(self, env, executor):
        """
        Returns the type of targets ($TARGETS, $CHANGED_TARGETS) used
        by this action.
        """
        return self.targets
class _ActionAction(ActionBase):
    """Base class for actions that create output objects."""

    def __init__(self, cmdstr=_null, strfunction=_null, varlist=(),
                       presub=_null, chdir=None, exitstatfunc=None,
                       batch_key=None, targets='$TARGETS',
                 **kw):
        # cmdstr/strfunction use the _null sentinel so "not supplied" can
        # be told apart from an explicit None (which means "silent").
        self.cmdstr = cmdstr
        if strfunction is not _null:
            if strfunction is None:
                self.cmdstr = None
            else:
                self.strfunction = strfunction
        self.varlist = varlist
        self.presub = presub
        self.chdir = chdir
        if not exitstatfunc:
            exitstatfunc = default_exitstatfunc
        self.exitstatfunc = exitstatfunc

        self.targets = targets

        if batch_key:
            if not callable(batch_key):
                # They have set batch_key, but not to their own
                # callable.  The default behavior here will batch
                # *all* targets+sources using this action, separated
                # for each construction environment.
                def default_batch_key(self, env, target, source):
                    return (id(self), id(env))
                batch_key = default_batch_key
            SCons.Util.AddMethod(self, batch_key, 'batch_key')

    def print_cmd_line(self, s, target, source, env):
        # Default command echo: one line to stdout.
        sys.stdout.write(s + "\n")

    def __call__(self, target, source, env,
                               exitstatfunc=_null,
                               presub=_null,
                               show=_null,
                               execute=_null,
                               chdir=_null,
                               executor=None):
        # Per-call overrides fall back to per-instance settings, which in
        # turn fall back to the module-level flags.
        if not is_List(target):
            target = [target]
        if not is_List(source):
            source = [source]

        if presub is _null:
            presub = self.presub
            if presub is _null:
                presub = print_actions_presub
        if exitstatfunc is _null: exitstatfunc = self.exitstatfunc
        if show is _null:  show = print_actions
        if execute is _null:  execute = execute_actions
        if chdir is _null: chdir = self.chdir
        save_cwd = None
        if chdir:
            save_cwd = os.getcwd()
            try:
                chdir = str(chdir.abspath)
            except AttributeError:
                if not is_String(chdir):
                    if executor:
                        chdir = str(executor.batches[0].targets[0].dir)
                    else:
                        chdir = str(target[0].dir)
        if presub:
            if executor:
                target = executor.get_all_targets()
                source = executor.get_all_sources()
            t = string.join(map(str, target), ' and ')
            l = string.join(self.presub_lines(env), '\n  ')
            out = "Building %s with action:\n  %s\n" % (t, l)
            sys.stdout.write(out)
        cmd = None
        if show and self.strfunction:
            if executor:
                target = executor.get_all_targets()
                source = executor.get_all_sources()
            try:
                cmd = self.strfunction(target, source, env, executor)
            except TypeError:
                # Older strfunctions take only (target, source, env).
                cmd = self.strfunction(target, source, env)
            if cmd:
                if chdir:
                    cmd = ('os.chdir(%s)\n' % repr(chdir)) + cmd
                try:
                    get = env.get
                except AttributeError:
                    print_func = self.print_cmd_line
                else:
                    print_func = get('PRINT_CMD_LINE_FUNC')
                    if not print_func:
                        print_func = self.print_cmd_line
                print_func(cmd, target, source, env)
        stat = 0
        if execute:
            if chdir:
                os.chdir(chdir)
            try:
                stat = self.execute(target, source, env, executor=executor)
                if isinstance(stat, SCons.Errors.BuildError):
                    # Let exitstatfunc rewrite (or clear) the status held
                    # inside the BuildError.
                    s = exitstatfunc(stat.status)
                    if s:
                        stat.status = s
                    else:
                        stat = s
                else:
                    stat = exitstatfunc(stat)
            finally:
                # Always restore the original working directory.
                if save_cwd:
                    os.chdir(save_cwd)
        if cmd and save_cwd:
            print_func('os.chdir(%s)' % repr(save_cwd), target, source, env)

        return stat
def _string_from_cmd_list(cmd_list):
"""Takes a list of command line arguments and returns a pretty
representation for printing."""
cl = []
for arg in map(str, cmd_list):
if ' ' in arg or '\t' in arg:
arg = '"' + arg + '"'
cl.append(arg)
return string.join(cl)
# get_default_ENV has an 'import SCons.Environment' that can't be moved to
# the top level without creating an import loop.  Since that import would
# create a local variable named 'SCons', shadowing the global, the import
# lives inside the fallback branch below.
default_ENV = None

def get_default_ENV(env):
    """Return env['ENV'] when present, else a cached default shell env.

    The fallback builds a full SCons.Environment once and caches its ENV
    in the module-global ``default_ENV`` for subsequent calls.
    """
    global default_ENV
    try:
        return env['ENV']
    except KeyError:
        pass
    if not default_ENV:
        import SCons.Environment
        # This is a hideously expensive way to get a default shell
        # environment, but it is incredibly rare for an Environment not
        # to carry its own ENV, so in practice this almost never runs.
        default_ENV = SCons.Environment.Environment()['ENV']
    return default_ENV
# This function is still in draft mode. We're going to need something like
# it in the long run as more and more places use subprocess, but I'm sure
# it'll have to be tweaked to get the full desired functionality.
# one special arg (so far?), 'error', to tell what to do with exceptions.
def _subproc(env, cmd, error = 'ignore', **kw):
    """Do common setup for a subprocess.Popen() call

    *error* controls exception handling: 'raise' re-raises an
    EnvironmentError from Popen; anything else returns a dummy Popen-like
    object whose wait() reports the error's negated errno.
    """
    # allow std{in,out,err} to be "'devnull'"
    io = kw.get('stdin')
    if is_String(io) and io == 'devnull':
        kw['stdin'] = open(os.devnull)
    io = kw.get('stdout')
    if is_String(io) and io == 'devnull':
        kw['stdout'] = open(os.devnull, 'w')
    io = kw.get('stderr')
    if is_String(io) and io == 'devnull':
        kw['stderr'] = open(os.devnull, 'w')

    # Figure out what shell environment to use
    ENV = kw.get('env', None)
    if ENV is None: ENV = get_default_ENV(env)

    # Ensure that the ENV values are all strings:
    new_env = {}
    for key, value in ENV.items():
        if is_List(value):
            # If the value is a list, then we assume it is a path list,
            # because that's a pretty common list-like value to stick
            # in an environment variable:
            value = SCons.Util.flatten_sequence(value)
            new_env[key] = string.join(map(str, value), os.pathsep)
        else:
            # It's either a string or something else.  If it's a string,
            # we still want to call str() because it might be a *Unicode*
            # string, which makes subprocess.Popen() gag.  If it isn't a
            # string or a list, then we just coerce it to a string, which
            # is the proper way to handle Dir and File instances and will
            # produce something reasonable for just about everything else:
            new_env[key] = str(value)
    kw['env'] = new_env

    try:
        #FUTURE return subprocess.Popen(cmd, **kw)
        return apply(subprocess.Popen, (cmd,), kw)
    except EnvironmentError, e:
        if error == 'raise': raise
        # return a dummy Popen instance that only returns error
        class dummyPopen:
            def __init__(self, e): self.exception = e
            def communicate(self): return ('','')
            def wait(self): return -self.exception.errno
            stdin = None
            class f:
                def read(self): return ''
                def readline(self): return ''
            stdout = stderr = f()
        return dummyPopen(e)
class CommandAction(_ActionAction):
    """Class for command-execution actions."""
    def __init__(self, cmd, **kw):
        # Cmd can actually be a list or a single item; if it's a
        # single item it should be the command string to execute; if a
        # list then it should be the words of the command string to
        # execute.  Only a single command should be executed by this
        # object; lists of commands should be handled by embedding
        # these objects in a ListAction object (which the Action()
        # factory above does).  cmd will be passed to
        # Environment.subst_list() for substituting environment
        # variables.
        if __debug__: logInstanceCreation(self, 'Action.CommandAction')
        #TODO(1.5) _ActionAction.__init__(self, **kw)
        apply(_ActionAction.__init__, (self,), kw)
        if is_List(cmd):
            if filter(is_List, cmd):
                raise TypeError, "CommandAction should be given only " \
                      "a single command"
        self.cmd_list = cmd
    def __str__(self):
        # Render the raw (unsubstituted) command line for display.
        if is_List(self.cmd_list):
            return string.join(map(str, self.cmd_list), ' ')
        return str(self.cmd_list)
    def process(self, target, source, env, executor=None):
        # Substitute construction variables into the command line and
        # peel off any leading '@' (silent) or '-' (ignore-errors)
        # control characters.  Returns (result, ignore, silent) where
        # result is the substituted list-of-word-lists.
        if executor:
            result = env.subst_list(self.cmd_list, 0, executor=executor)
        else:
            result = env.subst_list(self.cmd_list, 0, target, source)
        silent = None
        ignore = None
        while 1:
            try: c = result[0][0][0]
            except IndexError: c = None
            if c == '@': silent = 1
            elif c == '-': ignore = 1
            else: break
            result[0][0] = result[0][0][1:]
        # Drop the first word entirely if stripping control characters
        # left it empty.
        try:
            if not result[0][0]:
                result[0] = result[0][1:]
        except IndexError:
            pass
        return result, ignore, silent
    def strfunction(self, target, source, env, executor=None):
        # Return the string to print for this command, honoring a
        # user-supplied $*COMSTR-style override (self.cmdstr); None
        # suppresses printing entirely, '' when the command is silent.
        if self.cmdstr is None:
            return None
        if self.cmdstr is not _null:
            from SCons.Subst import SUBST_RAW
            if executor:
                c = env.subst(self.cmdstr, SUBST_RAW, executor=executor)
            else:
                c = env.subst(self.cmdstr, SUBST_RAW, target, source)
            if c:
                return c
        cmd_list, ignore, silent = self.process(target, source, env, executor)
        if silent:
            return ''
        return _string_from_cmd_list(cmd_list[0])
    def execute(self, target, source, env, executor=None):
        """Execute a command action.

        This will handle lists of commands as well as individual commands,
        because construction variable substitution may turn a single
        "command" into a list.  This means that this class can actually
        handle lists of commands, even though that's not how we use it
        externally.

        Returns 0 on success, or an SCons.Errors.BuildError describing
        the first failing command (unless '-' marked it ignorable).
        """
        escape_list = SCons.Subst.escape_list
        flatten_sequence = SCons.Util.flatten_sequence
        try:
            shell = env['SHELL']
        except KeyError:
            raise SCons.Errors.UserError('Missing SHELL construction variable.')
        try:
            spawn = env['SPAWN']
        except KeyError:
            raise SCons.Errors.UserError('Missing SPAWN construction variable.')
        else:
            # $SPAWN may itself be a construction-variable string; expand
            # it raw, without converting the result.
            if is_String(spawn):
                spawn = env.subst(spawn, raw=1, conv=lambda x: x)
        escape = env.get('ESCAPE', lambda x: x)
        ENV = get_default_ENV(env)
        # Ensure that the ENV values are all strings:
        for key, value in ENV.items():
            if not is_String(value):
                if is_List(value):
                    # If the value is a list, then we assume it is a
                    # path list, because that's a pretty common list-like
                    # value to stick in an environment variable:
                    value = flatten_sequence(value)
                    ENV[key] = string.join(map(str, value), os.pathsep)
                else:
                    # If it isn't a string or a list, then we just coerce
                    # it to a string, which is the proper way to handle
                    # Dir and File instances and will produce something
                    # reasonable for just about everything else:
                    ENV[key] = str(value)
        if executor:
            target = executor.get_all_targets()
            source = executor.get_all_sources()
        cmd_list, ignore, silent = self.process(target, map(rfile, source), env, executor)
        # Use len() to filter out any "command" that's zero-length.
        for cmd_line in filter(len, cmd_list):
            # Escape the command line for the interpreter we are using.
            cmd_line = escape_list(cmd_line, escape)
            result = spawn(shell, escape, cmd_line[0], cmd_line, ENV)
            if not ignore and result:
                msg = "Error %s" % result
                return SCons.Errors.BuildError(errstr=msg,
                                               status=result,
                                               action=self,
                                               command=cmd_line)
        return 0
    def get_presig(self, target, source, env, executor=None):
        """Return the signature contents of this action's command line.

        This strips $(-$) and everything in between the string,
        since those parts don't affect signatures.
        """
        from SCons.Subst import SUBST_SIG
        cmd = self.cmd_list
        if is_List(cmd):
            cmd = string.join(map(str, cmd))
        else:
            cmd = str(cmd)
        if executor:
            return env.subst_target_source(cmd, SUBST_SIG, executor=executor)
        else:
            return env.subst_target_source(cmd, SUBST_SIG, target, source)
    def get_implicit_deps(self, target, source, env, executor=None):
        # Scan the substituted command line(s) for the executable of each
        # command and return the corresponding File nodes, honoring the
        # $IMPLICIT_COMMAND_DEPENDENCIES setting (which may itself be a
        # construction variable).
        icd = env.get('IMPLICIT_COMMAND_DEPENDENCIES', True)
        if is_String(icd) and icd[:1] == '$':
            icd = env.subst(icd)
        if not icd or icd in ('0', 'None'):
            return []
        from SCons.Subst import SUBST_SIG
        if executor:
            cmd_list = env.subst_list(self.cmd_list, SUBST_SIG, executor=executor)
        else:
            cmd_list = env.subst_list(self.cmd_list, SUBST_SIG, target, source)
        res = []
        for cmd_line in cmd_list:
            if cmd_line:
                d = str(cmd_line[0])
                # Strip surrounding quotes before looking the program up
                # on the path.
                m = strip_quotes.match(d)
                if m:
                    d = m.group(1)
                d = env.WhereIs(d)
                if d:
                    res.append(env.fs.File(d))
        return res
class CommandGeneratorAction(ActionBase):
    """Class for command-generator actions.

    Wraps a user-supplied generator function that is called with
    (target, source, env, for_signature) and must return something the
    Action() factory can turn into a real Action; every operation here
    delegates to that freshly generated Action.
    """
    def __init__(self, generator, kw):
        if __debug__: logInstanceCreation(self, 'Action.CommandGeneratorAction')
        self.generator = generator
        # Keyword arguments are saved so they can be re-applied when the
        # generated Action is created in _generate().
        self.gen_kw = kw
        self.varlist = kw.get('varlist', ())
        self.targets = kw.get('targets', '$TARGETS')
    def _generate(self, target, source, env, for_signature, executor=None):
        # Call the generator function and wrap its return value in a
        # real Action object.
        # ensure that target is a list, to make it easier to write
        # generator functions:
        if not is_List(target):
            target = [target]
        if executor:
            target = executor.get_all_targets()
            source = executor.get_all_sources()
        ret = self.generator(target=target,
                             source=source,
                             env=env,
                             for_signature=for_signature)
        #TODO(1.5) gen_cmd = Action(ret, **self.gen_kw)
        gen_cmd = apply(Action, (ret,), self.gen_kw)
        if not gen_cmd:
            raise SCons.Errors.UserError("Object returned from command generator: %s cannot be used to create an Action." % repr(ret))
        return gen_cmd
    def __str__(self):
        # Generate the action in for-signature mode just to render it;
        # use the presub environment if one was recorded.
        try:
            env = self.presub_env
        except AttributeError:
            env = None
        if env is None:
            env = SCons.Defaults.DefaultEnvironment()
        act = self._generate([], [], env, 1)
        return str(act)
    def batch_key(self, env, target, source):
        return self._generate(target, source, env, 1).batch_key(env, target, source)
    def genstring(self, target, source, env, executor=None):
        return self._generate(target, source, env, 1, executor).genstring(target, source, env)
    def __call__(self, target, source, env, exitstatfunc=_null, presub=_null,
                 show=_null, execute=_null, chdir=_null, executor=None):
        # Generate the concrete Action (for_signature=0) and execute it.
        act = self._generate(target, source, env, 0, executor)
        if act is None:
            raise UserError("While building `%s': Cannot deduce file extension from source files: %s" % (repr(map(str, target)), repr(map(str, source))))
        return act(target, source, env, exitstatfunc, presub,
                   show, execute, chdir, executor)
    def get_presig(self, target, source, env, executor=None):
        """Return the signature contents of this action's command line.

        This strips $(-$) and everything in between the string,
        since those parts don't affect signatures.
        """
        return self._generate(target, source, env, 1, executor).get_presig(target, source, env)
    def get_implicit_deps(self, target, source, env, executor=None):
        return self._generate(target, source, env, 1, executor).get_implicit_deps(target, source, env)
    def get_targets(self, env, executor):
        return self._generate(None, None, env, 1, executor).get_targets(env, executor)
# A LazyAction is a kind of hybrid generator and command action for
# strings of the form "$VAR". These strings normally expand to other
# strings (think "$CCCOM" to "$CC -c -o $TARGET $SOURCE"), but we also
# want to be able to replace them with functions in the construction
# environment. Consequently, we want lazy evaluation and creation of
# an Action in the case of the function, but that's overkill in the more
# normal case of expansion to other strings.
#
# So we do this with a subclass that's both a generator *and*
# a command action. The overridden methods all do a quick check
# of the construction variable, and if it's a string we just call
# the corresponding CommandAction method to do the heavy lifting.
# If not, then we call the same-named CommandGeneratorAction method.
# The CommandGeneratorAction methods work by using the overridden
# _generate() method, that is, our own way of handling "generation" of
# an action based on what's in the construction variable.
class LazyAction(CommandGeneratorAction, CommandAction):
    """Hybrid command/generator action for "$VAR" strings.

    Behaves like a plain CommandAction when the construction variable
    expands to a single-line string, and like a CommandGeneratorAction
    when it expands to a function or multi-line command (see the module
    comment above this class).
    """
    def __init__(self, var, kw):
        if __debug__: logInstanceCreation(self, 'Action.LazyAction')
        #FUTURE CommandAction.__init__(self, '${'+var+'}', **kw)
        apply(CommandAction.__init__, (self, '${'+var+'}'), kw)
        self.var = SCons.Util.to_String(var)
        self.gen_kw = kw
    def get_parent_class(self, env):
        # Decide at call time which base class should service a request,
        # based on what the variable currently holds in this environment.
        c = env.get(self.var)
        if is_String(c) and not '\n' in c:
            return CommandAction
        return CommandGeneratorAction
    def _generate_cache(self, env):
        # Build an Action from the variable's current value (empty string
        # when there is no environment or no value).
        if env:
            c = env.get(self.var, '')
        else:
            c = ''
        #TODO(1.5) gen_cmd = Action(c, **self.gen_kw)
        gen_cmd = apply(Action, (c,), self.gen_kw)
        if not gen_cmd:
            raise SCons.Errors.UserError("$%s value %s cannot be used to create an Action." % (self.var, repr(c)))
        return gen_cmd
    def _generate(self, target, source, env, for_signature, executor=None):
        return self._generate_cache(env)
    def __call__(self, target, source, env, *args, **kw):
        # Dispatch to whichever base class matches the variable's value.
        args = (self, target, source, env) + args
        c = self.get_parent_class(env)
        #TODO(1.5) return c.__call__(*args, **kw)
        return apply(c.__call__, args, kw)
    def get_presig(self, target, source, env):
        c = self.get_parent_class(env)
        return c.get_presig(self, target, source, env)
class FunctionAction(_ActionAction):
    """Class for Python function actions."""
    def __init__(self, execfunction, kw):
        if __debug__: logInstanceCreation(self, 'Action.FunctionAction')
        self.execfunction = execfunction
        try:
            # Capture the callable's byte code for signature purposes.
            self.funccontents = _callable_contents(execfunction)
        except AttributeError:
            try:
                # See if execfunction will do the heavy lifting for us.
                self.gc = execfunction.get_contents
            except AttributeError:
                # This is weird, just do the best we can.
                self.funccontents = _object_contents(execfunction)
        #TODO(1.5) _ActionAction.__init__(self, **kw)
        apply(_ActionAction.__init__, (self,), kw)
    def function_name(self):
        # Best-effort human-readable name for display purposes.
        try:
            return self.execfunction.__name__
        except AttributeError:
            try:
                return self.execfunction.__class__.__name__
            except AttributeError:
                return "unknown_python_function"
    def strfunction(self, target, source, env, executor=None):
        # Return the string to print for this action: a $*COMSTR-style
        # override (self.cmdstr) wins, then the function's own
        # strfunction attribute, then a synthesized "name(targets,
        # sources)" rendering.
        if self.cmdstr is None:
            return None
        if self.cmdstr is not _null:
            from SCons.Subst import SUBST_RAW
            if executor:
                c = env.subst(self.cmdstr, SUBST_RAW, executor=executor)
            else:
                c = env.subst(self.cmdstr, SUBST_RAW, target, source)
            if c:
                return c
        def array(a):
            def quote(s):
                try:
                    str_for_display = s.str_for_display
                except AttributeError:
                    s = repr(s)
                else:
                    s = str_for_display()
                return s
            return '[' + string.join(map(quote, a), ", ") + ']'
        try:
            strfunc = self.execfunction.strfunction
        except AttributeError:
            pass
        else:
            if strfunc is None:
                return None
            if callable(strfunc):
                return strfunc(target, source, env)
        name = self.function_name()
        tstr = array(target)
        sstr = array(source)
        return "%s(%s, %s)" % (name, tstr, sstr)
    def __str__(self):
        name = self.function_name()
        if name == 'ActionCaller':
            return str(self.execfunction)
        return "%s(target, source, env)" % name
    def execute(self, target, source, env, executor=None):
        # Run the wrapped Python function; any exception it raises
        # (other than KeyboardInterrupt/SystemExit) is converted into a
        # BuildError result.
        exc_info = (None,None,None)
        try:
            if executor:
                target = executor.get_all_targets()
                source = executor.get_all_sources()
            rsources = map(rfile, source)
            try:
                result = self.execfunction(target=target, source=rsources, env=env)
            except KeyboardInterrupt, e:
                raise
            except SystemExit, e:
                raise
            except Exception, e:
                result = e
                exc_info = sys.exc_info()
            if result:
                result = SCons.Errors.convert_to_BuildError(result, exc_info)
                result.node=target
                result.action=self
                try:
                    result.command=self.strfunction(target, source, env, executor)
                except TypeError:
                    result.command=self.strfunction(target, source, env)
                # FIXME: This maintains backward compatibility with respect to
                # which type of exceptions were returned by raising an
                # exception and which ones were returned by value. It would
                # probably be best to always return them by value here, but
                # some codes do not check the return value of Actions and I do
                # not have the time to modify them at this point.
                if (exc_info[1] and
                    not isinstance(exc_info[1],EnvironmentError)):
                    raise result
            return result
        finally:
            # Break the cycle between the traceback object and this
            # function stack frame. See the sys.exc_info() doc info for
            # more information about this issue.
            del exc_info
    def get_presig(self, target, source, env):
        """Return the signature contents of this callable action."""
        try:
            return self.gc(target, source, env)
        except AttributeError:
            return self.funccontents
    def get_implicit_deps(self, target, source, env):
        # A Python function has no command-line executable to depend on.
        return []
class ListAction(ActionBase):
    """Class for lists of other actions.

    Each element is coerced through the Action() factory; execution runs
    the children in order and stops at the first failure.
    """
    def __init__(self, list):
        if __debug__: logInstanceCreation(self, 'Action.ListAction')
        def list_of_actions(x):
            if isinstance(x, ActionBase):
                return x
            return Action(x)
        self.list = map(list_of_actions, list)
        # our children will have had any varlist
        # applied; we don't need to do it again
        self.varlist = ()
        self.targets = '$TARGETS'
    def genstring(self, target, source, env):
        # Newline-joined genstrings of all child actions.
        return string.join(map(lambda a, t=target, s=source, e=env:
                                  a.genstring(t, s, e),
                               self.list),
                           '\n')
    def __str__(self):
        return string.join(map(str, self.list), '\n')
    def presub_lines(self, env):
        return SCons.Util.flatten_sequence(
            map(lambda a, env=env: a.presub_lines(env), self.list))
    def get_presig(self, target, source, env):
        """Return the signature contents of this action list.

        Simple concatenation of the signatures of the elements.
        """
        return string.join(map(lambda x, t=target, s=source, e=env:
                                  x.get_contents(t, s, e),
                               self.list),
                           "")
    def __call__(self, target, source, env, exitstatfunc=_null, presub=_null,
                 show=_null, execute=_null, chdir=_null, executor=None):
        # Run each child action in order; the first non-zero (failure)
        # status short-circuits the list.
        if executor:
            target = executor.get_all_targets()
            source = executor.get_all_sources()
        for act in self.list:
            stat = act(target, source, env, exitstatfunc, presub,
                       show, execute, chdir, executor)
            if stat:
                return stat
        return 0
    def get_implicit_deps(self, target, source, env):
        # Union (with duplicates) of the children's implicit dependencies.
        result = []
        for act in self.list:
            result.extend(act.get_implicit_deps(target, source, env))
        return result
class ActionCaller:
    """A class for delaying calling an Action function with specific
    (positional and keyword) arguments until the Action is actually
    executed.

    This class looks to the rest of the world like a normal Action object,
    but what it's really doing is hanging on to the arguments until we
    have a target, source and env to use for the expansion.
    """
    def __init__(self, parent, args, kw):
        # parent is the ActionFactory that created us; it supplies the
        # actfunc/strfunc/convert callables used below.
        self.parent = parent
        self.args = args
        self.kw = kw
    def get_contents(self, target, source, env):
        # Extract the byte code of the wrapped function (or of its
        # __call__ method for callable objects) so the action can be
        # signed; fall back to str() for builtins and the like.
        actfunc = self.parent.actfunc
        try:
            # "self.actfunc" is a function.
            contents = str(actfunc.func_code.co_code)
        except AttributeError:
            # "self.actfunc" is a callable object.
            try:
                contents = str(actfunc.__call__.im_func.func_code.co_code)
            except AttributeError:
                # No __call__() method, so it might be a builtin
                # or something like that.  Do the best we can.
                contents = str(actfunc)
        contents = remove_set_lineno_codes(contents)
        return contents
    def subst(self, s, target, source, env):
        # If s is a list, recursively apply subst()
        # to every element in the list
        if is_List(s):
            result = []
            for elem in s:
                result.append(self.subst(elem, target, source, env))
            return self.parent.convert(result)
        # Special-case hack:  Let a custom function wrapped in an
        # ActionCaller get at the environment through which the action
        # was called by using this hard-coded value as a special return.
        if s == '$__env__':
            return env
        elif is_String(s):
            return env.subst(s, 1, target, source)
        return self.parent.convert(s)
    def subst_args(self, target, source, env):
        # Substitute construction variables into every positional argument.
        return map(lambda x, self=self, t=target, s=source, e=env:
                          self.subst(x, t, s, e),
                   self.args)
    def subst_kw(self, target, source, env):
        # Substitute construction variables into every keyword argument.
        kw = {}
        for key in self.kw.keys():
            kw[key] = self.subst(self.kw[key], target, source, env)
        return kw
    def __call__(self, target, source, env, executor=None):
        # Expand the saved arguments now that target/source/env are known,
        # then invoke the real action function.
        args = self.subst_args(target, source, env)
        kw = self.subst_kw(target, source, env)
        #TODO(1.5) return self.parent.actfunc(*args, **kw)
        return apply(self.parent.actfunc, args, kw)
    def strfunction(self, target, source, env):
        args = self.subst_args(target, source, env)
        kw = self.subst_kw(target, source, env)
        #TODO(1.5) return self.parent.strfunc(*args, **kw)
        return apply(self.parent.strfunc, args, kw)
    def __str__(self):
        #TODO(1.5) return self.parent.strfunc(*self.args, **self.kw)
        return apply(self.parent.strfunc, self.args, self.kw)
class ActionFactory:
    """Wraps an arbitrary callable as an SCons-executable Action object.

    Instances remember an action function, a string function and an
    optional argument-conversion function.  Calling the factory captures
    the supplied positional and keyword arguments in an ActionCaller,
    which defers their expansion until the Action actually executes.
    """
    def __init__(self, actfunc, strfunc, convert=lambda x: x):
        # Stored verbatim; ActionCaller consults these later through its
        # .parent reference.
        self.actfunc = actfunc
        self.strfunc = strfunc
        self.convert = convert
    def __call__(self, *args, **kw):
        # Bundle the call arguments for deferred substitution, then wrap
        # the bundle in a real Action whose display string comes from the
        # caller's strfunction.
        caller = ActionCaller(self, args, kw)
        return Action(caller, strfunction=caller.strfunction)
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| {
"content_hash": "ad4c2c6d01ffe5a6b2a1cd0efa199436",
"timestamp": "",
"source": "github",
"line_count": 1240,
"max_line_length": 153,
"avg_line_length": 37.93709677419355,
"alnum_prop": 0.588006462310276,
"repo_name": "BenLand100/rat-pac",
"id": "953519460dee4b227f3352d3a9ceae9daeded398",
"size": "47042",
"binary": false,
"copies": "18",
"ref": "refs/heads/master",
"path": "python/SCons/Action.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "4"
},
{
"name": "C",
"bytes": "7932"
},
{
"name": "C++",
"bytes": "1928816"
},
{
"name": "GLSL",
"bytes": "16656"
},
{
"name": "Makefile",
"bytes": "368"
},
{
"name": "Perl",
"bytes": "14460"
},
{
"name": "Python",
"bytes": "2027711"
},
{
"name": "Shell",
"bytes": "447"
}
],
"symlink_target": ""
} |
from swgpy.object import *
def create(kernel):
    """Auto-generated swgpy object template: build the Static object for
    the shared place-setting item.  *kernel* is part of the standard
    template-factory signature (unused by the generated body)."""
    result = Static()
    result.template = "object/static/item/shared_item_place_setting_01.iff"
    result.attribute_template_id = -1
    result.stfName("obj_n","unknown_object")
    #### BEGIN MODIFICATIONS ####
    #### END MODIFICATIONS ####
    # NOTE(review): the trailing "| {" on the next line is dataset
    # concatenation residue, not part of the original source file.
    return result | {
"content_hash": "b89f40f9784a0a91cb8f48694a388c8d",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 72,
"avg_line_length": 23.153846153846153,
"alnum_prop": 0.6877076411960132,
"repo_name": "anhstudios/swganh",
"id": "5c4985dc8f41de9e90af257f1a563a21eb203679",
"size": "446",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "data/scripts/templates/object/static/item/shared_item_place_setting_01.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "11887"
},
{
"name": "C",
"bytes": "7699"
},
{
"name": "C++",
"bytes": "2357839"
},
{
"name": "CMake",
"bytes": "41264"
},
{
"name": "PLSQL",
"bytes": "42065"
},
{
"name": "Python",
"bytes": "7503510"
},
{
"name": "SQLPL",
"bytes": "42770"
}
],
"symlink_target": ""
} |
"""
A function analysis module to detect and properly mark
microsoft hotpatch pads...
"""
import vivisect
def analyzeFunction(vw, funcva):
    """Detect a run of hotpatch pad bytes (0x90 NOP / 0xcc int3)
    immediately preceding funcva and mark it as a pad location.

    vw is a vivisect workspace; funcva is the function's virtual address.
    Runs shorter than 5 bytes (too small for a hotpatch jump) and
    already-known locations are left alone.
    """
    # Renamed the original local 'bytes' -> 'raw': it shadowed the builtin.
    offset, raw = vw.getByteDef(funcva)
    ob = ord(raw[offset-1])
    if ob not in [0x90, 0xcc]:
        return
    # Walk backwards counting consecutive identical pad bytes.  Stop at
    # the start of the buffer: the original loop could drive the index
    # negative and silently wrap around to the end of the byte string.
    count = 1
    while offset - count >= 0:
        if ord(raw[offset-count]) != ob:
            break
        count += 1
    count -= 1
    va = funcva - count
    if count >= 5 and not vw.isLocation(va):
        vw.makePad(va, count)
| {
"content_hash": "cb368806f2648ec32a39cb51d11c30dd",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 54,
"avg_line_length": 19.576923076923077,
"alnum_prop": 0.6051080550098232,
"repo_name": "imjonsnooow/vivisect",
"id": "a42f765cb1df9c0e305c1a1814b4c5b47f7017ad",
"size": "510",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "vivisect/analysis/ms/hotpatch.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "167795"
},
{
"name": "CSS",
"bytes": "15980"
},
{
"name": "Makefile",
"bytes": "355"
},
{
"name": "Python",
"bytes": "11397213"
},
{
"name": "Shell",
"bytes": "476"
}
],
"symlink_target": ""
} |
from ._models_py3 import Actor
from ._models_py3 import CallbackConfig
from ._models_py3 import EncryptionProperty
from ._models_py3 import Event
from ._models_py3 import EventContent
from ._models_py3 import EventInfo
from ._models_py3 import EventListResult
from ._models_py3 import EventRequestMessage
from ._models_py3 import EventResponseMessage
from ._models_py3 import ExportPipeline
from ._models_py3 import ExportPipelineListResult
from ._models_py3 import ExportPipelineTargetProperties
from ._models_py3 import IPRule
from ._models_py3 import IdentityProperties
from ._models_py3 import ImportImageParameters
from ._models_py3 import ImportPipeline
from ._models_py3 import ImportPipelineListResult
from ._models_py3 import ImportPipelineSourceProperties
from ._models_py3 import ImportSource
from ._models_py3 import ImportSourceCredentials
from ._models_py3 import KeyVaultProperties
from ._models_py3 import NetworkRuleSet
from ._models_py3 import OperationDefinition
from ._models_py3 import OperationDisplayDefinition
from ._models_py3 import OperationListResult
from ._models_py3 import OperationLogSpecificationDefinition
from ._models_py3 import OperationMetricSpecificationDefinition
from ._models_py3 import OperationServiceSpecificationDefinition
from ._models_py3 import PipelineRun
from ._models_py3 import PipelineRunListResult
from ._models_py3 import PipelineRunRequest
from ._models_py3 import PipelineRunResponse
from ._models_py3 import PipelineRunSourceProperties
from ._models_py3 import PipelineRunTargetProperties
from ._models_py3 import PipelineSourceTriggerDescriptor
from ._models_py3 import PipelineSourceTriggerProperties
from ._models_py3 import PipelineTriggerDescriptor
from ._models_py3 import PipelineTriggerProperties
from ._models_py3 import Policies
from ._models_py3 import PrivateEndpoint
from ._models_py3 import PrivateEndpointConnection
from ._models_py3 import PrivateEndpointConnectionListResult
from ._models_py3 import PrivateLinkResource
from ._models_py3 import PrivateLinkResourceListResult
from ._models_py3 import PrivateLinkServiceConnectionState
from ._models_py3 import ProgressProperties
from ._models_py3 import ProxyResource
from ._models_py3 import QuarantinePolicy
from ._models_py3 import RegenerateCredentialParameters
from ._models_py3 import Registry
from ._models_py3 import RegistryListCredentialsResult
from ._models_py3 import RegistryListResult
from ._models_py3 import RegistryNameCheckRequest
from ._models_py3 import RegistryNameStatus
from ._models_py3 import RegistryPassword
from ._models_py3 import RegistryUpdateParameters
from ._models_py3 import RegistryUsage
from ._models_py3 import RegistryUsageListResult
from ._models_py3 import Replication
from ._models_py3 import ReplicationListResult
from ._models_py3 import ReplicationUpdateParameters
from ._models_py3 import Request
from ._models_py3 import Resource
from ._models_py3 import RetentionPolicy
from ._models_py3 import Sku
from ._models_py3 import Source
from ._models_py3 import Status
from ._models_py3 import SystemData
from ._models_py3 import Target
from ._models_py3 import TrustPolicy
from ._models_py3 import UserIdentityProperties
from ._models_py3 import VirtualNetworkRule
from ._models_py3 import Webhook
from ._models_py3 import WebhookCreateParameters
from ._models_py3 import WebhookListResult
from ._models_py3 import WebhookUpdateParameters
from ._container_registry_management_client_enums import (
Action,
ActionsRequired,
ConnectionStatus,
CreatedByType,
DefaultAction,
EncryptionStatus,
ImportMode,
LastModifiedByType,
NetworkRuleBypassOptions,
PasswordName,
PipelineOptions,
PipelineRunSourceType,
PipelineRunTargetType,
PipelineSourceType,
PolicyStatus,
ProvisioningState,
PublicNetworkAccess,
RegistryUsageUnit,
ResourceIdentityType,
SkuName,
SkuTier,
TriggerStatus,
TrustPolicyType,
WebhookAction,
WebhookStatus,
)
# Public API of this models package: model classes first, then the enums,
# mirroring the import lists above.
__all__ = [
    'Actor',
    'CallbackConfig',
    'EncryptionProperty',
    'Event',
    'EventContent',
    'EventInfo',
    'EventListResult',
    'EventRequestMessage',
    'EventResponseMessage',
    'ExportPipeline',
    'ExportPipelineListResult',
    'ExportPipelineTargetProperties',
    'IPRule',
    'IdentityProperties',
    'ImportImageParameters',
    'ImportPipeline',
    'ImportPipelineListResult',
    'ImportPipelineSourceProperties',
    'ImportSource',
    'ImportSourceCredentials',
    'KeyVaultProperties',
    'NetworkRuleSet',
    'OperationDefinition',
    'OperationDisplayDefinition',
    'OperationListResult',
    'OperationLogSpecificationDefinition',
    'OperationMetricSpecificationDefinition',
    'OperationServiceSpecificationDefinition',
    'PipelineRun',
    'PipelineRunListResult',
    'PipelineRunRequest',
    'PipelineRunResponse',
    'PipelineRunSourceProperties',
    'PipelineRunTargetProperties',
    'PipelineSourceTriggerDescriptor',
    'PipelineSourceTriggerProperties',
    'PipelineTriggerDescriptor',
    'PipelineTriggerProperties',
    'Policies',
    'PrivateEndpoint',
    'PrivateEndpointConnection',
    'PrivateEndpointConnectionListResult',
    'PrivateLinkResource',
    'PrivateLinkResourceListResult',
    'PrivateLinkServiceConnectionState',
    'ProgressProperties',
    'ProxyResource',
    'QuarantinePolicy',
    'RegenerateCredentialParameters',
    'Registry',
    'RegistryListCredentialsResult',
    'RegistryListResult',
    'RegistryNameCheckRequest',
    'RegistryNameStatus',
    'RegistryPassword',
    'RegistryUpdateParameters',
    'RegistryUsage',
    'RegistryUsageListResult',
    'Replication',
    'ReplicationListResult',
    'ReplicationUpdateParameters',
    'Request',
    'Resource',
    'RetentionPolicy',
    'Sku',
    'Source',
    'Status',
    'SystemData',
    'Target',
    'TrustPolicy',
    'UserIdentityProperties',
    'VirtualNetworkRule',
    'Webhook',
    'WebhookCreateParameters',
    'WebhookListResult',
    'WebhookUpdateParameters',
    'Action',
    'ActionsRequired',
    'ConnectionStatus',
    'CreatedByType',
    'DefaultAction',
    'EncryptionStatus',
    'ImportMode',
    'LastModifiedByType',
    'NetworkRuleBypassOptions',
    'PasswordName',
    'PipelineOptions',
    'PipelineRunSourceType',
    'PipelineRunTargetType',
    'PipelineSourceType',
    'PolicyStatus',
    'ProvisioningState',
    'PublicNetworkAccess',
    'RegistryUsageUnit',
    'ResourceIdentityType',
    'SkuName',
    'SkuTier',
    'TriggerStatus',
    'TrustPolicyType',
    'WebhookAction',
    'WebhookStatus',
]
| {
"content_hash": "75cf6ee2af3ce001544250584debeab4",
"timestamp": "",
"source": "github",
"line_count": 209,
"max_line_length": 64,
"avg_line_length": 31.6555023923445,
"alnum_prop": 0.7707073760580411,
"repo_name": "Azure/azure-sdk-for-python",
"id": "103eeff5f15a199d29badeb850b367b22011ccb8",
"size": "7084",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "sdk/containerregistry/azure-mgmt-containerregistry/azure/mgmt/containerregistry/v2019_12_01_preview/models/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1224"
},
{
"name": "Bicep",
"bytes": "24196"
},
{
"name": "CSS",
"bytes": "6089"
},
{
"name": "Dockerfile",
"bytes": "4892"
},
{
"name": "HTML",
"bytes": "12058"
},
{
"name": "JavaScript",
"bytes": "8137"
},
{
"name": "Jinja",
"bytes": "10377"
},
{
"name": "Jupyter Notebook",
"bytes": "272022"
},
{
"name": "PowerShell",
"bytes": "518535"
},
{
"name": "Python",
"bytes": "715484989"
},
{
"name": "Shell",
"bytes": "3631"
}
],
"symlink_target": ""
} |
import pytest, tempfile, os.path
import helper
import biobox_cli.command.run as biobox
def create_args(output):
    """Build the CLI argument list for running the velvet short-read
    assembler biobox against the bundled verification reads."""
    reads = helper.verification_file('short_read_assembler/genome_paired_reads.fq.gz')
    args = ["run", "short_read_assembler", "bioboxes/velvet"]
    args.append("--input={}".format(reads))
    args.append("--output={}".format(output))
    args.append("--no-rm")
    return args
@pytest.mark.slow
def test_short_read_assembler():
    """End-to-end: run the velvet biobox and check contigs.fa appears."""
    out_dir = tempfile.mkdtemp()
    biobox.run(create_args(out_dir))
    contigs = os.path.join(out_dir, "contigs.fa")
    assert os.path.isfile(contigs)
| {
"content_hash": "832495f43e700209ca5c239c55367b6d",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 108,
"avg_line_length": 31.833333333333332,
"alnum_prop": 0.6335078534031413,
"repo_name": "michaelbarton/command-line-interface",
"id": "631719aa8b512ca4eaa3e862385cd9e05e487320",
"size": "573",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "test/biobox_type/test_short_read_assembly.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Cucumber",
"bytes": "41604"
},
{
"name": "Makefile",
"bytes": "2845"
},
{
"name": "Python",
"bytes": "34153"
},
{
"name": "Shell",
"bytes": "1007"
}
],
"symlink_target": ""
} |
import collections
from . import accessors, iterables
def is_ordered(container):
    """Return True if *container* preserves positional/insertion order
    (sequences and OrderedDicts); False for plain mappings and anything
    else."""
    # collections.Sequence was removed in Python 3.10; prefer the
    # collections.abc location, falling back to the old alias location
    # for very old interpreters.
    abc = getattr(collections, 'abc', collections)
    return isinstance(container, (abc.Sequence, collections.OrderedDict))
def iterkeys(container):
    """Return an iterator (or sorted list) over *container*'s keys:
    indices for sequences, keys for mappings, empty otherwise.  Keys of
    unordered containers are sorted for determinism."""
    # collections.Sequence/Mapping were removed in Python 3.10; use
    # collections.abc with a fallback for very old interpreters.
    abc = getattr(collections, 'abc', collections)
    def _iterkeys(container):
        if isinstance(container, abc.Sequence):
            return iter(range(len(container)))
        if isinstance(container, abc.Mapping):
            return iter(container.keys())
        # Anything else has no keys.
        return iter([])
    result = _iterkeys(container)
    return result if is_ordered(container) else sorted(result)
def itervalues(container):
    """Lazily yield the values of *container* in iterkeys() order."""
    for k in iterkeys(container):
        yield accessors.getitem(container, k)
def iteritems(container):
    """Lazily yield (key, value) pairs of *container* in iterkeys() order."""
    for k in iterkeys(container):
        yield k, accessors.getitem(container, k)
def index(container, key):
    """Translate *key* into a positional index: identity for sequences,
    position within iterkeys() for mappings; None for anything else."""
    # collections.Sequence/Mapping were removed in Python 3.10; use
    # collections.abc with a fallback for very old interpreters.
    abc = getattr(collections, 'abc', collections)
    if isinstance(container, abc.Sequence):
        return key
    if isinstance(container, abc.Mapping):
        return list(iterkeys(container)).index(key)
def key(container, index):
    """Inverse of index(): return the key at positional *index* —
    identity for sequences, the index-th iterkeys() entry for mappings;
    None for anything else."""
    # collections.Sequence/Mapping were removed in Python 3.10; use
    # collections.abc with a fallback for very old interpreters.
    abc = getattr(collections, 'abc', collections)
    if isinstance(container, abc.Sequence):
        return index
    if isinstance(container, abc.Mapping):
        return iterables.nth(iterkeys(container), index)
def insert(container, key, value):
    """Set container[key] = value.  For mutable sequences an out-of-range
    index falls back to list-style insert(); None for other types."""
    # collections.MutableMapping/MutableSequence were removed in Python
    # 3.10; use collections.abc with a fallback for old interpreters.
    abc = getattr(collections, 'abc', collections)
    if isinstance(container, abc.MutableMapping):
        return accessors.setitem(container, key, value)
    if isinstance(container, abc.MutableSequence):
        try:
            return accessors.setitem(container, key, value)
        except IndexError:
            return container.insert(key, value)
def remove(container, key):
    # Delete container[key], delegating to the accessors abstraction.
    return accessors.delitem(container, key)
| {
"content_hash": "b4a2e131fd480ef892f5809c20c0a335",
"timestamp": "",
"source": "github",
"line_count": 56,
"max_line_length": 81,
"avg_line_length": 28.928571428571427,
"alnum_prop": 0.7018518518518518,
"repo_name": "filonik/encore",
"id": "d50761d110997b68db94d6616b7b48d0b4fe7640",
"size": "1620",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "encore/containers.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "39833"
}
],
"symlink_target": ""
} |
import argparse
import os
import logging
from common import modelzoo
import mxnet as mx
from mxnet.contrib.quantization import *
def download_calib_dataset(dataset_url, calib_dataset, logger=None):
    """Download the calibration dataset from *dataset_url* to the local
    path *calib_dataset*, logging progress if a logger is provided."""
    if logger is not None:
        logger.info('Downloading calibration dataset from %s to %s' % (dataset_url, calib_dataset))
    mx.test_utils.download(dataset_url, calib_dataset)
def download_model(model_name, logger=None):
    """Download *model_name* from the model zoo into the ./model directory
    next to this script, logging progress if a logger is provided.

    Returns whatever modelzoo.download_model returns.
    """
    dir_path = os.path.dirname(os.path.realpath(__file__))
    model_path = os.path.join(dir_path, 'model')
    if logger is not None:
        logger.info('Downloading model %s... into path %s' % (model_name, model_path))
    # Bug fix: the original ignored its model_name parameter and read the
    # global args.model instead; it also re-joined the path it had just
    # computed.  Use the parameter and reuse model_path.
    return modelzoo.download_model(model_name, model_path)
def save_symbol(fname, sym, logger=None):
    """Serialize the MXNet symbol *sym* to *fname*, logging if a logger
    is provided."""
    if logger is not None:
        logger.info('Saving symbol into file at %s' % fname)
    sym.save(fname)
def save_params(fname, arg_params, aux_params, logger=None):
    """Write arg/aux parameter arrays to ``fname`` in checkpoint format.

    Keys are prefixed ``arg:``/``aux:`` as expected by MXNet checkpoint
    loading, and every array is moved to CPU context before saving.

    Args:
        fname: destination file path.
        arg_params: dict of argument parameter arrays.
        aux_params: dict of auxiliary state arrays.
        logger: optional logger used to report progress.
    """
    if logger is not None:
        message = 'Saving params into file at %s' % fname
        logger.info(message)
    save_dict = {}
    for name, array in arg_params.items():
        save_dict['arg:%s' % name] = array.as_in_context(cpu())
    for name, array in aux_params.items():
        save_dict['aux:%s' % name] = array.as_in_context(cpu())
    mx.nd.save(fname, save_dict)
if __name__ == '__main__':
    # Command-line driver: download an FP32 model-zoo model, optionally run
    # calibration over a sample dataset, then save the quantized symbol and
    # parameters next to the downloaded checkpoint.
    parser = argparse.ArgumentParser(description='Generate a calibrated quantized model from a FP32 model')
    parser.add_argument('--ctx', type=str, default='gpu')
    parser.add_argument('--model', type=str, choices=['imagenet1k-resnet-152', 'imagenet1k-inception-bn'],
                        help='currently only supports imagenet1k-resnet-152 or imagenet1k-inception-bn')
    parser.add_argument('--batch-size', type=int, default=32)
    parser.add_argument('--label-name', type=str, default='softmax_label')
    parser.add_argument('--calib-dataset', type=str, default='data/val_256_q90.rec',
                        help='path of the calibration dataset')
    parser.add_argument('--image-shape', type=str, default='3,224,224')
    parser.add_argument('--data-nthreads', type=int, default=60,
                        help='number of threads for data decoding')
    parser.add_argument('--num-calib-batches', type=int, default=10,
                        help='number of batches for calibration')
    # NOTE(review): ``action='store_true'`` combined with ``default=True`` on the
    # two flags below means they can never be switched off from the command
    # line -- confirm whether ``store_false`` (or ``default=False``) was intended.
    parser.add_argument('--exclude-first-conv', action='store_true', default=True,
                        help='excluding quantizing the first conv layer since the'
                             ' number of channels is usually not a multiple of 4 in that layer'
                             ' which does not satisfy the requirement of cuDNN')
    parser.add_argument('--shuffle-dataset', action='store_true', default=True,
                        help='shuffle the calibration dataset')
    parser.add_argument('--shuffle-chunk-seed', type=int, default=3982304,
                        help='shuffling chunk seed, see'
                             ' https://mxnet.incubator.apache.org/api/python/io/io.html?highlight=imager#mxnet.io.ImageRecordIter'
                             ' for more details')
    parser.add_argument('--shuffle-seed', type=int, default=48564309,
                        help='shuffling seed, see'
                             ' https://mxnet.incubator.apache.org/api/python/io/io.html?highlight=imager#mxnet.io.ImageRecordIter'
                             ' for more details')
    parser.add_argument('--calib-mode', type=str, default='entropy',
                        help='calibration mode used for generating calibration table for the quantized symbol; supports'
                             ' 1. none: no calibration will be used. The thresholds for quantization will be calculated'
                             ' on the fly. This will result in inference speed slowdown and loss of accuracy'
                             ' in general.'
                             ' 2. naive: simply take min and max values of layer outputs as thresholds for'
                             ' quantization. In general, the inference accuracy worsens with more examples used in'
                             ' calibration. It is recommended to use `entropy` mode as it produces more accurate'
                             ' inference results.'
                             ' 3. entropy: calculate KL divergence of the fp32 output and quantized output for optimal'
                             ' thresholds. This mode is expected to produce the best inference accuracy of all three'
                             ' kinds of quantized models if the calibration dataset is representative enough of the'
                             ' inference dataset.')
    parser.add_argument('--quantized-dtype', type=str, default='int8',
                        choices=['int8', 'uint8'],
                        help='quantization destination data type for input data')
    args = parser.parse_args()

    # Resolve the compute context used for quantization/calibration.
    if args.ctx == 'gpu':
        ctx = mx.gpu(0)
    elif args.ctx == 'cpu':
        ctx = mx.cpu(0)
    else:
        raise ValueError('ctx %s is not supported in this script' % args.ctx)

    logging.basicConfig()
    logger = logging.getLogger('logger')
    logger.setLevel(logging.INFO)

    logger.info('shuffle_dataset=%s' % args.shuffle_dataset)

    calib_mode = args.calib_mode
    logger.info('calibration mode set to %s' % calib_mode)

    # download calibration dataset
    if calib_mode != 'none':
        download_calib_dataset('http://data.mxnet.io/data/val_256_q90.rec', args.calib_dataset)

    # download model
    prefix, epoch = download_model(model_name=args.model, logger=logger)
    sym, arg_params, aux_params = mx.model.load_checkpoint(prefix, epoch)

    # get batch size
    batch_size = args.batch_size
    logger.info('batch size = %d for calibration' % batch_size)

    # get number of batches for calibration
    num_calib_batches = args.num_calib_batches
    if calib_mode != 'none':
        logger.info('number of batches = %d for calibration' % num_calib_batches)

    # get number of threads for decoding the dataset
    data_nthreads = args.data_nthreads

    # get image shape
    image_shape = args.image_shape

    exclude_first_conv = args.exclude_first_conv
    excluded_sym_names = []
    # Per-model selection of which layer outputs to calibrate and which
    # symbols must stay in FP32 (fully-connected head, optionally first conv).
    if args.model == 'imagenet1k-resnet-152':
        rgb_mean = '0,0,0'
        if args.ctx == 'gpu':
            calib_layer = lambda name: name.endswith('_output') and (name.find('conv') != -1
                                                                     or name.find('sc') != -1
                                                                     or name.find('fc') != -1)
        else:
            calib_layer = lambda name: name.endswith('_output') and (name.find('conv') != -1
                                                                     or name.find('sc') != -1)
        excluded_sym_names += ['flatten0', 'fc1']
        if exclude_first_conv:
            excluded_sym_names += ['conv0']
    elif args.model == 'imagenet1k-inception-bn':
        rgb_mean = '123.68,116.779,103.939'
        if args.ctx == 'gpu':
            calib_layer = lambda name: name.endswith('_output') and (name.find('conv') != -1
                                                                     or name.find('fc') != -1)
        else:
            calib_layer = lambda name: name.endswith('_output') and (name.find('conv') != -1)
        excluded_sym_names += ['flatten', 'fc1']
        if exclude_first_conv:
            excluded_sym_names += ['conv_1']
    else:
        raise ValueError('model %s is not supported in this script' % args.model)

    label_name = args.label_name
    logger.info('label_name = %s' % label_name)

    data_shape = tuple([int(i) for i in image_shape.split(',')])
    logger.info('Input data shape = %s' % str(data_shape))

    logger.info('rgb_mean = %s' % rgb_mean)
    rgb_mean = [float(i) for i in rgb_mean.split(',')]
    mean_args = {'mean_r': rgb_mean[0], 'mean_g': rgb_mean[1], 'mean_b': rgb_mean[2]}

    if calib_mode == 'none':
        # No calibration: quantize directly; thresholds computed at inference.
        logger.info('Quantizing FP32 model %s' % args.model)
        qsym, qarg_params, aux_params = quantize_model(sym=sym, arg_params=arg_params, aux_params=aux_params,
                                                       ctx=ctx, excluded_sym_names=excluded_sym_names,
                                                       calib_mode=calib_mode, quantized_dtype=args.quantized_dtype,
                                                       logger=logger)
        sym_name = '%s-symbol.json' % (prefix + '-quantized')
        save_symbol(sym_name, qsym, logger)
    else:
        logger.info('Creating ImageRecordIter for reading calibration dataset')
        data = mx.io.ImageRecordIter(path_imgrec=args.calib_dataset,
                                     label_width=1,
                                     preprocess_threads=data_nthreads,
                                     batch_size=batch_size,
                                     data_shape=data_shape,
                                     label_name=label_name,
                                     rand_crop=False,
                                     rand_mirror=False,
                                     shuffle=args.shuffle_dataset,
                                     shuffle_chunk_seed=args.shuffle_chunk_seed,
                                     seed=args.shuffle_seed,
                                     **mean_args)

        cqsym, qarg_params, aux_params = quantize_model(sym=sym, arg_params=arg_params, aux_params=aux_params,
                                                        ctx=ctx, excluded_sym_names=excluded_sym_names,
                                                        calib_mode=calib_mode, calib_data=data,
                                                        num_calib_examples=num_calib_batches * batch_size,
                                                        calib_layer=calib_layer, quantized_dtype=args.quantized_dtype,
                                                        logger=logger)
        if calib_mode == 'entropy':
            suffix = '-quantized-%dbatches-entropy' % num_calib_batches
        elif calib_mode == 'naive':
            suffix = '-quantized-%dbatches-naive' % num_calib_batches
        else:
            # Bug fix: the error message previously read "unknow".
            raise ValueError('unknown calibration mode %s received, only supports `none`, `naive`, and `entropy`'
                             % calib_mode)
        sym_name = '%s-symbol.json' % (prefix + suffix)
        save_symbol(sym_name, cqsym, logger)

    # NOTE(review): params are always saved under the plain '-quantized' prefix
    # even when the symbol file carries the calibration suffix -- confirm this
    # asymmetry is intentional before renaming.
    param_name = '%s-%04d.params' % (prefix + '-quantized', epoch)
    save_params(param_name, qarg_params, aux_params, logger)
| {
"content_hash": "ae5bb57bda9123d62289a86a09f894ed",
"timestamp": "",
"source": "github",
"line_count": 199,
"max_line_length": 130,
"avg_line_length": 53.311557788944725,
"alnum_prop": 0.5564143651616552,
"repo_name": "mbaijal/incubator-mxnet",
"id": "8a2818c4bca0b0326fa781688fd52b92e869895e",
"size": "11395",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "example/quantization/imagenet_gen_qsym.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "1731"
},
{
"name": "Batchfile",
"bytes": "13130"
},
{
"name": "C",
"bytes": "173224"
},
{
"name": "C++",
"bytes": "6116511"
},
{
"name": "CMake",
"bytes": "86446"
},
{
"name": "Clojure",
"bytes": "389028"
},
{
"name": "Cuda",
"bytes": "813783"
},
{
"name": "Dockerfile",
"bytes": "43395"
},
{
"name": "Groovy",
"bytes": "22850"
},
{
"name": "Java",
"bytes": "128595"
},
{
"name": "Julia",
"bytes": "408765"
},
{
"name": "Jupyter Notebook",
"bytes": "1657933"
},
{
"name": "MATLAB",
"bytes": "34903"
},
{
"name": "Makefile",
"bytes": "70735"
},
{
"name": "Perl",
"bytes": "1535873"
},
{
"name": "Perl 6",
"bytes": "7280"
},
{
"name": "PowerShell",
"bytes": "6150"
},
{
"name": "Python",
"bytes": "6206547"
},
{
"name": "R",
"bytes": "351354"
},
{
"name": "Scala",
"bytes": "1102749"
},
{
"name": "Shell",
"bytes": "305673"
},
{
"name": "Smalltalk",
"bytes": "43774"
}
],
"symlink_target": ""
} |
from django.contrib import admin
from event.models import Event
class EventArtistsInline(admin.TabularInline):
    """Tabular inline editing the Event<->Artist M2M join rows on the Event form."""
    model = Event.artists.through
    verbose_name_plural = 'Artists'
    # Show one extra blank row for adding an artist.
    extra = 1
class EventInline(admin.StackedInline):
    """Stacked inline of events, presumably for embedding on a related
    model's admin page -- it is not registered in this module."""
    model = Event
    extra = 1
class EventAdmin(admin.ModelAdmin):
    """Admin options for Event: inline artist editing plus list display,
    filtering and search on name/start/venue."""
    inlines = [EventArtistsInline]
    list_display = ['name', 'start', 'venue']
    list_filter = ['name', 'start', 'venue__name']
    search_fields = ['name', 'start', 'venue__name']
# Expose Event in the admin site using the customised options above.
admin.site.register(Event, EventAdmin)
| {
"content_hash": "1d31414cf4471efcf14874b8d9ac31e7",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 52,
"avg_line_length": 22.333333333333332,
"alnum_prop": 0.6884328358208955,
"repo_name": "FedorSelitsky/eventrack",
"id": "72f1eb80106cbf5bb10fef69c121d903a7a2dd83",
"size": "536",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "event/admin.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "16128"
},
{
"name": "Dockerfile",
"bytes": "1061"
},
{
"name": "HTML",
"bytes": "62582"
},
{
"name": "JavaScript",
"bytes": "46270"
},
{
"name": "Python",
"bytes": "47384"
},
{
"name": "Shell",
"bytes": "127"
}
],
"symlink_target": ""
} |
from django.conf import settings
from django.contrib import messages
from django.db.models import Q
from django.shortcuts import redirect
from django.urls import reverse
from django.utils.translation import gettext_lazy as _
from django.views.generic import (
DeleteView, DetailView, FormView, ListView, UpdateView)
from django.views.generic.detail import SingleObjectMixin
from django.views.generic.edit import FormMixin
from django_tables2 import SingleTableView
from oscar.core.compat import get_user_model
from oscar.core.loading import get_class, get_classes, get_model
from oscar.views.generic import BulkEditMixin
# Forms, tables and models are resolved through Oscar's dynamic class-loading
# machinery so that forked projects can override them without patching this
# module.
UserSearchForm, ProductAlertSearchForm, ProductAlertUpdateForm = get_classes(
    'dashboard.users.forms', ('UserSearchForm', 'ProductAlertSearchForm',
                              'ProductAlertUpdateForm'))
PasswordResetForm = get_class('customer.forms', 'PasswordResetForm')
UserTable = get_class('dashboard.users.tables', 'UserTable')
ProductAlert = get_model('customer', 'ProductAlert')
User = get_user_model()
class IndexView(BulkEditMixin, FormMixin, SingleTableView):
    """
    Dashboard user list: a searchable, paginated table of users with bulk
    activate/deactivate actions.
    """
    template_name = 'oscar/dashboard/users/index.html'
    model = User
    actions = ('make_active', 'make_inactive', )
    form_class = UserSearchForm
    table_class = UserTable
    context_table_name = 'users'
    # Template for the human-readable caption describing applied filters;
    # filled from self.desc_ctx built in apply_search / apply_search_filters.
    desc_template = _('%(main_filter)s %(email_filter)s %(name_filter)s')
    description = ''
    def dispatch(self, request, *args, **kwargs):
        # Build the search form up front so both queryset filtering and
        # template rendering share the same (possibly bound) instance.
        form_class = self.get_form_class()
        self.form = self.get_form(form_class)
        return super().dispatch(request, *args, **kwargs)
    def get_table_pagination(self, table):
        # Page size comes from the dashboard-wide setting.
        return dict(per_page=settings.OSCAR_DASHBOARD_ITEMS_PER_PAGE)
    def get_form_kwargs(self):
        """
        Only bind search form if it was submitted.
        """
        kwargs = super().get_form_kwargs()
        if 'search' in self.request.GET:
            kwargs.update({
                'data': self.request.GET,
            })
        return kwargs
    def get_queryset(self):
        # Newest accounts first, then narrowed by any search filters.
        queryset = self.model.objects.all().order_by('-date_joined')
        return self.apply_search(queryset)
    def apply_search(self, queryset):
        # Set initial queryset description, used for template context
        self.desc_ctx = {
            'main_filter': _('All users'),
            'email_filter': '',
            'name_filter': '',
        }
        if self.form.is_valid():
            return self.apply_search_filters(queryset, self.form.cleaned_data)
        else:
            return queryset
    def apply_search_filters(self, queryset, data):
        """
        Function is split out to allow customisation with little boilerplate.
        """
        if data['email']:
            email = data['email']
            queryset = queryset.filter(email__istartswith=email)
            self.desc_ctx['email_filter'] \
                = _(" with email matching '%s'") % email
        if data['name']:
            # If the value is two words, then assume they are first name and
            # last name
            parts = data['name'].split()
            # always true filter
            condition = Q()
            for part in parts:
                # Every word must match either the first or the last name.
                condition &= Q(first_name__icontains=part) \
                    | Q(last_name__icontains=part)
            queryset = queryset.filter(condition).distinct()
            self.desc_ctx['name_filter'] \
                = _(" with name matching '%s'") % data['name']
        return queryset
    def get_table(self, **kwargs):
        # Attach the filter description built in apply_search as the caption.
        table = super().get_table(**kwargs)
        table.caption = self.desc_template % self.desc_ctx
        return table
    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context['form'] = self.form
        return context
    def make_inactive(self, request, users):
        # Bulk action (see ``actions``): deactivate the selected users.
        return self._change_users_active_status(users, False)
    def make_active(self, request, users):
        # Bulk action (see ``actions``): reactivate the selected users.
        return self._change_users_active_status(users, True)
    def _change_users_active_status(self, users, value):
        # Superusers are deliberately skipped so the bulk action cannot be
        # used to lock out admin accounts.
        for user in users:
            if not user.is_superuser:
                user.is_active = value
                user.save()
        messages.info(self.request, _("Users' status successfully changed"))
        return redirect('dashboard:users-index')
class UserDetailView(DetailView):
    """Read-only dashboard page showing a single user's details."""
    template_name = 'oscar/dashboard/users/detail.html'
    model = User
    context_object_name = 'customer'
class PasswordResetView(SingleObjectMixin, FormView):
    """
    POST-only view that sends a password-reset email to a chosen user,
    then returns to that user's detail page.
    """
    form_class = PasswordResetForm
    http_method_names = ['post']
    model = User
    def post(self, request, *args, **kwargs):
        # Resolve the target user first; get_form_kwargs needs its email.
        self.object = self.get_object()
        return super().post(request, *args, **kwargs)
    def get_form_kwargs(self):
        kwargs = super().get_form_kwargs()
        # Bind the reset form to the selected user's email address.
        kwargs['data'] = {'email': self.object.email}
        return kwargs
    def form_valid(self, form):
        # The PasswordResetForm's save method sends the reset email
        form.save(request=self.request)
        return super().form_valid(form)
    def get_success_url(self):
        messages.success(
            self.request, _("A password reset email has been sent"))
        return reverse(
            'dashboard:user-detail', kwargs={'pk': self.object.id}
        )
class ProductAlertListView(ListView):
    """
    Dashboard list of product alerts, filterable by status and by the
    customer's name or email.
    """
    model = ProductAlert
    form_class = ProductAlertSearchForm
    context_object_name = 'alerts'
    template_name = 'oscar/dashboard/users/alerts/list.html'
    paginate_by = settings.OSCAR_DASHBOARD_ITEMS_PER_PAGE
    base_description = _('All Alerts')
    description = ''
    def get_queryset(self):
        # Newest alerts first; ``self.description`` is built up alongside the
        # filters and exposed to the template via get_context_data.
        queryset = self.model.objects.all().order_by('-date_created')
        self.description = self.base_description
        self.form = self.form_class(self.request.GET)
        if not self.form.is_valid():
            return queryset
        data = self.form.cleaned_data
        if data['status']:
            queryset = queryset.filter(status=data['status'])
            self.description \
                += _(" with status matching '%s'") % data['status']
        if data['name']:
            # If the value is two words, then assume they are first name and
            # last name
            parts = data['name'].split()
            if len(parts) >= 2:
                queryset = queryset.filter(
                    user__first_name__istartswith=parts[0],
                    user__last_name__istartswith=parts[1]
                ).distinct()
            else:
                queryset = queryset.filter(
                    Q(user__first_name__istartswith=parts[0])
                    | Q(user__last_name__istartswith=parts[-1])
                ).distinct()
            self.description \
                += _(" with customer name matching '%s'") % data['name']
        if data['email']:
            # Match either the alert owner's account email or the ad-hoc
            # email stored directly on the alert.
            queryset = queryset.filter(
                Q(user__email__icontains=data['email'])
                | Q(email__icontains=data['email'])
            )
            self.description \
                += _(" with customer email matching '%s'") % data['email']
        return queryset
    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context['form'] = self.form
        context['queryset_description'] = self.description
        return context
class ProductAlertUpdateView(UpdateView):
    """Edit a single product alert, then return to the alert list."""
    template_name = 'oscar/dashboard/users/alerts/update.html'
    model = ProductAlert
    form_class = ProductAlertUpdateForm
    context_object_name = 'alert'
    def get_success_url(self):
        messages.success(self.request, _("Product alert saved"))
        return reverse('dashboard:user-alert-list')
class ProductAlertDeleteView(DeleteView):
    """Confirm-and-delete view for a product alert."""
    model = ProductAlert
    template_name = 'oscar/dashboard/users/alerts/delete.html'
    context_object_name = 'alert'
    def get_success_url(self):
        messages.warning(self.request, _("Product alert deleted"))
        return reverse('dashboard:user-alert-list')
| {
"content_hash": "9a7bf34e94cf099eb18cb5f476b33221",
"timestamp": "",
"source": "github",
"line_count": 231,
"max_line_length": 78,
"avg_line_length": 34.98268398268398,
"alnum_prop": 0.6120529637421112,
"repo_name": "solarissmoke/django-oscar",
"id": "8518be17df27654729330f828b88cecfab62deb5",
"size": "8081",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "src/oscar/apps/dashboard/users/views.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Dockerfile",
"bytes": "529"
},
{
"name": "HTML",
"bytes": "562906"
},
{
"name": "JavaScript",
"bytes": "40879"
},
{
"name": "Makefile",
"bytes": "4234"
},
{
"name": "Python",
"bytes": "2199293"
},
{
"name": "SCSS",
"bytes": "21362"
},
{
"name": "Shell",
"bytes": "308"
}
],
"symlink_target": ""
} |
"""Guard and reader functions for dealing with device CSV"""
from device_resolutions.util import find_lowest, as_float
def DeviceRow(row):
    """Guard/validation function for device rows.

    Accepts a 7-item row ``(model, width_dp, height_dp, width_px, height_px,
    screen_in, is_touch)`` and returns it as a tuple with fields coerced to
    their expected types.
    """
    # Unpack inside the body instead of using Python 2's tuple-parameter
    # syntax (``def DeviceRow((a, b, ...))``), which was removed by PEP 3113
    # and is a SyntaxError on Python 3.  Callers still pass a single row
    # sequence, so the interface is unchanged.
    model, width_dp, height_dp, width_px, height_px, screen_in, is_touch = row
    return (
        str(model),
        as_float(width_dp),
        as_float(height_dp),
        as_float(width_px),
        as_float(height_px),
        as_float(screen_in),
        # NOTE(review): bool() of any non-empty CSV string (even "False") is
        # True -- confirm upstream supplies already-truthy values here.
        bool(is_touch)
    )
# Column positions within a device row tuple.
(_MODEL, _WIDTH_DP, _HEIGHT_DP, _WIDTH_PX,
 _HEIGHT_PX, _SCREEN_IN, _IS_TOUCH) = range(7)
def model(row):
    """Return the ``model`` column of a device row."""
    return row[_MODEL]
def width_dp(row):
    """Return the ``width_dp`` column of a device row."""
    return row[_WIDTH_DP]
def height_dp(row):
    """Return the ``height_dp`` column of a device row."""
    return row[_HEIGHT_DP]
def width_px(row):
    """Return the ``width_px`` column of a device row."""
    return row[_WIDTH_PX]
def height_px(row):
    """Return the ``height_px`` column of a device row."""
    return row[_HEIGHT_PX]
def screen_in(row):
    """Return the ``screen_in`` column of a device row."""
    return row[_SCREEN_IN]
def is_touch(row):
    """Return the ``is_touch`` column of a device row."""
    return row[_IS_TOUCH]
"content_hash": "e85a313e2fd1b58fc4f14f6d043fd9d3",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 86,
"avg_line_length": 17.523809523809526,
"alnum_prop": 0.6141304347826086,
"repo_name": "gordonbrander/device_resolutions",
"id": "d4cc8b834192c3b83550d1d17d141e5327a6ff4a",
"size": "736",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "device_resolutions/devicerow.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "4524"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import, print_function, unicode_literals, division
from time import sleep
# This file is part of the ISIS IBEX application.
# Copyright (C) 2012-2016 Science & Technology Facilities Council.
# All rights reserved.
#
# This program is distributed in the hope that it will be useful.
# This program and the accompanying materials are made available under the
# terms of the Eclipse Public License v1.0 which accompanies this distribution.
# EXCEPT AS EXPRESSLY SET FORTH IN THE ECLIPSE PUBLIC LICENSE V1.0, THE PROGRAM
# AND ACCOMPANYING MATERIALS ARE PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES
# OR CONDITIONS OF ANY KIND. See the Eclipse Public License v1.0 for more details.
#
# You should have received a copy of the Eclipse Public License v1.0
# along with this program; if not, you can obtain a copy from
# https://www.eclipse.org/org/documents/epl-v10.php or
# http://opensource.org/licenses/eclipse-1.0.php
from BlockServer.core.macros import MACROS
from server_common.utilities import print_and_log
from concurrent.futures import ThreadPoolExecutor
# Number of threads to serve caputs
NUMBER_OF_CAPUT_THREADS = 20
# genie_python is not installed on every platform this runs on; fall back to
# locally-defined, message-compatible exception classes so the module can
# still be imported (and the except clauses below still work) without it.
try:
    from genie_python.channel_access_exceptions import UnableToConnectToPVException, ReadAccessException
except ImportError:
    class UnableToConnectToPVException(IOError):
        """
        The system is unable to connect to a PV for some reason.
        """
        def __init__(self, pv_name, err):
            super(UnableToConnectToPVException, self).__init__("Unable to connect to PV {0}: {1}".format(pv_name, err))
    class ReadAccessException(IOError):
        """
        PV exists but its value is unavailable to read.
        """
        def __init__(self, pv_name):
            super(ReadAccessException, self).__init__("Read access denied for PV {}".format(pv_name))
try:
    # noinspection PyUnresolvedReferences
    from genie_python.genie_cachannel_wrapper import CaChannelWrapper, EXIST_TIMEOUT
except ImportError:
    # No local stand-in exists for the wrapper itself; code paths that use
    # CaChannelWrapper will fail at call time on systems without genie_python.
    print("ERROR: No genie_python on the system can not import CaChannelWrapper!")
try:
    from genie_python.genie_cachannel_wrapper import AlarmSeverity, AlarmCondition as AlarmStatus
except ImportError:
    # Fallback enum definitions matching the numeric values genie_python
    # uses, so alarm comparisons keep working without it.
    from enum import IntEnum
    class AlarmSeverity(IntEnum):
        """
        Enum for severity of alarm
        """
        No = 0
        Minor = 1
        Major = 2
        Invalid = 3
    class AlarmStatus(IntEnum):
        """
        Enum for status of alarm
        """
        BadSub = 16
        Calc = 12
        Comm = 9
        Cos = 8
        Disable = 18
        High = 4
        HiHi = 3
        HwLimit = 11
        Link = 14
        Lolo = 5
        Low = 6
        No = 0
        Read = 1
        ReadAccess = 20
        Scam = 13
        Simm = 19
        Soft = 15
        State = 7
        Timeout = 10
        UDF = 17
        Write = 2
        WriteAccess = 21
def _create_caput_pool():
    """
    Returns: thread pool for the caputs, making sure it works for older versions of python
    """
    try:
        return ThreadPoolExecutor(max_workers=NUMBER_OF_CAPUT_THREADS, thread_name_prefix="ChannelAccess_Pool")
    except TypeError:
        # Pre-3.6 pythons do not accept thread_name_prefix; fall back to an
        # unnamed pool.
        print("WARNING: thread_name_prefix does not exist for ThreadPoolExecutor in this python, "
              "caput pool has generic name.")
        return ThreadPoolExecutor(max_workers=NUMBER_OF_CAPUT_THREADS)
class ChannelAccess(object):
    """
    Static helpers wrapping genie_python's CaChannelWrapper for channel
    access reads (caget), writes (caput, via a shared thread pool) and
    monitors.
    """
    # Create a thread poll so that threads are reused and so ca contexts that each thread gets are shared. This also
    # caps the number of ca library threads. 20 is chosen as being probably enough but limited.
    thread_pool = _create_caput_pool()
    @staticmethod
    def wait_for_tasks():
        """
        Wait for all requested tasks to complete, i.e. all caputs.
        It does this by shutting down the current threadpool waiting for all tasks to complete and then create a new
        pool.
        """
        ChannelAccess.thread_pool.shutdown()
        ChannelAccess.thread_pool = _create_caput_pool()
    @staticmethod
    def caget(name, as_string=False, timeout=None):
        """Uses CaChannelWrapper from genie_python to get a pv value. We import CaChannelWrapper when used as this means
        the tests can run without having genie_python installed
        Args:
            name (string): The name of the PV to be read
            as_string (bool, optional): Set to read a char array as a string, defaults to false
            timeout (float, None): timeout value to use; None for use default timeout
        Returns:
            obj : The value of the requested PV, None if no value was read
        """
        try:
            if timeout is None:
                # Let the wrapper apply its own default timeout.
                return CaChannelWrapper.get_pv_value(name, as_string)
            else:
                return CaChannelWrapper.get_pv_value(name, as_string, timeout=timeout)
        except Exception as err:
            # Probably has timed out
            print_and_log(str(err))
            return None
    @staticmethod
    def caput(name, value, wait=False, set_pv_value=None, safe_not_quick=True):
        """
        Uses CaChannelWrapper from genie_python to set a pv value. Waiting will put the call in a thread so the order
        is no longer guaranteed. Also if the call take time a queue will be formed of put tasks.
        We import CaChannelWrapper when used as this means the tests can run without having genie_python installed
        Args:
            name (string): The name of the PV to be set
            value (object): The data to send to the PV
            wait (bool, optional): Wait for the PV to set before returning
            set_pv_value: function to call to set a pv, used only in testing; None to use CaChannelWrapper set value
            safe_not_quick (bool): True run all checks while setting the pv, False don't run checks just write the value,
                e.g. disp check
        Returns:
            None: if wait is False
            Future: if wait if True
        """
        if set_pv_value is None:
            # We need to put the default here rather than as a python default argument because the linux build does
            # not have CaChannelWrapper. The argument default would be looked up at class load time, causing the
            # linux build to fail to load the entire class.
            set_pv_value = CaChannelWrapper.set_pv_value
        def _put_value():
            set_pv_value(name, value, wait, safe_not_quick=safe_not_quick)
        if wait:
            # If waiting then run in this thread.
            _put_value()
            return None
        else:
            # If not waiting, run in a different thread.
            # Even if not waiting genie_python sometimes takes a while to return from a set_pv_value call.
            return ChannelAccess.thread_pool.submit(_put_value)
    @staticmethod
    def caput_retry_on_fail(pv_name, value, retry_count=5, safe_not_quick=True):
        """
        Write to a pv and check the value is set, retry if not; raise if run out of retries
        Args:
            pv_name: pv name to write to
            value: value to write
            retry_count: number of retries
            safe_not_quick (bool): True run all checks while setting the pv, False don't run checks just write the value,
                e.g. disp check
        Raises:
            IOError: if pv can not be set
        """
        current_value = None
        for _ in range(retry_count):
            ChannelAccess.caput(pv_name, value, wait=True, safe_not_quick=safe_not_quick)
            current_value = ChannelAccess.caget(pv_name)
            if current_value == value:
                break
        else:
            # for/else: only reached when the loop never hit ``break``,
            # i.e. every retry failed the read-back check.
            raise IOError("PV value can not be set, pv {}, was {} expected {}".format(pv_name, current_value, value))
    @staticmethod
    def pv_exists(name, timeout=None):
        """
        See if the PV exists.
        Args:
            name (string): The PV name.
            timeout(optional): How long to wait for the PV to "appear".
        Returns:
            True if exists, otherwise False.
        """
        if timeout is None:
            timeout = EXIST_TIMEOUT
        return CaChannelWrapper.pv_exists(name, timeout)
    @staticmethod
    def add_monitor(name, call_back_function):
        """
        Add a callback to a pv which responds on a monitor (i.e. value change). This currently only tested for
        numbers.
        Args:
            name: name of the pv
            call_back_function: the callback function, arguments are value,
                alarm severity (AlarmSeverity),
                alarm status (AlarmStatus)
        """
        CaChannelWrapper.add_monitor(name, call_back_function)
    @staticmethod
    def poll():
        """
        Flush the send buffer and execute any outstanding background activity for all connected pvs.
        NB Connected pv is one which is in the cache
        """
        CaChannelWrapper.poll()
    @staticmethod
    def clear_monitor(name):
        """
        Clears the monitor on a pv if it exists
        """
        try:
            CaChannelWrapper.get_chan(name).clear_channel()
        except UnableToConnectToPVException:
            # No channel to clear; treat as a no-op.
            pass
class ManagerModeRequiredException(Exception):
    """
    Raised when an operation needs manager mode but it is not enabled (or
    its state could not be determined).
    """
    def __init__(self, *args, **kwargs):
        super(ManagerModeRequiredException, self).__init__(*args, **kwargs)
def verify_manager_mode(channel_access=None, message="Operation must be performed in manager mode"):
    """
    Verifies that manager mode is active, throwing an error if it was not active.
    Args:
        channel_access (ChannelAccess, optional): the channel access class to use; None to create a default one
        message (str): Message given to exception if manager mode was not enabled.
    Raises:
        ManagerModeRequiredException: if manager mode was not enabled or was unable to connect
    """
    if channel_access is None:
        # Create the default lazily: the previous ``channel_access=ChannelAccess()``
        # default was constructed once at import time and shared by all calls
        # (mutable-default-argument antipattern).
        channel_access = ChannelAccess()
    try:
        is_manager = channel_access.caget("{}CS:MANAGER".format(MACROS["$(MYPVPREFIX)"])).lower() == "yes"
    except UnableToConnectToPVException as e:
        raise ManagerModeRequiredException("Manager mode is required, but the manager mode PV did not connect "
                                           "(caused by: {})".format(e))
    except ReadAccessException as e:
        raise ManagerModeRequiredException("Manager mode is required, but the manager mode PV could not be read "
                                           "(caused by: {})".format(e))
    except Exception as e:
        # Deliberately broad: any other failure (e.g. caget returning None)
        # must also be reported as "manager mode unavailable".
        raise ManagerModeRequiredException("Manager mode is required, but an unknown exception occurred "
                                           "(caused by: {})".format(e))
    if not is_manager:
        raise ManagerModeRequiredException(message)
def maximum_severity(*alarms):
    """
    Get the alarm with maximum severity (or first if items have equal severity)
    Args:
        *alarms (Tuple[AlarmSeverity, AlarmStatus]): alarms to choose from
    Returns:
        (Optional[Tuple[AlarmSeverity, AlarmStatus]]) alarm with maximum severity; none for no arguments
    """
    if not alarms:
        return None
    # max() returns the first maximal item, preserving the documented
    # "first wins on ties" behaviour.
    return max(alarms, key=lambda alarm: alarm[0])
| {
"content_hash": "b34d8769e71c386afe80b076ed6c554b",
"timestamp": "",
"source": "github",
"line_count": 306,
"max_line_length": 121,
"avg_line_length": 37.46078431372549,
"alnum_prop": 0.6346506150222455,
"repo_name": "ISISComputingGroup/EPICS-inst_servers",
"id": "bb0a2142a15ac5c6ddab6178e1e0117e7d8145c0",
"size": "11463",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "server_common/channel_access.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "6451"
},
{
"name": "Python",
"bytes": "1060148"
},
{
"name": "Shell",
"bytes": "4460"
}
],
"symlink_target": ""
} |
"""Wrappers for protocol buffer enum types."""
import enum
class RecognitionConfig(object):
    """Namespace wrapper holding enums used by RecognitionConfig messages."""
    class AudioEncoding(enum.IntEnum):
        """
        The encoding of the audio data sent in the request.
        All encodings support only 1 channel (mono) audio, unless the
        ``audio_channel_count`` and ``enable_separate_recognition_per_channel``
        fields are set.
        For best results, the audio source should be captured and transmitted
        using a lossless encoding (``FLAC`` or ``LINEAR16``). The accuracy of
        the speech recognition can be reduced if lossy codecs are used to
        capture or transmit audio, particularly if background noise is present.
        Lossy codecs include ``MULAW``, ``AMR``, ``AMR_WB``, ``OGG_OPUS``,
        ``SPEEX_WITH_HEADER_BYTE``, and ``MP3``.
        The ``FLAC`` and ``WAV`` audio file formats include a header that
        describes the included audio content. You can request recognition for
        ``WAV`` files that contain either ``LINEAR16`` or ``MULAW`` encoded
        audio. If you send ``FLAC`` or ``WAV`` audio file format in your
        request, you do not need to specify an ``AudioEncoding``; the audio
        encoding format is determined from the file header. If you specify an
        ``AudioEncoding`` when you send ``FLAC`` or ``WAV`` audio, the
        encoding configuration must match the encoding described in the audio
        header; otherwise the request returns an
        ``google.rpc.Code.INVALID_ARGUMENT`` error code.
        Attributes:
          ENCODING_UNSPECIFIED (int): Not specified.
          LINEAR16 (int): Uncompressed 16-bit signed little-endian samples (Linear PCM).
          FLAC (int): ``FLAC`` (Free Lossless Audio Codec) is the recommended encoding because
          it is lossless--therefore recognition is not compromised--and requires
          only about half the bandwidth of ``LINEAR16``. ``FLAC`` stream encoding
          supports 16-bit and 24-bit samples, however, not all fields in
          ``STREAMINFO`` are supported.
          MULAW (int): 8-bit samples that compand 14-bit audio samples using G.711 PCMU/mu-law.
          AMR (int): Adaptive Multi-Rate Narrowband codec. ``sample_rate_hertz`` must be
          8000.
          AMR_WB (int): Adaptive Multi-Rate Wideband codec. ``sample_rate_hertz`` must be 16000.
          OGG_OPUS (int): Opus encoded audio frames in Ogg container
          (`OggOpus <https://wiki.xiph.org/OggOpus>`__). ``sample_rate_hertz``
          must be one of 8000, 12000, 16000, 24000, or 48000.
          SPEEX_WITH_HEADER_BYTE (int): Although the use of lossy encodings is not recommended, if a very low
          bitrate encoding is required, ``OGG_OPUS`` is highly preferred over
          Speex encoding. The `Speex <https://speex.org/>`__ encoding supported by
          Cloud Speech API has a header byte in each block, as in MIME type
          ``audio/x-speex-with-header-byte``. It is a variant of the RTP Speex
          encoding defined in `RFC 5574 <https://tools.ietf.org/html/rfc5574>`__.
          The stream is a sequence of blocks, one block per RTP packet. Each block
          starts with a byte containing the length of the block, in bytes,
          followed by one or more frames of Speex data, padded to an integral
          number of bytes (octets) as specified in RFC 5574. In other words, each
          RTP header is replaced with a single byte containing the block length.
          Only Speex wideband is supported. ``sample_rate_hertz`` must be 16000.
        """
        ENCODING_UNSPECIFIED = 0
        LINEAR16 = 1
        FLAC = 2
        MULAW = 3
        AMR = 4
        AMR_WB = 5
        OGG_OPUS = 6
        SPEEX_WITH_HEADER_BYTE = 7
class RecognitionMetadata(object):
    """Namespace class grouping the enums that describe recognition metadata."""

    class InteractionType(enum.IntEnum):
        """
        Use case categories that the audio recognition request can be described
        by.

        Attributes:
          INTERACTION_TYPE_UNSPECIFIED (int): Use case is either unknown or is something other than one of the other
          values below.
          DISCUSSION (int): Multiple people in a conversation or discussion. For example in a
          meeting with two or more people actively participating. Typically all
          the primary people speaking would be in the same room (if not, see
          PHONE\_CALL)
          PRESENTATION (int): One or more persons lecturing or presenting to others, mostly
          uninterrupted.
          PHONE_CALL (int): A phone-call or video-conference in which two or more people, who are
          not in the same room, are actively participating.
          VOICEMAIL (int): A recorded message intended for another person to listen to.
          PROFESSIONALLY_PRODUCED (int): Professionally produced audio (eg. TV Show, Podcast).
          VOICE_SEARCH (int): Transcribe spoken questions and queries into text.
          VOICE_COMMAND (int): Transcribe voice commands, such as for controlling a device.
          DICTATION (int): Transcribe speech to text to create a written document, such as a
          text-message, email or report.
        """
        INTERACTION_TYPE_UNSPECIFIED = 0
        DISCUSSION = 1
        PRESENTATION = 2
        PHONE_CALL = 3
        VOICEMAIL = 4
        PROFESSIONALLY_PRODUCED = 5
        VOICE_SEARCH = 6
        VOICE_COMMAND = 7
        DICTATION = 8

    class MicrophoneDistance(enum.IntEnum):
        """
        Enumerates the types of capture settings describing an audio file.

        Attributes:
          MICROPHONE_DISTANCE_UNSPECIFIED (int): Audio type is not known.
          NEARFIELD (int): The audio was captured from a closely placed microphone. Eg. phone,
          dictaphone, or handheld microphone. Generally if there speaker is within
          1 meter of the microphone.
          MIDFIELD (int): The speaker if within 3 meters of the microphone.
          FARFIELD (int): The speaker is more than 3 meters away from the microphone.
        """
        MICROPHONE_DISTANCE_UNSPECIFIED = 0
        NEARFIELD = 1
        MIDFIELD = 2
        FARFIELD = 3

    class OriginalMediaType(enum.IntEnum):
        """
        The original media the speech was recorded on.

        Attributes:
          ORIGINAL_MEDIA_TYPE_UNSPECIFIED (int): Unknown original media type.
          AUDIO (int): The speech data is an audio recording.
          VIDEO (int): The speech data originally recorded on a video.
        """
        ORIGINAL_MEDIA_TYPE_UNSPECIFIED = 0
        AUDIO = 1
        VIDEO = 2

    class RecordingDeviceType(enum.IntEnum):
        """
        The type of device the speech was recorded with.

        Attributes:
          RECORDING_DEVICE_TYPE_UNSPECIFIED (int): The recording device is unknown.
          SMARTPHONE (int): Speech was recorded on a smartphone.
          PC (int): Speech was recorded using a personal computer or tablet.
          PHONE_LINE (int): Speech was recorded over a phone line.
          VEHICLE (int): Speech was recorded in a vehicle.
          OTHER_OUTDOOR_DEVICE (int): Speech was recorded outdoors.
          OTHER_INDOOR_DEVICE (int): Speech was recorded indoors.
        """
        RECORDING_DEVICE_TYPE_UNSPECIFIED = 0
        SMARTPHONE = 1
        PC = 2
        PHONE_LINE = 3
        VEHICLE = 4
        OTHER_OUTDOOR_DEVICE = 5
        OTHER_INDOOR_DEVICE = 6
class StreamingRecognizeResponse(object):
    """Namespace class grouping enums used by streaming recognition responses."""

    class SpeechEventType(enum.IntEnum):
        """
        Indicates the type of speech event.

        Attributes:
          SPEECH_EVENT_UNSPECIFIED (int): No speech event specified.
          END_OF_SINGLE_UTTERANCE (int): This event indicates that the server has detected the end of the user's
          speech utterance and expects no additional speech. Therefore, the server
          will not process additional audio (although it may subsequently return
          additional results). The client should stop sending additional audio
          data, half-close the gRPC connection, and wait for any additional
          results until the server closes the gRPC connection. This event is only
          sent if ``single_utterance`` was set to ``true``, and is not used
          otherwise.
        """
        SPEECH_EVENT_UNSPECIFIED = 0
        END_OF_SINGLE_UTTERANCE = 1
| {
"content_hash": "e93bc93e3982d29a7bcbb806fc3401be",
"timestamp": "",
"source": "github",
"line_count": 180,
"max_line_length": 116,
"avg_line_length": 46.166666666666664,
"alnum_prop": 0.6469314079422382,
"repo_name": "tseaver/google-cloud-python",
"id": "aff388c7d8dba79e6bf01469e925b2aac9f9c66e",
"size": "8912",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "speech/google/cloud/speech_v1/gapic/enums.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "1094"
},
{
"name": "Python",
"bytes": "30519057"
},
{
"name": "Shell",
"bytes": "9148"
}
],
"symlink_target": ""
} |
"""Test Wallet encryption"""
import time
from test_framework.test_framework import DigiByteTestFramework
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
assert_greater_than,
assert_greater_than_or_equal,
)
class WalletEncryptionTest(DigiByteTestFramework):
    """Exercise the wallet encryption RPCs: encryptwallet, walletpassphrase,
    walletlock and walletpassphrasechange, plus the unlock-timeout bounds."""

    def set_test_params(self):
        self.setup_clean_chain = True
        self.num_nodes = 1

    def skip_test_if_missing_module(self):
        self.skip_if_no_wallet()

    def run_test(self):
        node = self.nodes[0]
        first_passphrase = "WalletPassphrase"
        second_passphrase = "SecondWalletPassphrase"

        # The wallet starts out unencrypted, so dumping a key must succeed.
        address = node.getnewaddress()
        privkey = node.dumpprivkey(address)
        assert_equal(privkey[:1], "c")
        assert_equal(len(privkey), 52)

        # Encrypt the wallet.
        node.encryptwallet(first_passphrase)

        # Dumping a key now requires the passphrase first.
        assert_raises_rpc_error(-13, "Please enter the wallet passphrase with walletpassphrase first", node.dumpprivkey, address)

        # Unlocking with the correct passphrase restores access ...
        node.walletpassphrase(first_passphrase, 2)
        assert_equal(privkey, node.dumpprivkey(address))

        # ... until the unlock timeout expires.
        time.sleep(3)
        assert_raises_rpc_error(-13, "Please enter the wallet passphrase with walletpassphrase first", node.dumpprivkey, address)

        # A wrong passphrase is rejected.
        assert_raises_rpc_error(-14, "wallet passphrase entered was incorrect", node.walletpassphrase, first_passphrase + "wrong", 10)

        # walletlock relocks an unlocked wallet immediately.
        node.walletpassphrase(first_passphrase, 84600)
        assert_equal(privkey, node.dumpprivkey(address))
        node.walletlock()
        assert_raises_rpc_error(-13, "Please enter the wallet passphrase with walletpassphrase first", node.dumpprivkey, address)

        # Changing the passphrase invalidates the old one.
        node.walletpassphrasechange(first_passphrase, second_passphrase)
        assert_raises_rpc_error(-14, "wallet passphrase entered was incorrect", node.walletpassphrase, first_passphrase, 10)
        node.walletpassphrase(second_passphrase, 10)
        assert_equal(privkey, node.dumpprivkey(address))
        node.walletlock()

        # Negative timeouts are rejected outright.
        assert_raises_rpc_error(-8, "Timeout cannot be negative.", node.walletpassphrase, second_passphrase, -10)

        # A timeout below the cap is honoured as given.
        MAX_VALUE = 100000000
        expected_time = int(time.time()) + MAX_VALUE - 600
        node.walletpassphrase(second_passphrase, MAX_VALUE - 600)
        actual_time = node.getwalletinfo()['unlocked_until']
        assert_greater_than_or_equal(actual_time, expected_time)
        assert_greater_than(expected_time + 5, actual_time)  # 5 second buffer

        # A timeout above the cap is clamped down to the cap.
        expected_time = int(time.time()) + MAX_VALUE - 1
        node.walletpassphrase(second_passphrase, MAX_VALUE + 1000)
        actual_time = node.getwalletinfo()['unlocked_until']
        assert_greater_than_or_equal(actual_time, expected_time)
        assert_greater_than(expected_time + 5, actual_time)  # 5 second buffer


if __name__ == '__main__':
    WalletEncryptionTest().main()
| {
"content_hash": "120733d9d324fe4f274bf9c401808128",
"timestamp": "",
"source": "github",
"line_count": 79,
"max_line_length": 138,
"avg_line_length": 42.70886075949367,
"alnum_prop": 0.6760521636040309,
"repo_name": "aurarad/auroracoin",
"id": "8b4f21a2cd646dadf46cf3958fe563e680b6ba57",
"size": "3643",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/functional/wallet_encryption.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "721707"
},
{
"name": "C++",
"bytes": "3060648"
},
{
"name": "CSS",
"bytes": "1127"
},
{
"name": "Groff",
"bytes": "18860"
},
{
"name": "HTML",
"bytes": "50620"
},
{
"name": "Makefile",
"bytes": "31933"
},
{
"name": "Objective-C",
"bytes": "1052"
},
{
"name": "Objective-C++",
"bytes": "6330"
},
{
"name": "Protocol Buffer",
"bytes": "2308"
},
{
"name": "Python",
"bytes": "110348"
},
{
"name": "QMake",
"bytes": "2022"
},
{
"name": "Shell",
"bytes": "51195"
}
],
"symlink_target": ""
} |
class EventContext(object):
    """Mutable snapshot of the room state around a single event.

    current_state_ids includes the event itself; prev_state_ids excludes it.
    prev_group/delta_ids describe a previously persisted state group plus the
    delta from it to this state.
    """

    __slots__ = [
        "current_state_ids",
        "prev_state_ids",
        "state_group",
        "rejected",
        "push_actions",
        "prev_group",
        "delta_ids",
        "prev_state_events",
    ]

    def __init__(self):
        # Everything defaults to "unknown"/None except the two fields below.
        for attr in ("current_state_ids", "prev_state_ids", "state_group",
                     "prev_group", "delta_ids", "prev_state_events"):
            setattr(self, attr, None)
        self.rejected = False
        self.push_actions = []
| {
"content_hash": "30b18e9c56f22ef45816c1e87d008335",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 69,
"avg_line_length": 26.678571428571427,
"alnum_prop": 0.5568942436412316,
"repo_name": "TribeMedia/synapse",
"id": "11605b34a38deea4315c5894ef58763f880bc9a1",
"size": "1356",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "synapse/events/snapshot.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "4376"
},
{
"name": "HTML",
"bytes": "9046"
},
{
"name": "JavaScript",
"bytes": "176441"
},
{
"name": "Perl",
"bytes": "31852"
},
{
"name": "Python",
"bytes": "2748398"
},
{
"name": "Shell",
"bytes": "7827"
}
],
"symlink_target": ""
} |
from django import template
register = template.Library()
@register.filter
def class_name(value):
    """Template filter returning the name of *value*'s class."""
    klass = value.__class__
    return klass.__name__
| {
"content_hash": "0b44c23516fc0486cfb4c50d03951343",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 35,
"avg_line_length": 15.333333333333334,
"alnum_prop": 0.7101449275362319,
"repo_name": "RegioHelden/django-datawatch",
"id": "b0652ccdde88e4fc882d8f460d0371be19cc7630",
"size": "138",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "django_datawatch/templatetags/class_name.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "710"
},
{
"name": "HTML",
"bytes": "17865"
},
{
"name": "Python",
"bytes": "73346"
}
],
"symlink_target": ""
} |
from kivy import platform

# BUG FIX: ('toast') is just a parenthesized string, not a tuple, so
# ``from ... import *`` iterated its characters and failed with
# ImportError on 't'. A one-element tuple needs the trailing comma.
__all__ = ('toast',)

# Cached platform-specific toast callable; resolved lazily on first use
# so the android-only module is never imported on other platforms.
_toast = None


def _get_ref():
    """Return the platform-appropriate toast function, importing it lazily."""
    global _toast
    if _toast is None:
        if platform == 'android':
            from androidtoast import toast
        else:
            from kivytoast import toast
        _toast = toast
    return _toast


def toast(text, length_long=False):
    """Show a toast notification with *text*.

    :param text: message to display
    :param length_long: use the longer display duration when True
    """
    _get_ref()(text, length_long=length_long)
| {
"content_hash": "cd3705d19203c69a62ca53f3d78d130e",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 45,
"avg_line_length": 19.789473684210527,
"alnum_prop": 0.5904255319148937,
"repo_name": "Fogapod/VKBot",
"id": "5af1a8739e50d478905d739d6332a4f32f7930b4",
"size": "376",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "libs/toast/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "139852"
}
],
"symlink_target": ""
} |
from django.contrib.contenttypes import generic
from django.contrib.auth.models import User, Group
from django.contrib.contenttypes.models import ContentType
from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver
from django import forms
from django.utils.timezone import now
from mezzanine.pages.models import Page, RichText
from mezzanine.pages.page_processors import processor_for
from uuid import uuid4
from mezzanine.core.models import Ownable
from mezzanine.generic.fields import CommentsField
from mezzanine.conf import settings as s
from mezzanine.generic.models import Keyword, AssignedKeyword
import os.path
from django_irods.storage import IrodsStorage
# from dublincore.models import QualifiedDublinCoreElement
from dublincore import models as dc
from django.conf import settings
from django.core.files.storage import DefaultStorage
from django.core.exceptions import ObjectDoesNotExist, ValidationError
from languages_iso import languages as iso_languages
from dateutil import parser
from django.utils import simplejson as json
class GroupOwnership(models.Model):
    """Records that a User owns a Group."""
    group = models.ForeignKey(Group)
    owner = models.ForeignKey(User)
def get_user(request):
    """Authorize user based on API key if it was passed, otherwise just use the request's user.

    :param request:
    :return: django.contrib.auth.User
    """
    from tastypie.models import ApiKey

    params = request.REQUEST
    if 'api_key' in params:
        # API-key authentication: look up the key's owner.
        return ApiKey.objects.get(key=params['api_key']).user
    if request.user.is_authenticated():
        # Re-fetch so callers get a fresh User row, not the lazy request user.
        return User.objects.get(pk=request.user.pk)
    return request.user
class ResourcePermissionsMixin(Ownable):
    """Access-control fields and checks shared by resource models.

    Adds creator/owner bookkeeping, public/frozen/discoverable flags, and
    per-user / per-group view and edit ACLs. ``can_change``/``can_view``
    previously duplicated the same authorization ladder; it now lives in
    one private helper so the two stay consistent.
    """
    creator = models.ForeignKey(User,
                                related_name='creator_of_%(app_label)s_%(class)s',
                                help_text='This is the person who first uploaded the resource',
                                )
    public = models.BooleanField(
        help_text='If this is true, the resource is viewable and downloadable by anyone',
        default=True
    )
    # DO WE STILL NEED owners?
    owners = models.ManyToManyField(User,
                                    related_name='owns_%(app_label)s_%(class)s',
                                    help_text='The person who uploaded the resource'
                                    )
    frozen = models.BooleanField(
        help_text='If this is true, the resource should not be modified',
        default=False
    )
    do_not_distribute = models.BooleanField(
        help_text='If this is true, the resource owner has to designate viewers',
        default=False
    )
    discoverable = models.BooleanField(
        help_text='If this is true, it will turn up in searches.',
        default=True
    )
    published_and_frozen = models.BooleanField(
        help_text="Once this is true, no changes can be made to the resource",
        default=False
    )
    view_users = models.ManyToManyField(User,
                                        related_name='user_viewable_%(app_label)s_%(class)s',
                                        help_text='This is the set of Hydroshare Users who can view the resource',
                                        null=True, blank=True)
    view_groups = models.ManyToManyField(Group,
                                         related_name='group_viewable_%(app_label)s_%(class)s',
                                         help_text='This is the set of Hydroshare Groups who can view the resource',
                                         null=True, blank=True)
    edit_users = models.ManyToManyField(User,
                                        related_name='user_editable_%(app_label)s_%(class)s',
                                        help_text='This is the set of Hydroshare Users who can edit the resource',
                                        null=True, blank=True)
    edit_groups = models.ManyToManyField(Group,
                                         related_name='group_editable_%(app_label)s_%(class)s',
                                         help_text='This is the set of Hydroshare Groups who can edit the resource',
                                         null=True, blank=True)

    class Meta:
        abstract = True

    @property
    def permissions_store(self):
        return s.PERMISSIONS_DB

    def can_add(self, request):
        return self.can_change(request)

    def can_delete(self, request):
        return self.can_change(request)

    def _grants_access(self, user, allowed_users, allowed_groups):
        """Shared authorization ladder for can_change/can_view.

        :param user: the (possibly anonymous) requesting user
        :param allowed_users: User queryset granted this permission
        :param allowed_groups: Group queryset granted this permission
        :return: True if access is granted
        """
        if not user.is_authenticated():
            return False
        if not self.user:
            # No recorded uploader (``user`` comes from Ownable):
            # only superusers get access.
            return user.is_superuser
        if user.pk == self.creator.pk:
            return True
        if user.pk in {o.pk for o in self.owners.all()}:
            return True
        if allowed_users.filter(pk=user.pk).exists():
            return True
        if allowed_groups.filter(pk__in=set(g.pk for g in user.groups.all())):
            return True
        return False

    def can_change(self, request):
        """True if the requesting user may modify this resource."""
        return self._grants_access(get_user(request), self.edit_users, self.edit_groups)

    def can_view(self, request):
        """True if the requesting user may view this resource."""
        # get_user is called before the public check, as in the original,
        # so an invalid api_key still raises rather than short-circuiting.
        user = get_user(request)
        if self.public:
            return True
        return self._grants_access(user, self.view_users, self.view_groups)
# this should be used as the page processor for anything with pagepermissionsmixin
# page_processor_for(MyPage)(ga_resources.views.page_permissions_page_processor)
def page_permissions_page_processor(request, page):
    """Build the permission context (ACL sets + can_edit flag) for *page*."""
    content_page = page.get_content_model()
    viewer = get_user(request)

    edit_groups = set(content_page.edit_groups.all())
    view_groups = set(content_page.view_groups.all())
    edit_users = set(content_page.edit_users.all())
    view_users = set(content_page.view_users.all())

    # Editor if listed directly, or via any shared group membership.
    can_edit = (viewer in edit_users) or \
        (len(edit_groups.intersection(set(viewer.groups.all()))) > 0)

    return {
        "edit_groups": edit_groups,
        "view_groups": view_groups,
        "edit_users": edit_users,
        "view_users": view_users,
        "can_edit": can_edit,
    }
class AbstractMetaDataElement(models.Model):
    """Abstract base for metadata elements attached to a metadata container.

    Each element points at its container object via a generic foreign key;
    concrete subclasses must implement create/update/remove.
    """
    # Dublin Core term name; overridden by each concrete subclass.
    term = None

    object_id = models.PositiveIntegerField()
    content_type = models.ForeignKey(ContentType)
    content_object = generic.GenericForeignKey('content_type', 'object_id')

    @property
    def metadata(self):
        """The metadata container object this element belongs to."""
        return self.content_object

    @classmethod
    def create(cls, **kwargs):
        raise NotImplementedError("Please implement this method")

    @classmethod
    def update(cls, element_id, **kwargs):
        raise NotImplementedError("Please implement this method")

    # could not name this method as 'delete' since the parent 'Model' class has such a method
    @classmethod
    def remove(cls, element_id):
        raise NotImplementedError("Please implement this method")

    class Meta:
        abstract = True
class ExternalProfileLink(models.Model):
    """A typed external profile URL attached to another object.

    Attached via a generic foreign key; see Party.external_links for the
    main consumer.
    """
    type = models.CharField(max_length=50)
    url = models.URLField()
    object_id = models.PositiveIntegerField()
    content_type = models.ForeignKey(ContentType)
    content_object = generic.GenericForeignKey('content_type', 'object_id')

    class Meta:
        unique_together = ("type", "url", "content_type")
class Party(AbstractMetaDataElement):
    """Abstract base for person/organization metadata elements.

    Concrete subclasses are Creator (ordered, repeatable) and Contributor
    (unordered). The create/update/remove class methods also maintain the
    Creator ordering so it stays a contiguous 1-based sequence.
    """
    description = models.URLField(null=True, blank=True)
    name = models.CharField(max_length=100)
    organization = models.CharField(max_length=200, null=True, blank=True)
    email = models.EmailField(null=True, blank=True)
    address = models.CharField(max_length=250, null=True, blank=True)
    phone = models.CharField(max_length=25, null=True, blank=True)
    homepage = models.URLField(null=True, blank=True)
    researcherID = models.URLField(null=True, blank=True)
    researchGateID = models.URLField(null=True, blank=True)
    external_links = generic.GenericRelation(ExternalProfileLink)

    def __unicode__(self):
        return self.name

    class Meta:
        abstract = True

    @classmethod
    def create(cls, **kwargs):
        """Create a Creator or Contributor (dispatching on the subclass name).

        Required kwargs: 'name' and 'metadata_obj' (the metadata container).
        Optional kwargs: the profile fields plus 'profile_links', a list of
        {'type','url'} dicts. Raises ValidationError on missing/invalid input.
        """
        element_name = cls.__name__
        if 'name' in kwargs:
            if 'metadata_obj' in kwargs:
                if not isinstance(kwargs['metadata_obj'], CoreMetaData) and not issubclass(kwargs['metadata_obj'], CoreMetaData):
                    raise ValidationError("%s metadata element can't be created for metadata type:%s" % (element_name, type(kwargs['metadata_obj'])))
                metadata_obj = kwargs['metadata_obj']
                metadata_type = ContentType.objects.get_for_model(metadata_obj)
                if element_name == 'Creator':
                    # New creators are appended at the end of the order sequence.
                    party = Creator.objects.filter(object_id=metadata_obj.id, content_type=metadata_type).last()
                    creator_order = 1
                    if party:
                        creator_order = party.order + 1
                    party = Creator.objects.create(name=kwargs['name'], order=creator_order, content_object=metadata_obj)
                else:
                    party = Contributor.objects.create(name=kwargs['name'], content_object=metadata_obj)
                if 'profile_links' in kwargs:
                    links = kwargs['profile_links']
                    for link in links:
                        cls._create_profile_link(party, link)
            else:
                raise ValidationError("Metadata container object for which metadata element 'Creator' to be created is missing.")
            # Optional profile fields.
            if 'description' in kwargs:
                party.description = kwargs['description']
            if 'organization' in kwargs:
                party.organization = kwargs['organization']
            if 'email' in kwargs:
                party.email = kwargs['email']
            if 'address' in kwargs:
                party.address = kwargs['address']
            if 'phone' in kwargs:
                party.phone = kwargs['phone']
            if 'homepage' in kwargs:
                party.homepage = kwargs['homepage']
            if 'researcherID' in kwargs:
                party.researcherID = kwargs['researcherID']
            if 'researchGateID' in kwargs:
                party.researchGateID = kwargs['researchGateID']
            party.save()
            return party
        else:
            raise ValidationError("Name for the %s is missing." % element_name.lower())

    @classmethod
    def update(cls, element_id, **kwargs):
        """Update the party identified by element_id from kwargs.

        Handles profile fields, Creator 'order' (renumbering siblings), and
        'profile_links' (update when 'link_id' present, otherwise create).
        """
        element_name = cls.__name__
        if element_name == 'Creator':
            party = Creator.objects.get(id=element_id)
        else:
            party = Contributor.objects.get(id=element_id)
        # NOTE(review): objects.get raises DoesNotExist itself, so the
        # else branch below is effectively unreachable; kept for safety.
        if party:
            if 'name' in kwargs:
                party.name = kwargs['name']
            if 'description' in kwargs:
                party.description = kwargs['description']
            if 'organization' in kwargs:
                party.organization = kwargs['organization']
            if 'email' in kwargs:
                party.email = kwargs['email']
            if 'address' in kwargs:
                party.address = kwargs['address']
            if 'phone' in kwargs:
                party.phone = kwargs['phone']
            if 'homepage' in kwargs:
                party.homepage = kwargs['homepage']
            if 'researcherID' in kwargs:
                party.researcherID = kwargs['researcherID']
            if 'researchGateID' in kwargs:
                party.researchGateID = kwargs['researchGateID']
            # updating the order of a creator needs updating the order attribute of all other creators
            # of the same resource
            if 'order' in kwargs:
                if isinstance(party, Creator):
                    if kwargs['order'] == 0:
                        kwargs['order'] = 1
                    if party.order != kwargs['order']:
                        resource_creators = Creator.objects.filter(object_id=party.object_id, content_type__pk=party.content_type.id).all()
                        if kwargs['order'] > len(resource_creators):
                            kwargs['order'] = len(resource_creators)
                        for res_cr in resource_creators:
                            if party.order > kwargs['order']:
                                if res_cr.order < party.order:
                                    res_cr.order += 1
                                    res_cr.save()
                            else:
                                if res_cr.order > party.order:
                                    res_cr.order -= 1
                                    res_cr.save()
                        party.order = kwargs['order']
            # either create or update external profile links
            if 'profile_links' in kwargs:
                links = kwargs['profile_links']
                for link in links:
                    if 'link_id' in link:  # need to update an existing profile link
                        cls._update_profile_link(party, link)
                    elif 'type' in link and 'url' in link:  # add a new profile link
                        cls._create_profile_link(party, link)
            party.save()
        else:
            # BUG FIX: the original interpolated kwargs['id'], which is never
            # passed (the id is the element_id parameter) and raised KeyError
            # instead of the intended ObjectDoesNotExist.
            raise ObjectDoesNotExist("No %s was found for the provided id:%s" % (element_name, element_id))

    @classmethod
    def remove(cls, element_id):
        """Delete the party; for Creators, renumber the remaining siblings."""
        element_name = cls.__name__
        if element_name == 'Creator':
            party = Creator.objects.get(id=element_id)
        else:
            party = Contributor.objects.get(id=element_id)
        # if we are deleting a creator, then we have to update the order attribute of remaining
        # creators associated with a resource
        if party:
            # make sure we are not deleting all creators of a resource
            if isinstance(party, Creator):
                if Creator.objects.filter(object_id=party.object_id, content_type__pk=party.content_type.id).count() == 1:
                    raise ValidationError("The only creator of the resource can't be deleted.")
                creators_to_update = Creator.objects.filter(
                    object_id=party.object_id, content_type__pk=party.content_type.id).exclude(order=party.order).all()
                for cr in creators_to_update:
                    if cr.order > party.order:
                        cr.order -= 1
                        cr.save()
            party.delete()
        else:
            raise ObjectDoesNotExist("No %s element was found for id:%d." % (element_name, element_id))

    @classmethod
    def _create_profile_link(cls, party, link):
        """Attach a new external profile link; type and url must each be unique per party."""
        if 'type' in link and 'url' in link:
            # check that the type is unique for the party
            if party.external_links.filter(type=link['type']).count() > 0:
                raise ValidationError("External profile link type:%s already exists "
                                      "for this %s" % (link['type'], type(party).__name__))
            if party.external_links.filter(url=link['url']).count() > 0:
                raise ValidationError("External profile link url:%s already exists "
                                      "for this %s" % (link['url'], type(party).__name__))
            p_link = ExternalProfileLink(type=link['type'], url=link['url'], content_object=party)
            p_link.save()
        else:
            raise ValidationError("Invalid %s profile link data." % type(party).__name__)

    @classmethod
    def _update_profile_link(cls, party, link):
        """
        if the link dict contains only key 'link_id' then the link will be deleted
        otherwise the link will be updated
        """
        p_link = ExternalProfileLink.objects.get(id=link['link_id'])
        if p_link:
            if not 'type' in link and not 'url' in link:
                # delete the link
                p_link.delete()
            else:
                if 'type' in link:
                    # check that the type is unique for the party
                    if p_link.type != link['type']:
                        if party.external_links.filter(type=link['type']).count() > 0:
                            raise ValidationError("External profile link type:%s "
                                                  "already exists for this %s" % (link['type'], type(party).__name__))
                        else:
                            p_link.type = link['type']
                if 'url' in link:
                    # check that the url is unique for the party
                    if p_link.url != link['url']:
                        if party.external_links.filter(url=link['url']).count() > 0:
                            raise ValidationError("External profile link url:%s already exists "
                                                  "for this %s" % (link['url'], type(party).__name__))
                        else:
                            p_link.url = link['url']
                p_link.save()
        else:
            raise ObjectDoesNotExist("%s external link does not exist "
                                     "for ID:%s" % (type(party).__name__, link['link_id']))
class Contributor(Party):
    """Dublin Core 'Contributor' metadata element (an unordered Party)."""
    term = 'Contributor'
# Example of repeatable metadata element
class Creator(Party):
    """Dublin Core 'Creator' metadata element.

    Repeatable; 'order' is the 1-based display position among a resource's
    creators and is kept contiguous by Party.create/update/remove.
    """
    term = "Creator"
    order = models.PositiveIntegerField()

    class Meta:
        ordering = ['order']
class Description(AbstractMetaDataElement):
    """Dublin Core 'Description' (abstract) element -- one per resource."""
    term = 'Description'
    abstract = models.CharField(max_length=500)

    def __unicode__(self):
        return self.abstract

    class Meta:
        unique_together = ("content_type", "object_id")

    @classmethod
    def create(cls, **kwargs):
        """Create the resource's description; 'abstract' and 'metadata_obj' required."""
        if 'abstract' not in kwargs:
            raise ValidationError("Abstract of the description element is missing.")
        if 'metadata_obj' not in kwargs:
            raise ValidationError('Metadata instance for which description element to be created is missing.')
        # No explicit duplicate check needed: Meta.unique_together already
        # enforces a single description element per resource.
        return Description.objects.create(abstract=kwargs['abstract'],
                                          content_object=kwargs['metadata_obj'])

    @classmethod
    def update(cls, element_id, **kwargs):
        """Replace the abstract text of the element identified by element_id."""
        description = Description.objects.get(id=element_id)
        if not description:
            raise ObjectDoesNotExist("No description element was found for the provided id:%s" % element_id)
        if 'abstract' not in kwargs:
            raise ValidationError('Abstract for description element is missing.')
        description.abstract = kwargs['abstract']
        description.save()

    @classmethod
    def remove(cls, element_id):
        """Description is required metadata; deletion is always an error."""
        raise ValidationError("Description element of a resource can't be deleted.")
class Title(AbstractMetaDataElement):
    """Dublin Core 'Title' metadata element -- exactly one per resource."""
    term = 'Title'
    value = models.CharField(max_length=300)

    def __unicode__(self):
        return self.value

    class Meta:
        unique_together = ("content_type", "object_id")

    @classmethod
    def create(cls, **kwargs):
        """Create the resource's title; 'value' and 'metadata_obj' required."""
        if 'value' not in kwargs:
            raise ValidationError("Value of the title element is missing.")
        if 'metadata_obj' not in kwargs:
            raise ValidationError('Metadata instance for which title element to be created is missing.')
        return Title.objects.create(value=kwargs['value'],
                                    content_object=kwargs['metadata_obj'])

    @classmethod
    def update(cls, element_id, **kwargs):
        """Replace the title text of the element identified by element_id."""
        title = Title.objects.get(id=element_id)
        if not title:
            # NOTE(review): preserved as a ValidationError, unlike the
            # ObjectDoesNotExist raised by the sibling elements.
            raise ValidationError("No title element was found for the provided id:%s" % element_id)
        if 'value' not in kwargs:
            raise ValidationError('Value for title is missing.')
        title.value = kwargs['value']
        title.save()

    @classmethod
    def remove(cls, element_id):
        """Title is mandatory metadata; deletion is always an error."""
        raise ValidationError("Title element of a resource can't be deleted.")
class Type(AbstractMetaDataElement):
    """Dublin Core 'Type' metadata element: a URL identifying the resource type.

    Exactly one per resource (Meta.unique_together) and never deletable.
    """
    term = 'Type'
    url = models.URLField()

    def __unicode__(self):
        # BUG FIX: the original returned self.value, but Type declares no
        # 'value' field (only 'url'), so stringifying an instance raised
        # AttributeError.
        return self.url

    class Meta:
        unique_together = ("content_type", "object_id")

    @classmethod
    def create(cls, **kwargs):
        """Create the Type element; 'url' and 'metadata_obj' are required."""
        if 'url' in kwargs:
            if 'metadata_obj' in kwargs:
                metadata_obj = kwargs['metadata_obj']
                type = Type.objects.create(url=kwargs['url'], content_object=metadata_obj)
                return type
            else:
                raise ValidationError('Metadata instance for which type element to be created is missing.')
        else:
            raise ValidationError("URL of the type element is missing.")

    @classmethod
    def update(cls, element_id, **kwargs):
        """Replace the URL of the Type element identified by element_id."""
        type = Type.objects.get(id=element_id)
        # NOTE(review): objects.get raises DoesNotExist itself, so the else
        # branch below is effectively unreachable; kept as-is.
        if type:
            if 'url' in kwargs:
                type.url = kwargs['url']
                type.save()
            else:
                raise ValidationError('URL for type element is missing.')
        else:
            raise ObjectDoesNotExist("No type element was found for the provided id:%s" % element_id)

    @classmethod
    def remove(cls, element_id):
        """Type is required metadata; deletion is always an error."""
        raise ValidationError("Type element of a resource can't be deleted.")
class Date(AbstractMetaDataElement):
    """Dublin Core 'Date' metadata element.

    At most one element per type per resource; 'end_date' is only
    meaningful for the 'valid' type. Values are parsed leniently with
    dateutil.parser.
    """
    DATE_TYPE_CHOICES = (
        ('created', 'Created'),
        ('modified', 'Modified'),
        ('valid', 'Valid'),
        ('available', 'Available'),
        ('published', 'Published')
    )

    term = 'Date'
    type = models.CharField(max_length=20, choices=DATE_TYPE_CHOICES)
    start_date = models.DateTimeField()
    end_date = models.DateTimeField(null=True, blank=True)

    @classmethod
    def create(cls, **kwargs):
        """Create a Date element.

        Required kwargs: 'type', 'metadata_obj', 'start_date'; 'end_date' is
        honoured only for type 'valid'. Raises ValidationError for missing or
        invalid input, TypeError for unparseable date values.
        """
        if 'type' in kwargs:
            # check the type doesn't already exists - we allow only one date type per resource
            if 'metadata_obj' in kwargs:
                metadata_obj = kwargs['metadata_obj']
                metadata_type = ContentType.objects.get_for_model(metadata_obj)
                dt = Date.objects.filter(type= kwargs['type'], object_id=metadata_obj.id, content_type=metadata_type).first()
                if dt:
                    raise ValidationError('Date type:%s already exists' % kwargs['type'])
                if not kwargs['type'] in ['created', 'modified', 'valid', 'available', 'published']:
                    raise ValidationError('Invalid date type:%s' % kwargs['type'])
                # 'published'/'available' are only legal once the resource has
                # reached the matching lifecycle state.
                if kwargs['type'] == 'published':
                    if not metadata_obj.resource.published_and_frozen:
                        raise ValidationError("Resource is not published yet.")
                if kwargs['type'] == 'available':
                    if not metadata_obj.resource.public:
                        raise ValidationError("Resource has not been shared yet.")
                if 'start_date' in kwargs:
                    try:
                        start_dt = parser.parse(str(kwargs['start_date']))
                    except TypeError:
                        raise TypeError("Not a valid date value.")
                else:
                    raise ValidationError('Date value is missing.')
                # end_date is used only for date type 'valid'
                if kwargs['type'] == 'valid':
                    if 'end_date' in kwargs:
                        try:
                            end_dt = parser.parse(str(kwargs['end_date']))
                        except TypeError:
                            raise TypeError("Not a valid end date value.")
                        dt = Date.objects.create(type=kwargs['type'], start_date=start_dt, end_date=end_dt, content_object=metadata_obj)
                    else:
                        dt = Date.objects.create(type=kwargs['type'], start_date=start_dt, content_object=metadata_obj)
                else:
                    dt = Date.objects.create(type=kwargs['type'], start_date=start_dt, content_object=metadata_obj)
                return dt
            else:
                raise ValidationError('Metadata instance for which date element to be created is missing.')
        else:
            raise ValidationError("Type of date element is missing.")

    @classmethod
    def update(cls, element_id, **kwargs):
        """Update a Date element's value(s).

        'created' can never be changed; 'modified' is refreshed from the
        resource's own updated timestamp rather than the supplied value.
        """
        dt = Date.objects.get(id=element_id)
        # NOTE(review): 'metadata_obj' is read unconditionally here, so
        # callers must always pass it (a missing key raises KeyError) --
        # it is only actually used for the 'modified' type. TODO confirm.
        metadata_obj = kwargs['metadata_obj']
        if dt:
            if 'start_date' in kwargs:
                try:
                    start_dt = parser.parse(str(kwargs['start_date']))
                except TypeError:
                    raise TypeError("Not a valid date value.")
                if dt.type == 'created':
                    raise ValidationError("Resource creation date can't be changed")
                elif dt.type == 'modified':
                    # modified date tracks the resource, not the caller's value
                    dt.start_date = metadata_obj.resource.updated
                    dt.save()
                elif dt.type == 'valid':
                    if 'end_date' in kwargs:
                        try:
                            end_dt = parser.parse(str(kwargs['end_date']))
                        except TypeError:
                            raise TypeError("Not a valid date value.")
                        dt.start_date = start_dt
                        dt.end_date = end_dt
                        dt.save()
                    else:
                        dt.start_date = start_dt
                        dt.save()
                else:
                    dt.start_date = start_dt
                    dt.save()
            else:
                raise ValidationError("Date value is missing.")
        else:
            raise ObjectDoesNotExist("No date element was found for the provided id:%s" % element_id)

    @classmethod
    def remove(cls, element_id):
        """Delete a Date element; 'created' and 'modified' are protected."""
        dt = Date.objects.get(id=element_id)
        if dt:
            if dt.type in ['created', 'modified']:
                raise ValidationError("Date element of type:%s can't be deleted." % dt.type)
            dt.delete()
        else:
            raise ObjectDoesNotExist("No date element was found for id:%d." % element_id)
class Relation(AbstractMetaDataElement):
    """Dublin Core 'Relation' metadata element.

    Links this resource to another entity; at most one element of each
    relation type per resource.
    """
    SOURCE_TYPES = (
        ('isPartOf', 'Part Of'),
        ('isExecutedBy', 'Executed By'),
        ('isCreatedBy', 'Created By'),
        ('isVersionOf', 'Version Of'),
        ('isDataFor', 'Data For'),
    )

    term = 'Relation'
    type = models.CharField(max_length=100, choices=SOURCE_TYPES)
    value = models.CharField(max_length=500)

    @classmethod
    def create(cls, **kwargs):
        """Create a Relation; 'type', 'value' and 'metadata_obj' are required.

        NOTE(review): a missing 'type' raises ObjectDoesNotExist here, unlike
        the ValidationError the sibling elements use -- preserved as-is.
        """
        if 'type' in kwargs:
            if 'metadata_obj' in kwargs:
                metadata_obj = kwargs['metadata_obj']
                metadata_type = ContentType.objects.get_for_model(metadata_obj)
                dt = Relation.objects.filter(type= kwargs['type'], object_id=metadata_obj.id, content_type=metadata_type).first()
                if dt:
                    raise ValidationError('Relation type:%s already exists.' % kwargs['type'])
                if 'value' in kwargs:
                    rel = Relation.objects.create(type=kwargs['type'], value=kwargs['value'], content_object=metadata_obj)
                    return rel
                else:
                    raise ValidationError('Value for relation element is missing.')
            else:
                raise ValidationError('Metadata instance for which relation element to be created is missing.')
        else:
            raise ObjectDoesNotExist("Type of relation element is missing.")

    @classmethod
    def update(cls, element_id, **kwargs):
        """Update the type and/or value of the Relation identified by element_id."""
        rel = Relation.objects.get(id=element_id)
        if rel:
            if 'type' in kwargs:
                if rel.type != kwargs['type']:
                    # check this new relation type not already exists
                    if Relation.objects.filter(type=kwargs['type'], object_id=rel.object_id,
                                               content_type__pk=rel.content_type.id).count()> 0:
                        raise ValidationError('Relation type:%s already exists.' % kwargs['type'])
                    else:
                        rel.type = kwargs['type']
            if 'value' in kwargs:
                rel.value = kwargs['value']
            # save() placement reconstructed from surrounding siblings
            # (Party.update saves once at this level) -- TODO confirm
            # against upstream, original indentation was lost.
            rel.save()
        else:
            raise ObjectDoesNotExist("No relation element exists for the provided id:%s" % element_id)

    @classmethod
    def remove(cls, element_id):
        """Delete the Relation identified by element_id."""
        rel = Relation.objects.get(id=element_id)
        if rel:
            rel.delete()
        else:
            raise ObjectDoesNotExist("No relation element exists for id:%d." % element_id)
class Identifier(AbstractMetaDataElement):
    """Identifier metadata element - a named URL identifying the resource.

    Identifier names are unique per resource (case-insensitive); the
    system-managed 'hydroshareIdentifier' and 'DOI' identifiers are immutable.
    """
    term = 'Identifier'
    name = models.CharField(max_length=100)
    url = models.URLField(unique=True)

    @classmethod
    def create(cls, **kwargs):
        """Create an identifier; requires 'name', 'url' and 'metadata_obj' kwargs."""
        if 'name' not in kwargs:
            raise ValidationError("Name of identifier element is missing.")
        if 'metadata_obj' not in kwargs:
            # bug fix: message used to say 'date element' (copy/paste error)
            raise ValidationError('Metadata instance for which identifier element to be created is missing.')
        metadata_obj = kwargs['metadata_obj']
        metadata_type = ContentType.objects.get_for_model(metadata_obj)
        # identifier name needs to be unique per resource (case-insensitive)
        idf = Identifier.objects.filter(name__iexact=kwargs['name'], object_id=metadata_obj.id,
                                        content_type=metadata_type).first()
        if idf:
            raise ValidationError('Identifier name:%s already exists' % kwargs['name'])
        # a DOI identifier is only valid once the resource actually has a DOI
        if kwargs['name'].lower() == 'doi':
            if not metadata_obj.resource.doi:
                raise ValidationError("Identifier of 'DOI' type can't be created for a resource that has not been assigned a DOI yet.")
        if 'url' not in kwargs:
            raise ValidationError('URL for the identifier is missing.')
        return Identifier.objects.create(name=kwargs['name'], url=kwargs['url'],
                                         content_object=metadata_obj)

    @classmethod
    def update(cls, element_id, **kwargs):
        """Update name and/or url; system-managed identifiers can't be renamed."""
        try:
            idf = Identifier.objects.get(id=element_id)
        except ObjectDoesNotExist:
            # .get() raises on a missing id; re-raise with the intended message
            raise ObjectDoesNotExist("No identifier element was found for the provided id:%s" % element_id)
        if 'name' in kwargs:
            if idf.name.lower() != kwargs['name'].lower():
                if idf.name.lower() == 'hydroshareidentifier':
                    raise ValidationError("Identifier name 'hydroshareIdentifier' can't be changed.")
                if idf.name.lower() == 'doi':
                    raise ValidationError("Identifier name 'DOI' can't be changed.")
                # check this new identifier name not already exists
                if Identifier.objects.filter(name__iexact=kwargs['name'], object_id=idf.object_id,
                                             content_type__pk=idf.content_type.id).count() > 0:
                    raise ValidationError('Identifier name:%s already exists.' % kwargs['name'])
                idf.name = kwargs['name']
        if 'url' in kwargs:
            if idf.url.lower() != kwargs['url'].lower():
                # the system-generated resource url must never change
                if idf.url.lower().find('http://hydroshare.org/resource') == 0:
                    raise ValidationError("Hydroshare identifier url value can't be changed.")
                # check this new identifier url not already exists
                if Identifier.objects.filter(url__iexact=kwargs['url'], object_id=idf.object_id,
                                             content_type__pk=idf.content_type.id).count() > 0:
                    raise ValidationError('Identifier URL:%s already exists.' % kwargs['url'])
                idf.url = kwargs['url']
        idf.save()

    @classmethod
    def remove(cls, element_id):
        """Delete an identifier; system-managed identifiers are protected."""
        try:
            idf = Identifier.objects.get(id=element_id)
        except ObjectDoesNotExist:
            raise ObjectDoesNotExist("No identifier element was found for id:%s." % element_id)
        if idf.name.lower() == 'hydroshareidentifier':
            raise ValidationError("Hydroshare identifier:%s can't be deleted." % idf.name)
        if idf.name.lower() == 'doi':
            if idf.content_object.resource.doi:
                raise ValidationError("Hydroshare identifier:%s can't be deleted for a resource that has been assigned a DOI." % idf.name)
        idf.delete()
class Publisher(AbstractMetaDataElement):
    # Publisher metadata element - who published the resource. The special
    # publisher 'HydroShare' is normalized/protected throughout.
    term = 'Publisher'
    name = models.CharField(max_length=200)
    url = models.URLField()
    class Meta:
        # at most one publisher element per metadata object
        unique_together = ("content_type", "object_id")
    @classmethod
    def create(cls, **kwargs):
        # Create the publisher element; requires 'name', 'url' and 'metadata_obj'.
        if 'name' in kwargs:
            if 'metadata_obj' in kwargs:
                metadata_obj = kwargs['metadata_obj']
                if 'url' in kwargs:
                    # NOTE(review): the error message implies the intent is
                    # "reject when the resource is neither public nor published",
                    # i.e. `not (public or published_and_frozen)`; as written this
                    # rejects only resources that are published but not public.
                    # Confirm intent before changing.
                    if not metadata_obj.resource.public and metadata_obj.resource.published_and_frozen:
                        raise ValidationError("Publisher element can't be created for a resource that is not shared nor published.")
                    if kwargs['name'].lower() == 'hydroshare':
                        if not metadata_obj.resource.files.all():
                            raise ValidationError("Hydroshare can't be the publisher for a resource that has no content files.")
                        else:
                            # normalize the canonical system publisher name/url
                            kwargs['name'] = 'HydroShare'
                            kwargs['url'] = 'http://hydroshare.org'
                    pub = Publisher.objects.create(name=kwargs['name'], url=kwargs['url'], content_object=metadata_obj)
                    return pub
                else:
                    raise ValidationError('URL for the publisher is missing.')
            else:
                raise ValidationError('Metadata instance for which publisher element to be created is missing.')
        else:
            raise ValidationError("Name of publisher is missing.")
    @classmethod
    def update(cls, element_id, **kwargs):
        # Update the publisher; blocked while the resource is frozen or published.
        pub = Publisher.objects.get(id=element_id)
        if 'metadata_obj' in kwargs:
            metadata_obj = kwargs['metadata_obj']
        else:
            raise ValidationError('Metadata instance for which publisher element to be updated is missing.')
        if metadata_obj.resource.frozen:
            raise ValidationError("Resource metadata can't be edited when the resource is in frozen state.")
        if metadata_obj.resource.published_and_frozen:
            raise ValidationError("Resource metadata can't be edited once the resource has been published.")
        # NOTE(review): objects.get() raises DoesNotExist for a missing id, so
        # the else-branch below is effectively unreachable
        if pub:
            if 'name' in kwargs:
                if pub.name.lower() != kwargs['name'].lower():
                    if pub.name.lower() == 'hydroshare':
                        # can't rename away from HydroShare while content files exist
                        if metadata_obj.resource.files.all():
                            raise ValidationError("Publisher 'HydroShare' can't be changed for a resource that has content files.")
                    elif kwargs['name'].lower() == 'hydroshare':
                        if not metadata_obj.resource.files.all():
                            raise ValidationError("'HydroShare' can't be a publisher for a resource that has no content files.")
                    # resources with content files are forced to the system publisher
                    if metadata_obj.resource.files.all():
                        pub.name = 'HydroShare'
                    else:
                        pub.name = kwargs['name']
            if 'url' in kwargs:
                if pub.url != kwargs['url']:
                    # make sure we are not changing the url for hydroshare publisher
                    if pub.name.lower() == 'hydroshare':
                        pub.url = 'http://hydroshare.org'
                    else:
                        pub.url = kwargs['url']
            pub.save()
        else:
            raise ObjectDoesNotExist("No publisher element was found for the provided id:%s" % element_id)
    @classmethod
    def remove(cls, element_id):
        # Delete the publisher element, subject to resource-state restrictions.
        pub = Publisher.objects.get(id=element_id)
        if pub.content_object.resource.frozen:
            raise ValidationError("Resource metadata can't be edited when the resource is in frozen state.")
        if pub.content_object.resource.published_and_frozen:
            raise ValidationError("Resource metadata can't be edited once the resource has been published.")
        if pub.content_object.resource.public:
            raise ValidationError("Resource publisher can't be deleted for shared resource.")
        # NOTE(review): .get() raises for a missing id, so the else-branch below
        # is effectively unreachable
        if pub:
            if pub.name.lower() == 'hydroshare':
                if pub.content_object.resource.files.all():
                    raise ValidationError("Publisher HydroShare can't be deleted for a resource that has content files.")
            # NOTE(review): redundant with the public check above - confirm
            if pub.content_object.resource.public:
                raise ValidationError("Publisher can't be deleted for a public resource.")
            pub.delete()
        else:
            raise ObjectDoesNotExist("No publisher element was found for id:%d." % element_id)
class Language(AbstractMetaDataElement):
    """Language metadata element - a single ISO language code per resource."""
    term = 'Language'
    code = models.CharField(max_length=3, choices=iso_languages)

    def __unicode__(self):
        return self.code

    @classmethod
    def create(cls, **kwargs):
        """Create the language element; requires 'code' and 'metadata_obj' kwargs."""
        if 'code' not in kwargs:
            raise ValidationError("Language code is missing.")
        if 'metadata_obj' not in kwargs:
            # typo fix: message used to say 'langauge'
            raise ValidationError('Metadata instance for which language element to be created is missing.')
        metadata_obj = kwargs['metadata_obj']
        metadata_type = ContentType.objects.get_for_model(metadata_obj)
        # only one language element is allowed per resource
        lang = Language.objects.filter(object_id=metadata_obj.id, content_type=metadata_type).first()
        if lang:
            raise ValidationError('Language element already exists.')
        # the code must be one of the known iso_languages codes
        if not [t for t in iso_languages if t[0] == kwargs['code']]:
            raise ValidationError('Invalid language code:%s' % kwargs['code'])
        return Language.objects.create(code=kwargs['code'], content_object=metadata_obj)

    @classmethod
    def update(cls, element_id, **kwargs):
        """Change the language code of an existing language element."""
        try:
            lang = Language.objects.get(id=element_id)
        except ObjectDoesNotExist:
            # .get() raises on a missing id; re-raise with the intended message
            raise ObjectDoesNotExist("No language element was found for the provided id:%s" % element_id)
        if 'code' not in kwargs:
            raise ValidationError('Language code is missing.')
        # validate code
        if not [t for t in iso_languages if t[0] == kwargs['code']]:
            raise ValidationError('Invalid language code:%s' % kwargs['code'])
        if lang.code != kwargs['code']:
            # check this new language not already exists
            if Language.objects.filter(code=kwargs['code'], object_id=lang.object_id,
                                       content_type__pk=lang.content_type.id).count() > 0:
                raise ValidationError('Language:%s already exists.' % kwargs['code'])
            lang.code = kwargs['code']
            lang.save()

    @classmethod
    def remove(cls, element_id):
        """Delete the language element with id element_id."""
        try:
            lang = Language.objects.get(id=element_id)
        except ObjectDoesNotExist:
            raise ObjectDoesNotExist("No language element was found for id:%s." % element_id)
        lang.delete()
class Coverage(AbstractMetaDataElement):
    """Coverage metadata element - spatial (point/box) or temporal (period) extent.

    The coverage data is stored as a json string in ``_value``; required keys
    depend on the coverage type:
      period: {"name": ..., "start": ..., "end": ..., "scheme": "W3C-DTF"}
      point:  {"name": ..., "east": ..., "north": ...}
      box:    {"name": ..., "northlimit": ..., "eastlimit": ...,
               "southlimit": ..., "westlimit": ...}
    A resource may have at most one coverage of each type, and the 'point' and
    'box' types are mutually exclusive.
    """
    COVERAGE_TYPES = (
        ('box', 'Box'),
        ('point', 'Point'),
        ('period', 'Period')
    )
    term = 'Coverage'
    type = models.CharField(max_length=20, choices=COVERAGE_TYPES)
    # json-encoded coverage data (see class docstring for the per-type schema)
    _value = models.CharField(max_length=1024)

    @property
    def value(self):
        """Return the coverage data decoded from the stored json string."""
        # bug fix: removed a leftover debug 'print self._value' statement
        return json.loads(self._value)

    @classmethod
    def create(cls, **kwargs):
        """Create a coverage element; requires 'type', 'value' (dict) and 'metadata_obj'."""
        # TODO: validate coordinate values
        if 'type' not in kwargs:
            raise ValidationError("Type of coverage element is missing.")
        if 'metadata_obj' not in kwargs:
            # bug fix: message used to say 'date element' (copy/paste error)
            raise ValidationError('Metadata instance for which coverage element to be created is missing.')
        metadata_obj = kwargs['metadata_obj']
        metadata_type = ContentType.objects.get_for_model(metadata_obj)
        # we allow only one coverage of each type per resource
        coverage = Coverage.objects.filter(type=kwargs['type'], object_id=metadata_obj.id,
                                           content_type=metadata_type).first()
        if coverage:
            raise ValidationError('Coverage type:%s already exists' % kwargs['type'])
        if not kwargs['type'] in ['box', 'point', 'period']:
            raise ValidationError('Invalid coverage type:%s' % kwargs['type'])
        # 'box' and 'point' coverages are mutually exclusive
        if kwargs['type'] == 'box':
            coverage = Coverage.objects.filter(type='point', object_id=metadata_obj.id,
                                               content_type=metadata_type).first()
            if coverage:
                raise ValidationError("Coverage type 'Box' can't be created when there is a coverage of type 'Point'")
        elif kwargs['type'] == 'point':
            coverage = Coverage.objects.filter(type='box', object_id=metadata_obj.id,
                                               content_type=metadata_type).first()
            if coverage:
                raise ValidationError("Coverage type 'Point' can't be created when there is a coverage of type 'Box'")
        if 'value' not in kwargs:
            raise ValidationError('Coverage value is missing.')
        if not isinstance(kwargs['value'], dict):
            raise ValidationError('Invalid coverage value format.')
        if not 'name' in kwargs['value']:
            raise ValidationError("Coverage name attribute is missing.")
        cls._validate_coverage_type_value_attributes(kwargs['type'], kwargs['value'])
        # keep only the keys that are meaningful for this coverage type
        if kwargs['type'] == 'period':
            allowed_keys = ('name', 'start', 'end')
        elif kwargs['type'] == 'point':
            allowed_keys = ('name', 'east', 'north')
        else:  # box
            allowed_keys = ('name', 'northlimit', 'eastlimit', 'southlimit', 'westlimit')
        value_dict = {k: v for k, v in kwargs['value'].iteritems() if k in allowed_keys}
        return Coverage.objects.create(type=kwargs['type'], _value=json.dumps(value_dict),
                                       content_object=metadata_obj)

    @classmethod
    def update(cls, element_id, **kwargs):
        """Update type and/or value of a coverage element.

        Changing the coverage type requires a complete new 'value' dict that
        validates for the new type; otherwise supplied keys are merged into
        the existing value.
        """
        # TODO: validate coordinate values
        try:
            cov = Coverage.objects.get(id=element_id)
        except ObjectDoesNotExist:
            # .get() raises on a missing id; re-raise with the intended message
            raise ObjectDoesNotExist("No coverage element was found for the provided id:%s" % element_id)
        changing_coverage_type = False
        if 'type' in kwargs:
            if cov.type != kwargs['type']:
                # the target coverage type must not already exist for this resource
                if Coverage.objects.filter(type=kwargs['type'], object_id=cov.object_id,
                                           content_type__pk=cov.content_type.id).count() > 0:
                    raise ValidationError('Coverage type:%s already exists.' % kwargs['type'])
                if 'value' not in kwargs:
                    raise ValidationError('Coverage value is missing.')
                if not isinstance(kwargs['value'], dict):
                    raise ValidationError('Invalid coverage value format.')
                cls._validate_coverage_type_value_attributes(kwargs['type'], kwargs['value'])
                changing_coverage_type = True
        if 'value' in kwargs:
            if not isinstance(kwargs['value'], dict):
                raise ValidationError('Invalid coverage value format.')
            if changing_coverage_type:
                # start from a clean dict when the type changes
                value_dict = {}
                cov.type = kwargs['type']
            else:
                value_dict = cov.value
            if 'name' in kwargs['value']:
                value_dict['name'] = kwargs['value']['name']
            if cov.type == 'period':
                updatable_keys = ('start', 'end')
            elif cov.type == 'point':
                updatable_keys = ('east', 'north')
            elif cov.type == 'box':
                updatable_keys = ('northlimit', 'eastlimit', 'southlimit', 'westlimit')
            else:
                updatable_keys = ()
            for item_name in updatable_keys:
                if item_name in kwargs['value']:
                    value_dict[item_name] = kwargs['value'][item_name]
            cov._value = json.dumps(value_dict)
            cov.save()

    @classmethod
    def remove(cls, element_id):
        """Coverage elements are permanent - deletion is not allowed."""
        raise ValidationError("Coverage element can't be deleted.")

    @classmethod
    def _validate_coverage_type_value_attributes(cls, coverage_type, value_dict):
        """Check that value_dict carries every attribute the coverage type requires."""
        if coverage_type == 'period':
            if not 'start' in value_dict or not 'end' in value_dict:
                raise ValidationError("For coverage of type 'period' values for start date and end date needed.")
            # validate the date values
            try:
                parser.parse(value_dict['start'])
            except TypeError:
                raise TypeError("Invalid start date. Not a valid date value.")
            try:
                parser.parse(value_dict['end'])
            except TypeError:
                raise TypeError("Invalid end date. Not a valid date value.")
        elif coverage_type == 'point':
            if not 'east' in value_dict or not 'north' in value_dict:
                # bug fix: message was a copy/paste of the 'period' message
                raise ValidationError("For coverage of type 'point' values for both east and north are needed.")
        elif coverage_type == 'box':
            for value_item in ['name', 'northlimit', 'eastlimit', 'southlimit', 'westlimit']:
                if not value_item in value_dict:
                    raise ValidationError("For coverage of type 'box' values for one or more bounding box limits is missing.")
class Format(AbstractMetaDataElement):
    """Format metadata element - a content/mime format; unique per resource."""
    term = 'Format'
    value = models.CharField(max_length=50)

    def __unicode__(self):
        return self.value

    @classmethod
    def create(cls, **kwargs):
        """Create a format element; requires 'value' and 'metadata_obj' kwargs."""
        if 'value' not in kwargs:
            raise ValidationError("Format value is missing.")
        if 'metadata_obj' not in kwargs:
            raise ValidationError('Metadata instance for which format element to be created is missing.')
        metadata_obj = kwargs['metadata_obj']
        metadata_type = ContentType.objects.get_for_model(metadata_obj)
        # format values need to be unique per resource (case-insensitive)
        existing = Format.objects.filter(value__iexact=kwargs['value'], object_id=metadata_obj.id,
                                         content_type=metadata_type).first()
        if existing:
            raise ValidationError('Format:%s already exists' % kwargs['value'])
        return Format.objects.create(value=kwargs['value'], content_object=metadata_obj)

    @classmethod
    def update(cls, element_id, **kwargs):
        """Change the value of an existing format element."""
        try:
            # renamed local: 'format' shadowed the builtin
            fmt = Format.objects.get(id=element_id)
        except ObjectDoesNotExist:
            # .get() raises on a missing id; re-raise with the intended message
            raise ObjectDoesNotExist("No format element was found for the provided id:%s" % element_id)
        if 'value' not in kwargs:
            raise ValidationError('Value for format is missing.')
        if fmt.value != kwargs['value']:
            # check this new format not already exists
            if Format.objects.filter(value=kwargs['value'], object_id=fmt.object_id,
                                     content_type__pk=fmt.content_type.id).count() > 0:
                raise ValidationError('Format:%s already exists.' % kwargs['value'])
            fmt.value = kwargs['value']
            fmt.save()

    @classmethod
    def remove(cls, element_id):
        """Delete the format element with id element_id."""
        try:
            fmt = Format.objects.get(id=element_id)
        except ObjectDoesNotExist:
            raise ObjectDoesNotExist("No format element was found for id:%s." % element_id)
        fmt.delete()
class Subject(AbstractMetaDataElement):
    """Subject (keyword) metadata element - unique per resource; at least one must remain."""
    term = 'Subject'
    value = models.CharField(max_length=100)

    def __unicode__(self):
        return self.value

    @classmethod
    def create(cls, **kwargs):
        """Create a subject element; requires 'value' and 'metadata_obj' kwargs."""
        if 'value' not in kwargs:
            raise ValidationError("Subject value is missing.")
        if 'metadata_obj' not in kwargs:
            raise ValidationError('Metadata instance for which subject element to be created is missing.')
        metadata_obj = kwargs['metadata_obj']
        metadata_type = ContentType.objects.get_for_model(metadata_obj)
        # subjects need to be unique per resource (case-insensitive)
        sub = Subject.objects.filter(value__iexact=kwargs['value'], object_id=metadata_obj.id,
                                     content_type=metadata_type).first()
        if sub:
            raise ValidationError('Subject:%s already exists for this resource.' % kwargs['value'])
        return Subject.objects.create(value=kwargs['value'], content_object=metadata_obj)

    @classmethod
    def update(cls, element_id, **kwargs):
        """Change the value of an existing subject element."""
        try:
            sub = Subject.objects.get(id=element_id)
        except ObjectDoesNotExist:
            # bug fix: message used to say 'format element' (copy/paste error)
            raise ObjectDoesNotExist("No subject element was found for the provided id:%s" % element_id)
        if 'value' not in kwargs:
            raise ValidationError('Value for subject is missing.')
        if sub.value != kwargs['value']:
            # check this new subject not already exists
            if Subject.objects.filter(value__iexact=kwargs['value'], object_id=sub.object_id,
                                      content_type__pk=sub.content_type.id).count() > 0:
                raise ValidationError('Subject:%s already exists for this resource.' % kwargs['value'])
            sub.value = kwargs['value']
            sub.save()

    @classmethod
    def remove(cls, element_id):
        """Delete a subject element; the last remaining subject is protected."""
        try:
            sub = Subject.objects.get(id=element_id)
        except ObjectDoesNotExist:
            raise ObjectDoesNotExist("No subject element was found for id:%s." % element_id)
        if Subject.objects.filter(object_id=sub.object_id,
                                  content_type__pk=sub.content_type.id).count() == 1:
            # typo fix: message used to say "con't"
            raise ValidationError("The only subject element of the resource can't be deleted.")
        sub.delete()
class Source(AbstractMetaDataElement):
    """Source metadata element - what this resource was derived from; unique per resource."""
    term = 'Source'
    derived_from = models.CharField(max_length=300)

    def __unicode__(self):
        return self.derived_from

    @classmethod
    def create(cls, **kwargs):
        """Create a source element; requires 'derived_from' and 'metadata_obj' kwargs."""
        if 'derived_from' not in kwargs:
            raise ValidationError("Source data is missing.")
        if 'metadata_obj' not in kwargs:
            raise ValidationError('Metadata instance for which source element to be created is missing.')
        metadata_obj = kwargs['metadata_obj']
        metadata_type = ContentType.objects.get_for_model(metadata_obj)
        # source needs to be unique per resource
        src = Source.objects.filter(derived_from=kwargs['derived_from'], object_id=metadata_obj.id,
                                    content_type=metadata_type).first()
        if src:
            raise ValidationError('Source:%s already exists for this resource.' % kwargs['derived_from'])
        return Source.objects.create(derived_from=kwargs['derived_from'], content_object=metadata_obj)

    @classmethod
    def update(cls, element_id, **kwargs):
        """Change the derived_from value of an existing source element."""
        try:
            src = Source.objects.get(id=element_id)
        except ObjectDoesNotExist:
            # .get() raises on a missing id; re-raise with the intended message
            raise ObjectDoesNotExist("No source element was found for the provided id:%s" % element_id)
        if 'derived_from' not in kwargs:
            raise ValidationError('Value for source is missing.')
        if src.derived_from != kwargs['derived_from']:
            # check this new derived_from not already exists
            if Source.objects.filter(derived_from__iexact=kwargs['derived_from'], object_id=src.object_id,
                                     content_type__pk=src.content_type.id).count() > 0:
                # bug fix: the message used kwargs['value'], a key that never
                # exists for Source, so a duplicate update raised KeyError
                # instead of the intended ValidationError
                raise ValidationError('Source:%s already exists for this resource.' % kwargs['derived_from'])
            src.derived_from = kwargs['derived_from']
            src.save()

    @classmethod
    def remove(cls, element_id):
        """Delete the source element with id element_id."""
        try:
            src = Source.objects.get(id=element_id)
        except ObjectDoesNotExist:
            raise ObjectDoesNotExist("No source element was found for id:%s." % element_id)
        src.delete()
class Rights(AbstractMetaDataElement):
    """Rights metadata element - a license statement and/or license URL."""
    term = 'Rights'
    statement = models.TextField(null=True, blank=True)
    url = models.URLField(null=True, blank=True)

    class Meta:
        # the "unique_together" setting enforces only one rights element per resource
        unique_together = ("content_type", "object_id")

    @classmethod
    def create(cls, **kwargs):
        """Create the rights element; requires 'metadata_obj' plus 'statement' and/or 'url'."""
        if 'metadata_obj' not in kwargs:
            raise ValidationError('Metadata instance for which rights element to be created is missing.')
        metadata_obj = kwargs['metadata_obj']
        # at least one of statement/url must be supplied; pass through whichever
        # are present (collapses the original three create branches into one)
        field_values = {k: kwargs[k] for k in ('statement', 'url') if k in kwargs}
        if not field_values:
            raise ValidationError("Statement and/or URL of rights is missing.")
        return Rights.objects.create(content_object=metadata_obj, **field_values)

    @classmethod
    def update(cls, element_id, **kwargs):
        """Update statement and/or url of the rights element with id element_id."""
        try:
            rights = Rights.objects.get(id=element_id)
        except ObjectDoesNotExist:
            # .get() raises on a missing id; re-raise with the intended message
            raise ObjectDoesNotExist("No rights element was found for the provided id:%s" % element_id)
        if 'statement' in kwargs:
            rights.statement = kwargs['statement']
        if 'url' in kwargs:
            rights.url = kwargs['url']
        rights.save()

    @classmethod
    def remove(cls, element_id):
        """Rights elements are permanent - deletion is not allowed."""
        raise ValidationError("Rights element of a resource can't be deleted.")
class AbstractResource(ResourcePermissionsMixin):
    """
    All hydroshare objects inherit from this mixin. It defines things that must
    be present to be considered a hydroshare resource. Additionally, all
    hydroshare resources should inherit from Page. This gives them what they
    need to be represented in the Mezzanine CMS.
    In some cases, it is possible that the order of inheritence matters. Best
    practice dictates that you list pages.Page first and then other classes:
    class MyResourceContentType(pages.Page, hs_core.AbstractResource):
    ...
    """
    # the user who most recently modified this resource
    last_changed_by = models.ForeignKey(User,
                        help_text='The person who last changed the resource',
                        related_name='last_changed_%(app_label)s_%(class)s',
                        null=True
                        )
    dublin_metadata = generic.GenericRelation(
        'dublincore.QualifiedDublinCoreElement',
        help_text='The dublin core metadata of the resource'
    )
    files = generic.GenericRelation('hs_core.ResourceFile', help_text='The files associated with this resource')
    bags = generic.GenericRelation('hs_core.Bags', help_text='The bagits created from versions of this resource')
    # random 32-char hex id generated per instance at creation time
    short_id = models.CharField(max_length=32, default=lambda: uuid4().hex, db_index=True)
    doi = models.CharField(max_length=1024, blank=True, null=True, db_index=True,
                           help_text='Permanent identifier. Never changes once it\'s been set.')
    comments = CommentsField()
    # this is to establish a relationship between a resource and
    # any metadata container object (e.g., CoreMetaData object)
    object_id = models.PositiveIntegerField(null=True, blank=True)
    content_type = models.ForeignKey(ContentType, null=True, blank=True)
    content_object = generic.GenericForeignKey('content_type', 'object_id')
    # this property needs to be overriden by any specific resource type
    # that needs additional metadata elements on top of core metadata data elements
    @property
    def metadata(self):
        md = CoreMetaData() # only this line needs to be changed when you override
        return self._get_metadata(md)
    def _get_metadata(self, metatdata_obj):
        # Return the metadata container already linked to this resource, or
        # persist and link the freshly constructed one passed in.
        md_type = ContentType.objects.get_for_model(metatdata_obj)
        res_type = ContentType.objects.get_for_model(self)
        # re-read content_object from the database to pick up the current link
        self.content_object = res_type.model_class().objects.get(id=self.id).content_object
        if self.content_object:
            return self.content_object
        else:
            # no container yet: save the new one and attach it to this resource
            metatdata_obj.save()
            self.content_type = md_type
            self.object_id = metatdata_obj.id
            self.save()
            return metatdata_obj
    # NOTE(review): method name has a typo ('capabilites') but renaming it
    # would break external callers
    def extra_capabilites(self):
        """This is not terribly well defined yet, but should return at the least a JSON serializable object of URL
        endpoints where extra self-describing services exist and can be queried by the user in the form of
        { "name" : "endpoint" }
        """
        return None
    class Meta:
        abstract = True
        unique_together = ("content_type", "object_id")
def get_path(instance, filename):
    """Upload-path callback: place the file under the owning resource's short_id directory."""
    resource_dir = instance.content_object.short_id
    return os.path.join(resource_dir, filename)
class ResourceFile(models.Model):
    # A single content file belonging to a resource (linked via generic FK).
    object_id = models.PositiveIntegerField()
    content_type = models.ForeignKey(ContentType)
    content_object = generic.GenericForeignKey('content_type', 'object_id')
    # stored under <resource short_id>/<filename> (see get_path); backed by
    # iRODS storage when settings.USE_IRODS is enabled, default storage otherwise
    resource_file = models.FileField(upload_to=get_path, storage=IrodsStorage() if getattr(settings,'USE_IRODS', False) else DefaultStorage())
class Bags(models.Model):
    # A BagIt archive generated from a version of a resource (linked via generic FK).
    object_id = models.PositiveIntegerField()
    content_type = models.ForeignKey(ContentType)
    content_object = generic.GenericForeignKey('content_type', 'object_id')
    # backed by iRODS storage when settings.USE_IRODS is enabled
    bag = models.FileField(upload_to='bags', storage=IrodsStorage() if getattr(settings,'USE_IRODS', False) else DefaultStorage(), null=True) # actually never null
    timestamp = models.DateTimeField(default=now, db_index=True)
    class Meta:
        # newest bag first
        ordering = ['-timestamp']
class GenericResource(Page, RichText, AbstractResource):
    # The catch-all concrete resource type: a Mezzanine page with rich text
    # plus the hydroshare AbstractResource behavior.
    class Meta:
        verbose_name = 'Generic Hydroshare Resource'
    # Explicitly delegate the permission checks to AbstractResource so its
    # implementations are used rather than whichever base the MRO would pick.
    def can_add(self, request):
        return AbstractResource.can_add(self, request)
    def can_change(self, request):
        return AbstractResource.can_change(self, request)
    def can_delete(self, request):
        return AbstractResource.can_delete(self, request)
    def can_view(self, request):
        return AbstractResource.can_view(self, request)
# This model has a one-to-one relation with the AbstractResource model
class CoreMetaData(models.Model):
    # Container model that aggregates all core (Dublin Core style) metadata
    # elements of a resource through generic relations.
    #from django.contrib.sites.models import Site
    _domain = 'hydroshare.org' #Site.objects.get_current() # this one giving error since the database does not have a related table called 'django_site'
    XML_HEADER = '''<?xml version="1.0"?>
<!DOCTYPE rdf:RDF PUBLIC "-//DUBLIN CORE//DCMES DTD 2002/07/31//EN"
"http://dublincore.org/documents/2002/07/31/dcmes-xml/dcmes-xml-dtd.dtd">'''
    # namespace prefixes used when serializing metadata to RDF/XML
    NAMESPACES = {'rdf':"http://www.w3.org/1999/02/22-rdf-syntax-ns#",
                  'dc': "http://purl.org/dc/elements/1.1/",
                  'dcterms':"http://purl.org/dc/terms/",
                  'hsterms': "http://hydroshare.org/terms/"}
    DATE_FORMAT = "YYYY-MM-DDThh:mm:ssTZD"
    HYDROSHARE_URL = 'http://%s' % _domain
    id = models.AutoField(primary_key=True)
    # relations with a leading underscore are restricted to a single element
    # per resource and are exposed through read-only properties
    _description = generic.GenericRelation(Description) # resource abstract
    _title = generic.GenericRelation(Title)
    creators = generic.GenericRelation(Creator)
    contributors = generic.GenericRelation(Contributor)
    dates = generic.GenericRelation(Date)
    coverages = generic.GenericRelation(Coverage)
    formats = generic.GenericRelation(Format)
    identifiers = generic.GenericRelation(Identifier)
    _language = generic.GenericRelation(Language)
    subjects = generic.GenericRelation(Subject)
    sources = generic.GenericRelation(Source)
    relations = generic.GenericRelation(Relation)
    _rights = generic.GenericRelation(Rights)
    _type = generic.GenericRelation(Type)
    _publisher = generic.GenericRelation(Publisher)
    _resource = generic.GenericRelation(GenericResource)
    # Convenience accessors for the one-per-resource elements stored behind
    # underscore-prefixed generic relations; each returns the single related
    # element instance, or None when the element has not been created yet.
    @property
    def title(self):
        return self._title.all().first()
    @property
    def description(self):
        return self._description.all().first()
    @property
    def language(self):
        return self._language.all().first()
    @property
    def resource(self):
        return self._resource.all().first()
    @property
    def rights(self):
        return self._rights.all().first()
    @property
    def type(self):
        return self._type.all().first()
    @property
    def publisher(self):
        return self._publisher.all().first()
@classmethod
def get_supported_element_names(cls):
return ['Description',
'Creator',
'Contributor',
'Coverage',
'Format',
'Rights',
'Title',
'Type',
'Date',
'Identifier',
'Language',
'Subject',
'Source',
'Relation',
'Publisher']
# this method needs to be overriden by any subclass of this class
def delete_all_elements(self):
if self.title: self.title.delete()
if self.description: self.description.delete()
if self.language: self.language.delete()
if self.rights: self.rights.delete()
if self.publisher: self.publisher.delete()
if self.type: self.type.delete()
self.creators.all().delete()
self.contributors.all().delete()
self.dates.all().delete()
self.identifiers.all().delete()
self.coverages.all().delete()
self.formats.all().delete()
self.subjects.all().delete()
self.sources.all().delete()
self.relations.all().delete()
def get_xml(self):
from lxml import etree
import arrow
RDF_ROOT = etree.Element('{%s}RDF' % self.NAMESPACES['rdf'], nsmap=self.NAMESPACES)
# create the Description element -this is not exactly a dc element
rdf_Description = etree.SubElement(RDF_ROOT, '{%s}Description' % self.NAMESPACES['rdf'])
resource_uri = self.HYDROSHARE_URL + '/resource/' + self.resource.short_id
rdf_Description.set('{%s}about' % self.NAMESPACES['rdf'], resource_uri)
# create the title element
if self.title:
dc_title = etree.SubElement(rdf_Description, '{%s}title' % self.NAMESPACES['dc'])
dc_title.text = self.title.value
# create the type element
if self.type:
dc_type = etree.SubElement(rdf_Description, '{%s}type' % self.NAMESPACES['dc'])
dc_type.set('{%s}resource' % self.NAMESPACES['rdf'], self.type.url)
# create the Description element (we named it as Abstract to differentiate from the parent "Description" element)
if self.description:
dc_description = etree.SubElement(rdf_Description, '{%s}description' % self.NAMESPACES['dc'])
dc_des_rdf_Desciption = etree.SubElement(dc_description, '{%s}Description' % self.NAMESPACES['rdf'])
dcterms_abstract = etree.SubElement(dc_des_rdf_Desciption, '{%s}abstract' % self.NAMESPACES['dcterms'])
dcterms_abstract.text = self.description.abstract
# use all creators associated with this metadata object to
# generate creator xml elements
for creator in self.creators.all():
self._create_person_element(etree, rdf_Description, creator)
for contributor in self.contributors.all():
self._create_person_element(etree, rdf_Description, contributor)
for coverage in self.coverages.all():
dc_coverage = etree.SubElement(rdf_Description, '{%s}coverage' % self.NAMESPACES['dc'])
cov_dcterm = '{%s}' + coverage.type
dc_coverage_dcterms = etree.SubElement(dc_coverage, cov_dcterm % self.NAMESPACES['dcterms'])
rdf_coverage_value = etree.SubElement(dc_coverage_dcterms, '{%s}value' % self.NAMESPACES['rdf'])
if coverage.type == 'period':
cov_value = 'name=%s; start=%s; end=%s; scheme=W3C-DTF' %(coverage.value['name'],
arrow.get(coverage.value['start'].format(self.DATE_FORMAT)),
arrow.get(coverage.value['start'].format(self.DATE_FORMAT)))
elif coverage.type == 'point':
cov_value = 'name=%s; east=%s; north=%s' %(coverage.value['name'],
coverage.value['east'],
coverage.value['north'])
else: # this is box type
cov_value = 'name=%s; northlimit=%s; eastlimit=%s; southlimit=%s; westlimit=%s' \
%(coverage.value['name'], coverage.value['northlimit'], coverage.value['eastlimit'],
coverage.value['southlimit'], coverage.value['westlimit'])
rdf_coverage_value.text = cov_value
for dt in self.dates.all():
dc_date = etree.SubElement(rdf_Description, '{%s}date' % self.NAMESPACES['dc'])
dc_term = '{%s}'+ dt.type
dc_date_dcterms = etree.SubElement(dc_date, dc_term % self.NAMESPACES['dcterms'])
rdf_date_value = etree.SubElement(dc_date_dcterms, '{%s}value' % self.NAMESPACES['rdf'])
if dt.type != 'valid':
rdf_date_value.text = arrow.get(dt.start_date).format(self.DATE_FORMAT)
else:
if dt.end_date:
rdf_date_value.text = "start=%s; end=%s" % (arrow.get(dt.start_date).format(self.DATE_FORMAT), arrow.get(dt.end_date).format(self.DATE_FORMAT))
else:
rdf_date_value.text = arrow.get(dt.start_date).format(self.DATE_FORMAT)
for fmt in self.formats.all():
dc_format = etree.SubElement(rdf_Description, '{%s}format' % self.NAMESPACES['dc'])
dc_format.text = fmt.value
for res_id in self.identifiers.all():
dc_identifier = etree.SubElement(rdf_Description, '{%s}identifier' % self.NAMESPACES['dc'])
dc_id_rdf_Description = etree.SubElement(dc_identifier, '{%s}Description' % self.NAMESPACES['rdf'])
id_hsterm = '{%s}' + res_id.name
hsterms_hs_identifier = etree.SubElement(dc_id_rdf_Description, id_hsterm % self.NAMESPACES['hsterms'])
hsterms_hs_identifier.text = res_id.url
if self.language:
dc_lang = etree.SubElement(rdf_Description, '{%s}language' % self.NAMESPACES['dc'])
dc_lang.text = self.language.code
if self.publisher:
dc_publisher = etree.SubElement(rdf_Description, '{%s}publisher' % self.NAMESPACES['dc'])
dc_pub_rdf_Description = etree.SubElement(dc_publisher, '{%s}Description' % self.NAMESPACES['rdf'])
hsterms_pub_name = etree.SubElement(dc_pub_rdf_Description, '{%s}publisherName' % self.NAMESPACES['hsterms'])
hsterms_pub_name.text = self.publisher.name
hsterms_pub_url = etree.SubElement(dc_pub_rdf_Description, '{%s}publisherURL' % self.NAMESPACES['hsterms'])
hsterms_pub_url.set('{%s}resource' % self.NAMESPACES['rdf'], self.publisher.url)
for rel in self.relations.all():
dc_relation = etree.SubElement(rdf_Description, '{%s}relation' % self.NAMESPACES['dc'])
dc_rel_rdf_Description = etree.SubElement(dc_relation, '{%s}Description' % self.NAMESPACES['rdf'])
rel_dcterm = '{%s}' + rel.type
dcterms_type = etree.SubElement(dc_rel_rdf_Description, rel_dcterm % self.NAMESPACES['dcterms'])
# check if the relation value starts with 'http://' or 'https://'
if rel.value.lower().find('http://') == 0 or rel.value.lower().find('https://') == 0:
dcterms_type.set('{%s}resource' % self.NAMESPACES['rdf'], rel.value)
else:
dcterms_type.text = rel.value
for src in self.sources.all():
dc_source = etree.SubElement(rdf_Description, '{%s}source' % self.NAMESPACES['dc'])
dc_source_rdf_Description = etree.SubElement(dc_source, '{%s}Description' % self.NAMESPACES['rdf'])
dcterms_derived_from = etree.SubElement(dc_source_rdf_Description, '{%s}isDerivedFrom' % self.NAMESPACES['dcterms'])
# if the source value starts with 'http://' or 'https://' add value as an attribute
if src.derived_from.lower().find('http://') == 0 or src.derived_from.lower().find('https://') == 0:
dcterms_derived_from.set('{%s}resource' % self.NAMESPACES['rdf'], src.derived_from)
else:
dcterms_derived_from.text = src.derived_from
if self.rights:
dc_rights = etree.SubElement(rdf_Description, '{%s}rights' % self.NAMESPACES['dc'])
dc_rights_rdf_Description = etree.SubElement(dc_rights, '{%s}Description' % self.NAMESPACES['rdf'])
hsterms_statement = etree.SubElement(dc_rights_rdf_Description, '{%s}rightsStatement' % self.NAMESPACES['hsterms'])
hsterms_statement.text = self.rights.statement
if self.rights.url:
hsterms_url = etree.SubElement(dc_rights_rdf_Description, '{%s}URL' % self.NAMESPACES['hsterms'])
hsterms_url.set('{%s}resource' % self.NAMESPACES['rdf'], self.rights.url)
for sub in self.subjects.all():
dc_subject = etree.SubElement(rdf_Description, '{%s}subject' % self.NAMESPACES['dc'])
if sub.value.lower().find('http://') == 0 or sub.value.lower().find('https://') == 0:
dc_subject.set('{%s}resource' % self.NAMESPACES['rdf'], sub.value)
else:
dc_subject.text = sub.value
return self.XML_HEADER + '\n' + etree.tostring(RDF_ROOT, pretty_print=True)
def _create_person_element(self, etree, parent_element, person):
if isinstance(person, Creator):
dc_person = etree.SubElement(parent_element, '{%s}creator' % self.NAMESPACES['dc'])
else:
dc_person = etree.SubElement(parent_element, '{%s}contributor' % self.NAMESPACES['dc'])
dc_person_rdf_Description = etree.SubElement(dc_person, '{%s}Description' % self.NAMESPACES['rdf'])
hsterms_name = etree.SubElement(dc_person_rdf_Description, '{%s}name' % self.NAMESPACES['hsterms'])
hsterms_name.text = person.name
if person.description:
dc_person_rdf_Description.set('{%s}about' % self.NAMESPACES['rdf'], person.description)
if isinstance(person, Creator):
hsterms_creatorOrder = etree.SubElement(dc_person_rdf_Description, '{%s}creatorOrder' % self.NAMESPACES['hsterms'])
hsterms_creatorOrder.text = str(person.order)
if person.organization:
hsterms_organization = etree.SubElement(dc_person_rdf_Description, '{%s}organization' % self.NAMESPACES['hsterms'])
hsterms_organization.text = person.organization
if person.email:
hsterms_email = etree.SubElement(dc_person_rdf_Description, '{%s}email' % self.NAMESPACES['hsterms'])
hsterms_email.text = person.email
if person.address:
hsterms_address = etree.SubElement(dc_person_rdf_Description, '{%s}address' % self.NAMESPACES['hsterms'])
hsterms_address.text = person.address
if person.phone:
hsterms_phone = etree.SubElement(dc_person_rdf_Description, '{%s}phone' % self.NAMESPACES['hsterms'])
hsterms_phone.set('{%s}resource' % self.NAMESPACES['rdf'], 'tel:' + person.phone)
if person.homepage:
hsterms_homepage = etree.SubElement(dc_person_rdf_Description, '{%s}homepage' % self.NAMESPACES['hsterms'])
hsterms_homepage.set('{%s}resource' % self.NAMESPACES['rdf'], person.homepage)
if person.researcherID:
hsterms_researcherID = etree.SubElement(dc_person_rdf_Description, '{%s}researcherID' % self.NAMESPACES['hsterms'])
hsterms_researcherID.set('{%s}resource' % self.NAMESPACES['rdf'], person.researcherID)
if person.researchGateID:
hsterms_researchGateID = etree.SubElement(dc_person_rdf_Description, '{%s}researchGateID' % self.NAMESPACES['hsterms'])
hsterms_researchGateID.set('{%s}resource' % self.NAMESPACES['rdf'], person.researcherID)
def create_element(self, element_model_name, **kwargs):
element_model_name = element_model_name.lower()
if not self._is_valid_element(element_model_name):
raise ValidationError("Metadata element type:%s is not one of the supported in core metadata elements."
% element_model_name)
model = ContentType.objects.get(model=element_model_name)
if model:
if issubclass(model.model_class(), AbstractMetaDataElement):
kwargs['metadata_obj']= self
element = model.model_class().create(**kwargs)
element.save()
else:
raise ValidationError("Metadata element type:%s is not supported." % element_model_name)
else:
raise ValidationError("Metadata element type:%s is not supported." % element_model_name)
def update_element(self, element_model_name, element_id, **kwargs):
element_model_name = element_model_name.lower()
model_type = ContentType.objects.get(model=element_model_name)
if model_type:
if issubclass(model_type.model_class(), AbstractMetaDataElement):
kwargs['metadata_obj']= self
model_type.model_class().update(element_id, **kwargs)
else:
raise ValidationError("Metadata element type:%s is not supported." % element_model_name)
else:
raise ValidationError("Metadata element type:%s is not supported." % element_model_name)
def delete_element(self, element_model_name, element_id):
element_model_name = element_model_name.lower()
model_type = ContentType.objects.get(model=element_model_name)
if model_type:
if issubclass(model_type.model_class(), AbstractMetaDataElement):
model_type.model_class().remove(element_id)
else:
raise ValidationError("Metadata element type:%s is not supported." % element_model_name)
else:
raise ValidationError("Metadata element type:%s is not supported." % element_model_name)
def _is_valid_element(self, element_name):
allowed_elements = [el.lower() for el in self.get_supported_element_names()]
return element_name.lower() in allowed_elements
def resource_processor(request, page):
    """Mezzanine page processor: expose the resource and its Dublin Core terms."""
    extra = page_permissions_page_processor(request, page)
    content_model = page.get_content_model()
    extra['res'] = content_model
    extra['dc'] = dict((meta.term_name, meta.content)
                       for meta in content_model.dublin_metadata.all())
    return extra
processor_for(GenericResource)(resource_processor)
@processor_for('resources')
def resource_listing_processor(request, page):
    """Page processor for the 'resources' page: list resources by access level."""
    # NOTE(review): owned_resources and editable_resources run the identical
    # query (owners__pk) -- editable_resources likely should filter on an
    # edit-permission relation instead; confirm against the resource model.
    owned_resources = list(GenericResource.objects.filter(owners__pk=request.user.pk))
    editable_resources = list(GenericResource.objects.filter(owners__pk=request.user.pk))
    viewable_resources = list(GenericResource.objects.filter(public=True))
    # locals() exposes the three lists (plus request/page) to the template context.
    return locals()
@receiver(post_save)
def resource_creation_signal_handler(sender, instance, created, **kwargs):
    """Create initial dublin core elements"""
    if isinstance(instance, AbstractResource):
        if created:
            from hs_core.hydroshare import utils
            import json
            # Seed the science metadata from the freshly created resource.
            instance.metadata.create_element('title', value=instance.title)
            if instance.content:
                instance.metadata.create_element('description', abstract=instance.content)
            else:
                instance.metadata.create_element('description', abstract=instance.description)
            # TODO: With the current VM the get_user_info() method fails. So we can't get the resource uri for
            # the user now.
            # creator_dict = users.get_user_info(instance.creator)
            # instance.metadata.create_element('creator', name=instance.creator.get_full_name(),
            #                                  email=instance.creator.email,
            #                                  description=creator_dict['resource_uri'])
            instance.metadata.create_element('creator', name=instance.creator.get_full_name(), email=instance.creator.email)
            # TODO: The element 'Type' can't be created as we do not have an URI for specific resource types yet
            instance.metadata.create_element('date', type='created', start_date=instance.created)
            instance.metadata.create_element('date', type='modified', start_date=instance.updated)
            # res_json = utils.serialize_science_metadata(instance)
            # res_dict = json.loads(res_json)
            instance.metadata.create_element('identifier', name='hydroShareIdentifier', url='http://hydroshare.org/resource{0}{1}'.format('/', instance.short_id))
        else:
            resource_update_signal_handler(sender, instance, created, **kwargs)
    # NOTE(review): isinstance(AbstractResource, sender) has its arguments
    # reversed -- it asks whether the class object AbstractResource is an
    # instance of sender, which is effectively always False, so the legacy
    # dublin_metadata block below never runs. The intended check was probably
    # issubclass(sender, AbstractResource); confirm before enabling, since the
    # block references instance.user which may not exist on the model.
    if isinstance(AbstractResource, sender):
        if created:
            instance.dublin_metadata.create(term='T', content=instance.title)
            instance.dublin_metadata.create(term='CR', content=instance.user.username)
            if instance.last_updated_by:
                instance.dublin_metadata.create(term='CN', content=instance.last_updated_by.username)
            instance.dublin_metadata.create(term='DT', content=instance.created)
            if instance.content:
                instance.dublin_metadata.create(term='AB', content=instance.content)
        else:
            resource_update_signal_handler(sender, instance, created, **kwargs)
def resource_update_signal_handler(sender, instance, created, **kwargs):
    """Add dublin core metadata based on the person who just updated the resource. Handle publishing too...

    NOTE(review): currently a no-op stub; the summary above describes intended
    behavior that has not been implemented yet.
    """
@receiver(post_save, sender=User)
def user_creation_signal_handler(sender, instance, created, **kwargs):
    """On first save of a new User, grant staff status and author group membership."""
    if created:
        # NOTE(review): this makes EVERY new user a Django staff member, which
        # grants admin-site access -- confirm this is intentional and not a
        # development-only shortcut.
        if not instance.is_staff:
            instance.is_staff = True
            instance.save()
        instance.groups.add(Group.objects.get(name='Hydroshare Author'))
| {
"content_hash": "1b9965e071319dae528c04404d78b291",
"timestamp": "",
"source": "github",
"line_count": 1850,
"max_line_length": 163,
"avg_line_length": 44.821621621621624,
"alnum_prop": 0.5816811384466956,
"repo_name": "hydroshare/hydroshare_temp",
"id": "3a707eaea08c289ec25aa4ab53f66872c07b1d09",
"size": "82920",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "hs_core/models.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "173515"
},
{
"name": "C++",
"bytes": "4136"
},
{
"name": "CSS",
"bytes": "228598"
},
{
"name": "CoffeeScript",
"bytes": "34267"
},
{
"name": "JavaScript",
"bytes": "736373"
},
{
"name": "Python",
"bytes": "1870088"
},
{
"name": "Shell",
"bytes": "5335"
},
{
"name": "XSLT",
"bytes": "790987"
}
],
"symlink_target": ""
} |
class Solution(object):
    def selfDividingNumbers(self, left, right):
        """Return all self-dividing numbers in [left, right].

        A number is self-dividing if every decimal digit is non-zero and
        divides the number evenly.

        :type left: int
        :type right: int
        :rtype: List[int]
        """
        def is_self_dividing(num):
            n = num
            while n > 0:
                digit = n % 10
                # A zero digit, or a digit that does not divide num, disqualifies it.
                if digit == 0 or num % digit != 0:
                    return False
                # BUGFIX-portability: floor division; the original `n /= 10`
                # breaks under Python 3 (float division never reaches 0 exactly).
                n //= 10
            return True

        return [num for num in range(left, right + 1) if is_self_dividing(num)]
| {
"content_hash": "ec3c38eb142d93163934dc8cb4364ade",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 52,
"avg_line_length": 27.15,
"alnum_prop": 0.44014732965009207,
"repo_name": "tudennis/LeetCode---kamyu104-11-24-2015",
"id": "d3428690d778c719b891ae467560f2b573e9ff04",
"size": "1173",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Python/self-dividing-numbers.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "980817"
},
{
"name": "Go",
"bytes": "1907"
},
{
"name": "Java",
"bytes": "8367"
},
{
"name": "Python",
"bytes": "1365305"
},
{
"name": "SQLPL",
"bytes": "822"
},
{
"name": "Shell",
"bytes": "3218"
}
],
"symlink_target": ""
} |
"""
Generic sequence prediction script using CoNLL format.
"""
from __future__ import print_function
import sys
from seqlearn.datasets import load_conll
from seqlearn.evaluation import bio_f_score
from seqlearn.perceptron import StructuredPerceptron
from sklearn.metrics import accuracy_score
def features(sentence, i):
    """Yield baseline named-entity recognition features for sentence[i].

    These can easily be changed to do POS tagging or chunking instead.
    """
    token = sentence[i]
    yield "word:%s" % token.lower()
    # Capitalization is a strong NER cue.
    if token[0].isupper():
        yield "CAP"
    # Context features: previous and next token, when they exist.
    if i > 0:
        yield "word-1:%s" % sentence[i - 1].lower()
    if i + 1 < len(sentence):
        yield "word+1:%s" % sentence[i + 1].lower()
def describe(X, lengths):
    """Print a one-line dataset summary: number of sequences and tokens."""
    n_tokens = X.shape[0]
    print("{0} sequences, {1} tokens.".format(len(lengths), n_tokens))
if __name__ == "__main__":
    print(__doc__)
    # Require a training file and a test file on the command line.
    if len(sys.argv) < 3:
        print("Usage: {0} training_file test_file".format(sys.argv[0]))
        sys.exit(1)
    print("Loading training data...", end=" ")
    X_train, y_train, lengths_train = load_conll(sys.argv[1], features)
    describe(X_train, lengths_train)
    print("Loading test data...", end=" ")
    X_test, y_test, lengths_test = load_conll(sys.argv[2], features)
    describe(X_test, lengths_test)
    # Train a structured perceptron and evaluate on the held-out data.
    clf = StructuredPerceptron(verbose=True, max_iter=10)
    print("Training %s" % clf)
    clf.fit(X_train, y_train, lengths_train)
    y_pred = clf.predict(X_test, lengths_test)
    print("Accuracy: %.3f" % (100 * accuracy_score(y_test, y_pred)))
    print("CoNLL F1: %.3f" % (100 * bio_f_score(y_test, y_pred)))
| {
"content_hash": "c2680a3f3033d3b7689f61a6284f9c6e",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 72,
"avg_line_length": 28.25423728813559,
"alnum_prop": 0.6412717456508699,
"repo_name": "fgregg/seqlearn",
"id": "557173304058ffed7ba9b2f33388446e57cb2031",
"size": "1726",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/conll.py",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
} |
"""
Internally used submodule that contains runtime patches
to the standard multiprocessing library.
.. warning: Do not import this submodule in your code!
This is only meant to be imported from the
processmanager submodule!
"""
__license__ = """
GoLismero 2.0 - The web knife - Copyright (C) 2011-2014
Golismero project site: https://github.com/golismero
Golismero project mail: contact@golismero-project.com
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
__all__ = []
#------------------------------------------------------------------------------
# This patches the multiprocessing module in runtime to prevent bogus error
# messages to be shown when Control-C is pressed by the user.
#------------------------------------------------------------------------------
import sys
from os import path
from signal import signal, SIGINT
# Signal handler that kills the current process.
# This should trigger a chain reaction when Control-C is pressed.
def __suicide(signum, frame):
    # exit(1) raises SystemExit in this process; children that installed the
    # same handler terminate themselves in turn.
    exit(1)
signal(SIGINT, __suicide)
# Mimics a file object well enough to suppress print messages.
# Also faster than opening a file descriptor for /dev/null.
class __FakeFile(object):
    """Minimal file-like sink that silently discards all output."""
    def write(self, s):
        """Discard the given string."""
    def flush(self):
        """No-op; nothing is buffered."""
    def close(self):
        """No-op; there is no underlying resource."""
# Get the original values for stdout and stderr,
# so undo() can restore them after the fake files were installed.
__orig_stdout, __orig_stderr = sys.stdout, sys.stderr
# Our wrapper to the bootstrap function.
# It replaces stdout and stderr with a fake file object,
# and sets a signal handler to commit suicide on Control-C.
def __patched_bootstrap(self):
    signal(SIGINT, __suicide)
    saved_streams = (sys.stdout, sys.stderr)
    sys.stdout = __FakeFile()
    sys.stderr = __FakeFile()
    try:
        return __original_bootstrap(self)
    finally:
        sys.stdout, sys.stderr = saved_streams
if sys.platform == "win32":
    # Wraps around get_command_line()
    # to set our own main() function in new processes.
    def __patched_get_command_line():
        # Calculate the command line that would normally be used.
        args = _original_get_command_line()
        # Make sure everything looks like we expect it.
        # A new version of the multiprocessing module might break this,
        # but since Python 2.7 is in bugfix mode, this is unlikely.
        # We're checking anyway, just in case.
        assert args[-3] == '-c', \
            "internal error, are you sure this is Python 2.7?"
        assert args[-2] == 'from multiprocessing.forking import main; main()',\
            "internal error, are you sure this is Python 2.7?"
        assert args[-1] == '--multiprocessing-fork', \
            "internal error, are you sure this is Python 2.7?"
        # Calculate the new startup code to run on new processes.
        # It calls our main function instead of the real one.
        here = path.abspath(path.join(path.split(__file__)[0], "..", ".."))
        tpl = path.join(here, "thirdparty_libs")
        code = "import sys; "
        if path.exists(tpl):
            # Escape quotes so the paths survive embedding inside the
            # single-quoted strings of the -c startup code.
            ehere = here.replace("'", "\\'").replace('"', '\\x%.2x' % ord('"'))
            etpl = tpl.replace("'", "\\'").replace('"', '\\x%.2x' % ord('"'))
            code += "here = '%s'; " % ehere
            code += "sys.path.insert(0, here); "
            code += "tpl = '%s'; " % etpl
            code += "sys.path.insert(0, tpl); "
        code += "from golismero.patches.mp import main; main()"
        # Patch the command line and return it.
        args[-2] = code
        return args

    # Our wrapper works around a sanity check of multiprocessing.
    # At one point it checks the main module wasn't already loaded.
    # However the check fails because our launch script (golismero.py)
    # results in the same module name as ourselves (golismero/__init__.py).
    def __patched_prepare(data):
        golismero = sys.modules["golismero"]
        try:
            del sys.modules["golismero"]
            return _original_prepare(data)
        finally:
            sys.modules["golismero"] = golismero
# Our wrapper to the main() function of new processes.
# It replaces stdout and stderr with a fake file object.
def main():
    # NOTE: multiprocessing.forking exists only in Python 2; the assertions
    # above confirm this module targets Python 2.7.
    from multiprocessing.forking import main as original_main
    from multiprocessing import Process
    # Patch the bootstrap method in this child too, if not already patched.
    if Process._bootstrap.__name__ != "__patched_bootstrap":
        global __original_bootstrap
        __original_bootstrap = Process._bootstrap
        Process._bootstrap = __patched_bootstrap
    # Silence all output while the real multiprocessing main() runs.
    stdout, stderr = sys.stdout, sys.stderr
    sys.stdout, sys.stderr = __FakeFile(), __FakeFile()
    try:
        original_main()
    finally:
        sys.stdout, sys.stderr = stdout, stderr
# Apply the patches only when imported as a library (not as the entry point
# of a freshly spawned child process, and not when run directly).
if __name__ != "__parents_main__" and __name__ != "__main__":
    # Patch the bootstrap function for child processes.
    from multiprocessing import Process
    if Process._bootstrap.__name__ != "__patched_bootstrap":
        __original_bootstrap = Process._bootstrap
        Process._bootstrap = __patched_bootstrap
    if sys.platform == "win32":
        from multiprocessing import forking
        # Patch the function that calculates
        # the command line for child processes.
        from multiprocessing.forking import get_command_line as \
            _original_get_command_line
        forking.get_command_line = __patched_get_command_line
        # Patch the function that prepares the data for child processes.
        from multiprocessing.forking import prepare as _original_prepare
        forking.prepare = __patched_prepare
    # Undoes the patches. This is required to be able to reload GoLismero.
    def undo():
        Process._bootstrap = __original_bootstrap
        if sys.platform == "win32":
            forking.get_command_line = _original_get_command_line
            forking.prepare = _original_prepare
        # Restore the real streams only if the fakes are still installed
        # (compared by class name so reloaded module instances still match).
        if __orig_stdout is not None and hasattr(sys.stdout, "__class__") and \
           sys.stdout.__class__.__name__ == __FakeFile.__name__:
            sys.stdout = __orig_stdout
        if __orig_stderr is not None and hasattr(sys.stderr, "__class__") and \
           sys.stderr.__class__.__name__ == __FakeFile.__name__:
            sys.stderr = __orig_stderr
| {
"content_hash": "c7e6eda1a11c5dc7fb8939b49e47d723",
"timestamp": "",
"source": "github",
"line_count": 173,
"max_line_length": 79,
"avg_line_length": 39.85549132947977,
"alnum_prop": 0.6278462654097172,
"repo_name": "JeyZeta/Dangerous",
"id": "d9c107706a03a200b07add25d71e6386290157e8",
"size": "6942",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "Dangerous/Golismero/golismero/patches/mp.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "13260"
},
{
"name": "C",
"bytes": "12851"
},
{
"name": "C++",
"bytes": "3174"
},
{
"name": "CSS",
"bytes": "267451"
},
{
"name": "HTML",
"bytes": "2686153"
},
{
"name": "JavaScript",
"bytes": "1356956"
},
{
"name": "Lua",
"bytes": "14436"
},
{
"name": "Makefile",
"bytes": "11190"
},
{
"name": "Objective-C",
"bytes": "998"
},
{
"name": "PHP",
"bytes": "619"
},
{
"name": "PLpgSQL",
"bytes": "536"
},
{
"name": "Perl",
"bytes": "263365"
},
{
"name": "Python",
"bytes": "16669102"
},
{
"name": "Roff",
"bytes": "9828"
},
{
"name": "Ruby",
"bytes": "503"
},
{
"name": "Shell",
"bytes": "6691"
}
],
"symlink_target": ""
} |
"""Support for RESTful API sensors."""
import json
import logging
from xml.parsers.expat import ExpatError
from jsonpath import jsonpath
import voluptuous as vol
import xmltodict
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN, PLATFORM_SCHEMA
from homeassistant.const import (
CONF_DEVICE_CLASS,
CONF_FORCE_UPDATE,
CONF_NAME,
CONF_RESOURCE,
CONF_RESOURCE_TEMPLATE,
CONF_UNIT_OF_MEASUREMENT,
CONF_VALUE_TEMPLATE,
)
from homeassistant.exceptions import PlatformNotReady
import homeassistant.helpers.config_validation as cv
from . import async_get_config_and_coordinator, create_rest_data_from_config
from .const import CONF_JSON_ATTRS, CONF_JSON_ATTRS_PATH
from .entity import RestEntity
from .schema import RESOURCE_SCHEMA, SENSOR_SCHEMA
_LOGGER = logging.getLogger(__name__)
# Sensor platform schema: shared REST resource options plus sensor options.
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({**RESOURCE_SCHEMA, **SENSOR_SCHEMA})
# At least one of 'resource' or 'resource_template' must be configured.
PLATFORM_SCHEMA = vol.All(
    cv.has_at_least_one_key(CONF_RESOURCE, CONF_RESOURCE_TEMPLATE), PLATFORM_SCHEMA
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
    """Set up the RESTful sensor."""
    # Must update the sensor now (including fetching the rest resource) to
    # ensure it's updating its state.
    if discovery_info is not None:
        # Set up through the rest integration: reuse its shared coordinator.
        conf, coordinator, rest = await async_get_config_and_coordinator(
            hass, SENSOR_DOMAIN, discovery_info
        )
    else:
        # Stand-alone platform config: build a dedicated rest data object.
        conf = config
        coordinator = None
        rest = create_rest_data_from_config(hass, conf)
        await rest.async_update()
    if rest.data is None:
        # No data yet -- ask Home Assistant to retry platform setup later.
        raise PlatformNotReady
    name = conf.get(CONF_NAME)
    unit = conf.get(CONF_UNIT_OF_MEASUREMENT)
    device_class = conf.get(CONF_DEVICE_CLASS)
    json_attrs = conf.get(CONF_JSON_ATTRS)
    json_attrs_path = conf.get(CONF_JSON_ATTRS_PATH)
    value_template = conf.get(CONF_VALUE_TEMPLATE)
    force_update = conf.get(CONF_FORCE_UPDATE)
    resource_template = conf.get(CONF_RESOURCE_TEMPLATE)
    if value_template is not None:
        value_template.hass = hass
    async_add_entities(
        [
            RestSensor(
                coordinator,
                rest,
                name,
                unit,
                device_class,
                value_template,
                json_attrs,
                force_update,
                resource_template,
                json_attrs_path,
            )
        ],
    )
class RestSensor(RestEntity):
    """Implementation of a REST sensor."""

    def __init__(
        self,
        coordinator,
        rest,
        name,
        unit_of_measurement,
        device_class,
        value_template,
        json_attrs,
        force_update,
        resource_template,
        json_attrs_path,
    ):
        """Initialize the REST sensor."""
        super().__init__(
            coordinator, rest, name, device_class, resource_template, force_update
        )
        # Last rendered state value; None until the first successful update.
        self._state = None
        self._unit_of_measurement = unit_of_measurement
        self._value_template = value_template
        # Keys to copy from the JSON payload into the entity attributes.
        self._json_attrs = json_attrs
        self._attributes = None
        # Optional jsonpath expression locating the attribute dict in the payload.
        self._json_attrs_path = json_attrs_path

    @property
    def unit_of_measurement(self):
        """Return the unit the value is expressed in."""
        return self._unit_of_measurement

    @property
    def state(self):
        """Return the state of the device."""
        return self._state

    @property
    def device_state_attributes(self):
        """Return the state attributes."""
        return self._attributes

    def _update_from_rest_data(self):
        """Update state from the rest data."""
        value = self.rest.data
        _LOGGER.debug("Data fetched from resource: %s", value)
        if self.rest.headers is not None:
            # If the http request failed, headers will be None
            content_type = self.rest.headers.get("content-type")
            # Convert XML payloads to JSON so the attribute and template
            # handling below works the same for both content types.
            if content_type and (
                content_type.startswith("text/xml")
                or content_type.startswith("application/xml")
                or content_type.startswith("application/xhtml+xml")
            ):
                try:
                    value = json.dumps(xmltodict.parse(value))
                    _LOGGER.debug("JSON converted from XML: %s", value)
                except ExpatError:
                    _LOGGER.warning(
                        "REST xml result could not be parsed and converted to JSON"
                    )
                    _LOGGER.debug("Erroneous XML: %s", value)
        if self._json_attrs:
            # Reset attributes on every update; a parse failure leaves {}.
            self._attributes = {}
            if value:
                try:
                    json_dict = json.loads(value)
                    if self._json_attrs_path is not None:
                        json_dict = jsonpath(json_dict, self._json_attrs_path)
                    # jsonpath will always store the result in json_dict[0]
                    # so the next line happens to work exactly as needed to
                    # find the result
                    if isinstance(json_dict, list):
                        json_dict = json_dict[0]
                    if isinstance(json_dict, dict):
                        attrs = {
                            k: json_dict[k] for k in self._json_attrs if k in json_dict
                        }
                        self._attributes = attrs
                    else:
                        _LOGGER.warning(
                            "JSON result was not a dictionary"
                            " or list with 0th element a dictionary"
                        )
                except ValueError:
                    _LOGGER.warning("REST result could not be parsed as JSON")
                    _LOGGER.debug("Erroneous JSON: %s", value)
            else:
                _LOGGER.warning("Empty reply found when expecting JSON data")
        if value is not None and self._value_template is not None:
            # Render the configured template against the raw payload.
            value = self._value_template.async_render_with_possible_json_value(
                value, None
            )
        self._state = value
| {
"content_hash": "8719d5a450e45a7180db419ffee7ebe3",
"timestamp": "",
"source": "github",
"line_count": 182,
"max_line_length": 87,
"avg_line_length": 33.582417582417584,
"alnum_prop": 0.5700261780104712,
"repo_name": "partofthething/home-assistant",
"id": "0699d9dc07c5a65b5cc65948c3ff0cfbb4d73327",
"size": "6112",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "homeassistant/components/rest/sensor.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1720"
},
{
"name": "Python",
"bytes": "31051838"
},
{
"name": "Shell",
"bytes": "4832"
}
],
"symlink_target": ""
} |
# Compare locally computed SNIP values against published 2010 SNIP values
# and write the matched (journal, ours, published) rows to CSV.
# NOTE: Python 2 script (print statements, 'wb' csv mode).
__author__ = 'Sukrit'
import pandas as pd
import csv
f = open('../output/both_journal_list.txt', 'r') #reading list of journals present in Aminer and Elesevier
x = f.readlines()
f.close()
bothjs = []
for line in x:
    bothjs.append(line.rstrip()) # list of common journals, removing '\n'
# NOTE(review): bothjs is built but never used below -- dead code?
# OUR SNIP
our_SNIP = pd.read_csv('../output/SNIP_all_journals.csv',usecols=[1,3]) # reading SNIP values calculated by US
our_SNIP.sort_values('Jname',inplace = True) #sorting alphabetically
our_SNIP = our_SNIP[ our_SNIP['SNIP'] != 0 ]
our_SNIP2 = pd.DataFrame()
our_SNIP2 = our_SNIP2.append(our_SNIP,ignore_index = True) #resetting index value
our_SNIP = our_SNIP2
oSNIPnames = our_SNIP['Jname']
#print our_SNIP
# SNIP
SNIP = pd.read_csv("../data/journal_SNIP_values.csv")
journals = SNIP['Source Title'] #taking only 'Source Title' column
SNIPlist = []
'''
for jname in oSNIPnames :
    for name in journals :
        if jname == name :
            SNIPlist.append(jname)
'''
SNIP_2 = pd.DataFrame()
#now we corroborate SNIP values.
for name in oSNIPnames :
    SNIP_2 = SNIP_2.append(SNIP[ SNIP['Source Title'] == name ],ignore_index = True) # copy all SNIP/IPP values for which we have citations in Aminer
#print SNIP_2
SNIP_2010 = SNIP_2[['Source Title','2010 SNIP']].copy() # copying 'Source Title' and '2010 SNIP' columns to new df
SNIP_2010 = SNIP_2010.fillna(0) # replacing NaN values with 0
#print SNIP_2010
#print our_SNIP
xarray = []
xarray2 = []
yarray = []
yarray2 = []
names = []
# Keep only journals where both SNIP values are present and non-zero.
for name in oSNIPnames :
    a = our_SNIP['SNIP'][ our_SNIP['Jname']== name ].values
    b = SNIP_2010['2010 SNIP'][ SNIP_2010['Source Title'] == name ].values
    #if ( a != 0 and a <10 and b != 0 and b < 10 and a > 0.01 ) :
    if ( a != 0 and b!= 0 ) :
        xarray.append(our_SNIP['SNIP'][our_SNIP['Jname']== name ].values)
        yarray.append(SNIP_2010['2010 SNIP'][SNIP_2010['Source Title'] == name ].values)
        names.append(name)
# Flatten single-element numpy arrays to plain floats.
# NOTE(review): xarray2/yarray2 are filled below but never used -- dead code?
yarray = [float(i) for i in yarray]
for item in yarray :
    yarray2.append(item)
xarray = [float(i) for i in xarray]
for item in xarray :
    xarray2.append(item)
print len(xarray)
print len(yarray)
print len(names)
print "\n\n"
data = [names,xarray,yarray]
with open('../data/SNIP_ourSNIP_ALL.csv','wb') as f:
    out = csv.writer(f, delimiter=',',quoting=csv.QUOTE_ALL)
    out.writerows(zip(*data))
"content_hash": "1cc52fe7092ddc66f5fd4907121427d5",
"timestamp": "",
"source": "github",
"line_count": 94,
"max_line_length": 153,
"avg_line_length": 25.170212765957448,
"alnum_prop": 0.6538461538461539,
"repo_name": "SciBase-Project/internationality-journals",
"id": "9ec1b1ccb17c47789ee87f6645c49f434b4e4694",
"size": "2366",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "src/SNIPvsourSNIPv2.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "89805"
}
],
"symlink_target": ""
} |
"""
autoexam api
"""
import os, subprocess, random, jinja2
import sys
import autoexam
from os import system as run
master = 'master.txt'
# project_path = None
# region Helpers
def get_flag(kwargs, flag):
    """Translate a boolean kwarg into a CLI flag string.

    Returns '--<flag>' (underscores converted to dashes) when *flag* is
    truthy in *kwargs*, and '' otherwise.
    """
    # BUG FIX: the original compared with "== True", which silently ignored
    # other truthy values (e.g. 1); plain truthiness is the intended test.
    if kwargs.get(flag):
        return '--' + flag.replace('_', '-')
    return ''
def get_value(kwargs, field, default=None):
    """Translate a kwarg into a '--<field> <value>' CLI option string.

    Returns '' when the value (falling back to *default*) is None.
    """
    value = kwargs.get(field, default)
    # BUG FIX: identity check against None instead of "!=", which would
    # invoke __ne__ on arbitrary values (PEP 8).
    if value is not None:
        return '--' + field.replace('_', '-') + ' ' + str(value)
    return ''
def save_master(master_text):
    # Persist the master exam text to the module-level `master` file path.
    # NOTE(review): Python 2 idiom -- writes UTF-8 encoded bytes through a
    # text-mode handle; under Python 3 this write would raise TypeError.
    with open(master, 'w') as fp:
        fp.write(master_text.encode('utf-8'))
# endregion Helpers
# region Autoexam Methods
def init(name, folder='.', template='', **kwargs):
    """
    Create a new autoexam project by shelling out to ``autoexam new``.

    kwargs:
    =======
    @folder: project folder
    @election: (...)
    @questionnaire: (...)
    """
    # NOTE(review): `folder` is interpolated into a shell command -- unsafe
    # if the value ever comes from untrusted input.
    folder = '-f "%s"'%folder
    # folder = get_value(kwargs, 'folder', '.')
    # NOTE(review): `election`/`questionnaire` (and the `template` parameter)
    # are computed but unused since the command switched from the commented
    # 'init' form to 'new' below.
    election = get_flag(kwargs, 'election')
    questionnaire = get_flag(kwargs, 'questionnaire')
    # params = [autoexam, 'init', folder, election, questionnaire, '"%s"'%name]
    params = ['autoexam', 'new', folder, '"%s"'%name]
    cmd = ' '.join(params)
    # Returns the shell exit status from os.system.
    return os.system(cmd)
def gen(**kwargs):
    """
    Build and run the ``autoexam gen`` command line.

    kwargs:
    =======
    @seed, @tests_count, @answers_per_page, @title, @answer_template,
    @master_template, @text_template, @questions_value: valued options
    @dont_shuffle_tags, @sort_questions, @dont_shuffle_options,
    @dont_generate_text, @election, @questionnaire,
    @dont_generate_master: boolean flags
    """
    # Valued options as (name, default); order matters for the command line.
    valued = [
        ('seed', random.randint(0, 2**64 - 1)),
        ('tests_count', 1),
        ('answers_per_page', 1),
        ('title', None),
        ('answer_template', None),
        ('master_template', None),
        ('text_template', None),
        ('questions_value', None),
    ]
    # Boolean flags, emitted after the valued options.
    flags = ['dont_shuffle_tags', 'sort_questions', 'dont_shuffle_options',
             'dont_generate_text', 'election', 'questionnaire',
             'dont_generate_master']
    params = ['autoexam', 'gen']
    params += [get_value(kwargs, name, default) for name, default in valued]
    params += [get_flag(kwargs, name) for name in flags]
    return run(' '.join(params))
def grade(**kwargs):
    """
    TODO
    """
    # Delegates to the `autoexam grade` CLI; kwargs are currently ignored.
    run('autoexam grade')
# endregion Autoexam methods
def validate_project(project):
    """Validate every question in *project*.

    Raises Exception when a question has no tags, has empty text, or
    contains two answers with identical text. Returns None on success.
    """
    for question in project.questions:
        if not question.tag_names:
            # TODO: Tr (translator)
            raise Exception("There must be at least one tag per question.")
        # BUG FIX: the original used `question.text is ""` -- an identity
        # comparison against a string literal that only works by accident of
        # CPython string interning; use equality instead.
        if question.text == "":
            raise Exception("There is at least one question with empty text.")
        seen = set()
        for answer in question.answers:
            if answer.text in seen:
                raise Exception("There cannot be two answers with the same text in the same question.")
            seen.add(answer.text)
def render_master(project, template_path):
    # Render the Jinja2 template at `template_path` with `project` in scope.
    # NOTE(review): Python 2 idiom -- `.read().decode('utf-8')`; under
    # Python 3 `str` has no `.decode`. The file handle is never closed.
    return jinja2.Template(open(template_path).read().decode('utf-8')).render(project=project)
# def add_scan_event_subscriber(obj):
# autoexam.add_scan_subscriber(obj)
#
#
# def remove_scan_event_subscriber(obj):
# autoexam.remove_scan_subscriber(obj)
def scan(_args):
    # Run `autoexam scan` against the first configured camera, writing the
    # results to generated/last/results.json. Returns True on success and
    # False when the subprocess exits non-zero.
    try:
        import subprocess
        print subprocess.check_output('autoexam scan \
        --autowrite \
        -c {camera_number}\
        -o {outfile}'.format(
            camera_number=_args.cameras[0],
            outfile='generated/last/results.json'
        ).split()
        )
        return True
    except subprocess.CalledProcessError:
        return False
    # autoexam.scan(_args)
# autoexam.scan(_args)
| {
"content_hash": "acf123fa229f8c2d92afd02f4ee5c5b3",
"timestamp": "",
"source": "github",
"line_count": 157,
"max_line_length": 103,
"avg_line_length": 27.54140127388535,
"alnum_prop": 0.6006012950971323,
"repo_name": "matcom/autoexam",
"id": "9bfd63cbc30507b9927a8feb13c7a76a94e5f210",
"size": "4364",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "qtui/api.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "3013"
},
{
"name": "CoffeeScript",
"bytes": "1370"
},
{
"name": "HTML",
"bytes": "156269"
},
{
"name": "JavaScript",
"bytes": "694"
},
{
"name": "Python",
"bytes": "194390"
},
{
"name": "Ruby",
"bytes": "62429"
},
{
"name": "Shell",
"bytes": "1064"
},
{
"name": "TeX",
"bytes": "7844"
}
],
"symlink_target": ""
} |
from django.views.generic import ListView
from django.http import HttpResponse
from models import Job
from geoq.maps.models import FeatureType
from django.shortcuts import get_object_or_404
from datetime import datetime
from pytz import timezone
from webcolors import name_to_hex, normalize_hex
class JobKML(ListView):
    """Render a GeoQ Job (features + work-cell AOIs) as a KML document.

    NOTE(review): Python 2 era code -- uses ``dict.has_key`` and the
    pre-Django-1.7 ``mimetype`` kwarg of HttpResponse; confirm target
    versions before porting.
    """
    model = Job

    def get(self, request, *args, **kwargs):
        # Look up the job and its work-cell counters.
        job = get_object_or_404(Job, pk=self.kwargs.get('pk'))
        feature_types = FeatureType.objects.all()
        aoi_count = job.total_count()
        aoi_complete = job.complete_count()
        aoi_work = job.in_work_count()
        cookie_url_trailer = get_cookie_trailer(request)

        description = 'Job #'+str(job.id)+': '+str(job.name)+'\n'+str(job.project.name)+'\n'

        # Short-circuit: a job with no cells yields a minimal KML document.
        if aoi_count == 0:
            output = '<?xml version="1.0" encoding="UTF-8"?>\n'
            output += '<kml xmlns="http://www.opengis.net/kml/2.2">\n'
            output += '  <Document>\n'
            output += '    <name>Empty Job</name>\n'
            output += '    <description>'+description+'</description>\n'
            output += '  </Document>\n'
            output += '</kml>\n'
            return HttpResponse(output, mimetype="application/vnd.google-earth.kml+xml", status=200)

        # Completion percentages for the document name/description.
        # NOTE(review): comp_pct stays a float while the others are ints --
        # this asymmetry is preserved because it changes the rendered text.
        aoi_comp_pct = (100 * float(aoi_complete)/float(aoi_count))
        aoi_work_pct = int(100 * float(aoi_work)/float(aoi_count))
        aoi_tot_pct = int(100 * float(aoi_work+aoi_complete)/float(aoi_count))
        doc_name = 'GeoQ C:'+str(aoi_complete)+', W:'+str(aoi_work)+', Tot:'+str(aoi_count)+' ['+str(aoi_tot_pct)+'%]'
        description = description + 'Complete Cells: ' + str(aoi_complete) + ' ['+str(aoi_comp_pct)+'%], In Work: ' + str(aoi_work) + ' ['+str(aoi_work_pct)+'%], Total: ' + str(aoi_count)

        # Fixed styles for the three work-cell states.
        output = '<?xml version="1.0" encoding="UTF-8"?>\n'
        output += '<kml xmlns="http://www.opengis.net/kml/2.2">\n'
        output += '  <Document>\n'
        output += '    <name>'+doc_name+'</name>\n'
        output += '    <description>'+description+'</description>\n'
        output += '    <Style id="geoq_inwork">\n'
        output += '      <LineStyle>\n'
        output += '        <width>4</width>\n'
        output += '        <color>7f0186cf</color>\n'
        output += '      </LineStyle>\n'
        output += '      <PolyStyle>\n'
        output += '        <fill>0</fill>\n'
        output += '        <outline>1</outline>\n'
        output += '      </PolyStyle>\n'
        output += '    </Style>\n'
        output += '    <Style id="geoq_complete">\n'
        output += '      <LineStyle>\n'
        output += '        <width>3</width>\n'
        output += '        <color>7f0101cf</color>\n'
        output += '      </LineStyle>\n'
        output += '      <PolyStyle>\n'
        output += '        <fill>0</fill>\n'
        output += '        <outline>1</outline>\n'
        output += '      </PolyStyle>\n'
        output += '    </Style>\n'
        output += '    <Style id="geoq_unassigned">\n'
        output += '      <LineStyle>\n'
        output += '        <width>2</width>\n'
        output += '        <color>7f00ff00</color>\n'
        output += '      </LineStyle>\n'
        output += '      <PolyStyle>\n'
        output += '        <fill>0</fill>\n'
        output += '        <outline>1</outline>\n'
        output += '      </PolyStyle>\n'
        output += '    </Style>\n'

        # One per-feature-type style derived from the stored Leaflet-like
        # style dict (color / weight / iconUrl keys).
        for feature in feature_types:
            output += '    <Style id="geoq_'+str(feature.id)+'">\n'
            out_color = '7f0066ff'

            if feature.style == None:
                output += '    </Style>\n'
                continue

            if feature.style.has_key('color'):
                color = feature.style['color']

                #convert to a kml-recognized color (KML is aabbggrr order)
                if color[0:1] == '#' and len(color) == 4:
                    color = normalize_hex(color)
                try:
                    c = name_to_hex(color)
                    out_color = '7f' + c[5:7] + c[3:5] + c[1:3]
                except Exception:
                    out_color = '7f0066ff'

                output += '      <PolyStyle>\n'
                output += '        <color>'+out_color+'</color>\n'
                output += '        <colorMode>normal</colorMode>\n'
                output += '        <fill>1</fill>\n'
                output += '        <outline>1</outline>\n'
                output += '      </PolyStyle>\n'
            if feature.style.has_key('weight'):
                output += '      <LineStyle>\n'
                output += '        <width>'+str(feature.style['weight'])+'</width>\n'
                if feature.style.has_key('color'):
                    output += '        <color>'+out_color+'</color>\n'
                output += '      </LineStyle>\n'
            if feature.style.has_key('iconUrl'):
                icon_url = str(feature.style['iconUrl'])
                if not icon_url.startswith("http"):
                    icon_url = request.build_absolute_uri(icon_url)
                else:
                    icon_url += cookie_url_trailer

                output += '      <IconStyle>\n'
                output += '        <Icon>\n'
                output += '          <href>'+icon_url+'</href>\n'
                output += '        </Icon>\n'
                output += '      </IconStyle>\n'
            output += '    </Style>\n'

        # Features grouped into folders by template name; the SQL extra()
        # join orders them so each template's placemarks are contiguous.
        # locations = job.feature_set.all().order_by('template')
        locations = job.feature_set.all()\
            .extra(tables=['maps_featuretype'])\
            .extra(where=['maps_featuretype.id=maps_feature.template_id'])\
            .order_by('maps_featuretype.name')

        last_template = ""
        skip_the_first = True
        template_has_started = False
        for loc in locations:
            template_name = str(loc.template.name)
            # Open a new folder whenever the template changes.
            if template_name != last_template:
                if skip_the_first:
                    skip_the_first = False
                else:
                    output += '    </Folder>\n'
                output += '    <Folder><name>'+template_name+'</name>\n'
                last_template = template_name
                template_has_started = True

            analyst_name = str(loc.analyst.username)
            dtg = str(loc.created_at)
            job_id = str(loc.job.id)

            #TODO: Add links to Jobs and Projects
            datetime_obj = datetime.strptime(dtg, "%Y-%m-%d %H:%M:%S.%f+00:00")
            datetime_obj_utc = datetime_obj.replace(tzinfo=timezone('UTC'))

            date_time = datetime_obj_utc.strftime('%Y-%m-%dT%H:%M:%SZ')
            date_time_desc = datetime_obj_utc.strftime('%Y-%m-%d %H:%M:%S')

            desc = 'Posted by '+analyst_name+' at '+date_time_desc+' Zulu (UTC) in Job #'+job_id

            #TODO: Add more details
            #TODO: Add links to linked objects

            #Simplify polygons to reduce points in complex shapes
            if loc.the_geom.num_coords > 0: #skip empty locations
                simplegeom = loc.the_geom.simplify(0.0002)
                if simplegeom.num_coords > 0:
                    kml = str(loc.the_geom.simplify(0.0002).kml)
                else:
                    kml = str(loc.the_geom.kml)
                if '<Polygon><outerBoundaryIs><LinearRing><coordinates>' in kml:
                    # Clamp to ground and merge multi-polygon fragments.
                    add_text = '<altitudeMode>clampToGround</altitudeMode>'
                    kml = kml.replace('<coordinates>', add_text+'<coordinates>')
                    kml = kml.replace('</outerBoundaryIs></Polygon><Polygon><outerBoundaryIs><LinearRing>', '')

                output += '    <Placemark><name>'+template_name+'</name>\n'
                output += '      <TimeStamp><when>'+date_time+'</when></TimeStamp>\n'
                output += '      <description>'+desc+'</description>\n'
                output += '      <styleUrl>#geoq_'+str(loc.template.id)+'</styleUrl>\n'
                output += '      '+str(kml)+'\n'
                output += '    </Placemark>\n'
        if template_has_started:
            output += '    </Folder>\n'

        # Work-cell polygons, one folder, styled by status.
        output += '    <Folder><name>Work Cells</name>\n'
        aois = job.aois.order_by('status')
        for aoi in aois:
            style = 'complete'
            if aoi.status == 'In work':
                style = 'inwork'
            if aoi.status == 'Unassigned':
                style = 'unassigned'
            aoi_name = "#"+str(aoi.id)+", "+str(aoi.status)+" - Priority:"+str(aoi.priority)
            kml = str(aoi.polygon.simplify(0.0002).kml)
            if '<Polygon><outerBoundaryIs><LinearRing><coordinates>' in kml:
                add_text = '<tessellate>1</tessellate><altitudeMode>clampToGround</altitudeMode>'
                kml = kml.replace('<coordinates>', add_text+'<coordinates>')

            output += '    <Placemark>\n'
            output += '      <name>'+aoi_name+'</name>\n'
            output += '      <styleUrl>#geoq_'+style+'</styleUrl>\n'
            output += '      '+kml+'\n'
            output += '    </Placemark>\n'
        output += '    </Folder>\n'
        output += '  </Document>\n'
        output += '</kml>'

        return HttpResponse(output, mimetype="application/vnd.google-earth.kml+xml", status=200)
def get_cookie_trailer(request):
    """Build a query-string trailer forwarding selected SSO cookies.

    Returns e.g. '?iPlanetDirectoryPro=<value>' when a watched cookie is
    present on *request*, or '' when none are set.
    """
    cookies_to_look_for = ['iPlanetDirectoryPro']  # TODO: Pull this from an admin setting
    # BUG FIX: the original concatenated multiple found cookies back to back
    # with no separator; join with '&' so the result stays a valid query
    # string if more cookies are ever added to the list.
    found = []
    for cook in cookies_to_look_for:
        cookie = request.COOKIES.get(cook, None)
        if cookie:
            found.append(cook + "=" + cookie)
    if found:
        return "?" + "&".join(found)
    return ""
class JobKMLNetworkLink(ListView):
    """Serve a KML NetworkLink wrapper that auto-refreshes the JobKML feed.

    NOTE(review): uses the pre-Django-1.7 ``mimetype`` kwarg of
    HttpResponse; confirm the target Django version before porting.
    """
    model = Job

    def get(self, request, *args, **kwargs):
        id = self.kwargs.get('pk')
        job = get_object_or_404(Job, pk=id)

        setting_zoom_auto = True #TODO: Pull from settings
        settings_refresh_every = 90 #TODO: Pull from settings

        # Forward SSO cookies to the linked .kml URL so Earth clients stay
        # authenticated across the refresh requests.
        cookie_url_trailer = get_cookie_trailer(request)
        url = request.build_absolute_uri('/geoq/api/job/'+id+'.kml' + cookie_url_trailer)

        aoi_count = job.total_count()
        aoi_complete = job.complete_count()
        aoi_work = job.in_work_count()
        # Guard each percentage against division by zero on empty jobs.
        aoi_comp_pct = int(100 * float(aoi_complete)/float(aoi_count)) if aoi_count > 0 else 0
        aoi_work_pct = int(100 * float(aoi_work)/float(aoi_count)) if aoi_count > 0 else 0
        aoi_tot_pct = int(100 * float(aoi_work+aoi_complete)/float(aoi_count)) if aoi_count > 0 else 0

        doc_name = 'GeoQ C:'+str(aoi_complete)+', W:'+str(aoi_work)+', Tot:'+str(aoi_count)+' ['+str(aoi_tot_pct)+'%]'
        description = 'Job #'+str(job.id)+': '+str(job.name)+'\n'+str(job.project.name)+'\n'
        description = description + 'Complete Cells: ' + str(aoi_complete) + ' ['+str(aoi_comp_pct)+'%], In Work: ' + str(aoi_work) + ' ['+str(aoi_work_pct)+'%], Total: ' + str(aoi_count)

        output = '<?xml version="1.0" encoding="UTF-8"?>\n'
        output += '<kml xmlns="http://www.opengis.net/kml/2.2">\n'
        output += '  <Folder>\n'
        output += '    <name>GeoQ Worked Cells</name>\n'
        output += '    <visibility>1</visibility>\n'
        output += '    <open>1</open>\n'
        output += '    <description>Work progress from GeoQ</description>\n'
        output += '    <NetworkLink>\n'
        output += '      <name>'+doc_name+'</name>\n'
        output += '      <visibility>1</visibility>\n'
        output += '      <open>1</open>\n'
        output += '      <description>'+description+'</description>\n'
        output += '      <refreshVisibility>0</refreshVisibility>\n'
        if setting_zoom_auto:
            output += '      <flyToView>1</flyToView>\n'
        output += '      <Link>\n'
        output += '        <href>'+url+'</href>\n'
        if settings_refresh_every:
            output += '        <refreshInterval>'+str(settings_refresh_every)+'</refreshInterval>\n' # Refresh every n seconds
            output += '        <refreshMode>onInterval</refreshMode>\n'
        output += '        <viewRefreshTime>5</viewRefreshTime>\n' # Also refresh after viewscreen movement
        output += '        <viewRefreshMode>onStop</viewRefreshMode>\n'
        output += '      </Link>\n'
        output += '    </NetworkLink>\n'
        output += '  </Folder>\n'
        output += '</kml>'

        return HttpResponse(output, mimetype="application/vnd.google-earth.kml+xml", status=200)
| {
"content_hash": "38f5370d66ae1afe2d4f7b08ab93520c",
"timestamp": "",
"source": "github",
"line_count": 286,
"max_line_length": 187,
"avg_line_length": 43.61188811188811,
"alnum_prop": 0.5138298725246533,
"repo_name": "meilinger/geoq",
"id": "d25b60e3340dc02055c184baa423706e010a5863",
"size": "12473",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "geoq/core/kml_view.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "70513"
},
{
"name": "HTML",
"bytes": "189512"
},
{
"name": "JavaScript",
"bytes": "1409402"
},
{
"name": "Python",
"bytes": "672959"
}
],
"symlink_target": ""
} |
import util
import json
class docker_wrapper:
    """Thin wrapper around the ``docker`` CLI.

    Shells out via util.subp and parses the command output; raises a plain
    Exception whenever a docker command exits non-zero.
    """

    def __init__(self):
        # Probe the client/daemon; non-zero exit means docker is unreachable.
        cmd = ['docker', '-v']
        r = util.subp(cmd)
        if r.return_code != 0:
            raise Exception('Unable to communicate with the docker server')

    def inspect(self, obj_id):
        # returns dict representation of "docker inspect ID"
        cmd = ['docker', 'inspect', obj_id]
        r = util.subp(cmd)
        if r.return_code != 0:
            raise Exception('Unable to inspect object: %s' % obj_id)
        return json.loads(r.stdout)

    def _info_field(self, prefix, error_msg):
        # Shared "docker info" scanner: return the value of the first line
        # whose stripped text starts with *prefix* (e.g. "Pool Name: foo").
        cmd = ['docker', 'info']
        r = util.subp(cmd)
        if r.return_code != 0:
            raise Exception('Unable to get docker info')
        for line in r.stdout.strip().split('\n'):
            # Stripping first also tolerates indented fields in newer
            # docker versions (the original only stripped for the pool).
            if line.strip().startswith(prefix):
                pre, _, post = line.partition(':')
                return post.strip()
        raise Exception(error_msg)

    def driver(self):
        # returns the storage driver docker is using
        return self._info_field('Storage Driver',
                                'Unable to get docker storage driver')

    def dm_pool(self):
        # ONLY FOR DEVICEMAPPER
        # returns the docker-pool docker is using
        return self._info_field('Pool Name',
                                'Unable to get docker pool name')

    def _id_list(self, stdout):
        # BUG FIX: ''.split('\n') == [''] -- filter empty ids so an empty
        # image/container list does not trigger "docker inspect ''".
        return [i for i in stdout.strip().split('\n') if i]

    def images(self, allI=False, quiet=False):
        # returns a list of dicts, each dict is an image's information
        # except when quiet is used - which returns a list of image ids
        # dict keys: Created, Labels, VirtualSize, ParentId, RepoTags,
        # RepoDigests, Id, Size
        cmd = ['docker', 'images', '-q', '--no-trunc']
        if allI:
            cmd.append("-a")
        r = util.subp(cmd)
        if r.return_code != 0:
            raise Exception('Unable to get docker images')
        images = self._id_list(r.stdout)
        if quiet:
            return images
        ims = []
        for i in images:
            inspec = self.inspect(i)[0]
            dic = {
                'Created': inspec['Created'],
                # Config can be null in inspect output; default Labels to {}.
                'Labels': inspec['Config']['Labels'] if inspec['Config'] else {},
                'VirtualSize': inspec['VirtualSize'],
                'ParentId': inspec['Parent'],
                'RepoTags': inspec['RepoTags'],
                'RepoDigests': inspec['RepoDigests'],
                'Id': inspec['Id'],
                'Size': inspec['Size'],
            }
            ims.append(dic)
        return ims

    def containers(self, allc=False, quiet=False):
        # returns a list of dicts, each dict is a container's information
        # except when quiet is used - which returns a list of container ids
        # dict keys: Status, Created, Image, Labels, NetworkSettings,
        # HostConfig, ImageID, Command, Names, Id, Ports
        cmd = ['docker', 'ps', '-q']
        if allc:
            cmd.append("-a")
        r = util.subp(cmd)
        if r.return_code != 0:
            raise Exception('Unable to get docker containers')
        containers = self._id_list(r.stdout)
        if quiet:
            return containers
        conts = []
        for i in containers:
            inspec = self.inspect(i)[0]
            dic = {
                'Status': inspec['State']['Status'],
                'Created': inspec['Created'],
                'Image': inspec['Config']['Image'],
                'Labels': inspec['Config']['Labels'],
                'NetworkSettings': inspec['NetworkSettings'],
                'HostConfig': inspec['HostConfig'],
                'ImageID': inspec['Image'],
                'Command': inspec['Config']['Cmd'],
                'Names': inspec['Name'],
                'Id': inspec['Id'],
                'Ports': inspec['NetworkSettings']['Ports'],
            }
            conts.append(dic)
        return conts
| {
"content_hash": "a4268ce49984d555153b8cfe1e3dfb69",
"timestamp": "",
"source": "github",
"line_count": 131,
"max_line_length": 75,
"avg_line_length": 34.79389312977099,
"alnum_prop": 0.48222904782799475,
"repo_name": "wcmitchell/insights-core",
"id": "d1f741f533d2e4d240c096b2dfe4440deb7f65fd",
"size": "4578",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "insights/client/docker_wrap.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Clojure",
"bytes": "19339"
},
{
"name": "Jupyter Notebook",
"bytes": "91793"
},
{
"name": "Python",
"bytes": "3414025"
},
{
"name": "Shell",
"bytes": "2274"
}
],
"symlink_target": ""
} |
'''Autogenerated by xml_generate script, do not edit!'''
from OpenGL import platform as _p, arrays
# Code generation uses this
from OpenGL.raw.GL import _types as _cs
# End users want this...
from OpenGL.raw.GL._types import *
from OpenGL.raw.GL import _errors
from OpenGL.constant import Constant as _C
import ctypes
# NOTE: autogenerated module (see file header) -- regenerate, do not edit.
_EXTENSION_NAME = 'GL_SGIX_texture_add_env'
def _f( function ):
    # Bind a raw GL entry point for this extension with the standard error checker.
    return _p.createFunction( function,_p.PLATFORM.GL,'GL_SGIX_texture_add_env',error_checker=_errors._error_checker)
GL_TEXTURE_ENV_BIAS_SGIX=_C('GL_TEXTURE_ENV_BIAS_SGIX',0x80BE)
| {
"content_hash": "c32564548d7a7fff81daa451367c77e9",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 117,
"avg_line_length": 37.666666666666664,
"alnum_prop": 0.7469026548672566,
"repo_name": "alexus37/AugmentedRealityChess",
"id": "2d5531388a20552b4bf780dc858a03416c8eca85",
"size": "565",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "pythonAnimations/pyOpenGLChess/engineDirectory/oglc-env/lib/python2.7/site-packages/OpenGL/raw/GL/SGIX/texture_add_env.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "158062"
},
{
"name": "C++",
"bytes": "267993"
},
{
"name": "CMake",
"bytes": "11319"
},
{
"name": "Fortran",
"bytes": "3707"
},
{
"name": "Makefile",
"bytes": "14618"
},
{
"name": "Python",
"bytes": "12813086"
},
{
"name": "Roff",
"bytes": "3310"
},
{
"name": "Shell",
"bytes": "3855"
}
],
"symlink_target": ""
} |
import os
import sublime
def get_plugin_path():
    '''
    Return the absolute path of the C++YouCompleteMe plugin directory
    inside the Sublime packages folder.
    '''
    return os.path.abspath(
        os.path.join(sublime.packages_path(), 'C++YouCompleteMe'))
def get_ycmd_path():
    '''
    Return the path of the ycmd server module directory, honoring the
    'ycmd_path' setting and defaulting to <plugin>/server.
    '''
    settings = sublime.load_settings('C++YouCompleteMe.sublime-settings')
    base = settings.get('ycmd_path',
                        os.path.join(get_plugin_path(), 'server'))
    return os.path.join(base, 'ycmd')
def get_python_path():
    '''
    Return the configured python executable ('python_path' setting,
    defaulting to "python").
    '''
    return sublime.load_settings(
        'C++YouCompleteMe.sublime-settings').get('python_path', 'python')
def get_file_path(filepath=None):
    '''
    Return the path of the file being edited: the given *filepath* if
    truthy, otherwise the active view's file name, otherwise 'tmpfile.cpp'.
    '''
    path = filepath or active_view().file_name()
    return path or 'tmpfile.cpp'
def get_error_panel_syntax_file():
    '''
    Return the syntax file used for the error panel, honoring the
    'error_panel_syntax_file' setting.
    '''
    settings = sublime.load_settings('C++YouCompleteMe.sublime-settings')
    default = os.path.join(get_plugin_path(), 'ErrorPanel.tmLanguage')
    return settings.get('error_panel_syntax_file', default)
def check_highlight_on_save():
    '''
    Return whether errors should be highlighted on save
    ('highlight_errors_on_save' setting, default False).
    '''
    return sublime.load_settings(
        'C++YouCompleteMe.sublime-settings').get(
            'highlight_errors_on_save', False)
def check_select_after_goto():
    '''
    Return whether the target element should be selected after a goto
    command ('select_after_goto' setting, default False).
    '''
    return sublime.load_settings(
        'C++YouCompleteMe.sublime-settings').get('select_after_goto', False)
def check_ycmd_server():
    '''
    Check if ycmd server exists.
    '''
    # True when the configured ycmd directory is present on disk.
    return os.path.exists(get_ycmd_path())
def find_recursive(path):
    '''
    Find .ycm_extra_conf.py in the directory containing *path* and in
    every directory above it; return its path, or None if absent.
    '''
    current = os.path.dirname(path)
    while True:
        candidate = os.path.join(current, '.ycm_extra_conf.py')
        if os.path.exists(candidate):
            return candidate
        parent = os.path.dirname(current)
        if parent == current:
            # Reached the filesystem root without finding the file.
            return None
        current = parent
def is_cpp(view):
    '''
    Return True when the given view's cursor scope is C++ source;
    False for any other scope or when the view is unusable.
    '''
    # BUG FIX: the original had an unreachable print after `return` and a
    # bare `except:` that also swallowed SystemExit/KeyboardInterrupt.
    try:
        return view.match_selector(view.sel()[0].begin(), 'source.c++')
    except Exception:
        return False
def active_view():
    '''
    Return active view
    '''
    # The view of the currently focused Sublime window.
    return sublime.active_window().active_view()
def get_row_col(view, location=None):
    '''
    Return 1-based (row, col) of *location* in *view*.
    If location is None, set location to cursor location.
    Returns None when the view/selection cannot be queried.
    '''
    try:
        # BUG FIX: the original tested `if not location:`, so a valid
        # location of 0 (start of buffer) was silently replaced by the
        # cursor position -- contradicting the docstring. Test for None.
        if location is None:
            location = view.sel()[0].begin()
        row, col = view.rowcol(location)
        return (row + 1, col + 1)
    except Exception:
        # Narrowed from a bare `except:` (which also caught KeyboardInterrupt).
        return None
# def get_classname(view):
# p = view.sel()[0].begin()
# if 'meta.class-struct-block.c++' in view.scope_name(p):
# classes = view.find_by_selector('meta.class-struct-block.c++')
# entities = view.find_by_selector('entity.name.type.c++')
# for i in range(len(classes)):
# if classes[i].begin() <= p and classes[i].end() >= p:
# index = i
# for j in range(i + 1, len(classes)):
# if classes[j].begin() <= p and classes[j].end() >= p:
# index = j
# else:
# break
# return entities[index]
# return None
# def update_statusbar(self, view, view_line, view_cache, force=False):
# row, col = get_row_col(view)
# view_id = view.id()
# text_point = view.text_point(row, col)
# if not force:
# beg, end = view_line.get(view_id, (None, None))
# if beg and end and sublime.Region(beg, end).contains(text_point):
# return
# errors_regions = view_cache.get(view_id, {}).get(row, {})
# for region, msg in errors_regions.items():
# if sublime.Region(*region).contains(text_point) and msg:
# view.set_status('clang-code-errors', msg)
# view_line[view_id] = region
# return
# if view_id in view_line:
# del view_line[view_id]
# view.erase_status('clang-code-errors')
| {
"content_hash": "e8f5f172502dea5986d15cb77c451f24",
"timestamp": "",
"source": "github",
"line_count": 159,
"max_line_length": 75,
"avg_line_length": 27.71698113207547,
"alnum_prop": 0.5824824143408214,
"repo_name": "kracejic/CppYCM",
"id": "33bcbbc802c7b75f142f01b5685c8a31766ce4b2",
"size": "4407",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/utils.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "4729"
},
{
"name": "Python",
"bytes": "29823"
}
],
"symlink_target": ""
} |
from __future__ import print_function
import os.path as op
import numpy as np
from numpy.testing import (assert_array_almost_equal, assert_equal,
assert_allclose, assert_array_equal)
from scipy import sparse
from nose.tools import assert_true, assert_raises
import copy
import warnings
from mne.datasets import testing
from mne.label import read_label, label_sign_flip
from mne.event import read_events
from mne.epochs import Epochs
from mne.source_estimate import read_source_estimate, VolSourceEstimate
from mne import (read_cov, read_forward_solution, read_evokeds, pick_types,
pick_types_forward, make_forward_solution,
convert_forward_solution, Covariance)
from mne.io import Raw
from mne.minimum_norm.inverse import (apply_inverse, read_inverse_operator,
apply_inverse_raw, apply_inverse_epochs,
make_inverse_operator,
write_inverse_operator,
compute_rank_inverse,
prepare_inverse_operator)
from mne.utils import _TempDir, run_tests_if_main, slow_test
from mne.externals import six
# Paths into the MNE testing dataset used by the tests below.
test_path = testing.data_path(download=False)
s_path = op.join(test_path, 'MEG', 'sample')
fname_fwd = op.join(s_path, 'sample_audvis_trunc-meg-eeg-oct-4-fwd.fif')
# Four inverses:
fname_full = op.join(s_path, 'sample_audvis_trunc-meg-eeg-oct-6-meg-inv.fif')
fname_inv = op.join(s_path, 'sample_audvis_trunc-meg-eeg-oct-4-meg-inv.fif')
fname_inv_fixed_nodepth = op.join(s_path,
                                  'sample_audvis_trunc-meg-eeg-oct-4-meg'
                                  '-nodepth-fixed-inv.fif')
fname_inv_meeg_diag = op.join(s_path,
                              'sample_audvis_trunc-'
                              'meg-eeg-oct-4-meg-eeg-diagnoise-inv.fif')
fname_data = op.join(s_path, 'sample_audvis_trunc-ave.fif')
fname_cov = op.join(s_path, 'sample_audvis_trunc-cov.fif')
fname_raw = op.join(s_path, 'sample_audvis_trunc_raw.fif')
fname_event = op.join(s_path, 'sample_audvis_trunc_raw-eve.fif')
fname_label = op.join(s_path, 'labels', '%s.label')
fname_vol_inv = op.join(s_path,
                        'sample_audvis_trunc-meg-vol-7-meg-inv.fif')
# trans and bem needed for channel reordering tests incl. forward computation
fname_trans = op.join(s_path, 'sample_audvis_trunc-trans.fif')
s_path_bem = op.join(test_path, 'subjects', 'sample', 'bem')
fname_bem = op.join(s_path_bem, 'sample-320-320-320-bem-sol.fif')
src_fname = op.join(s_path_bem, 'sample-oct-4-src.fif')

# Regularization used by the apply_inverse calls in these tests.
snr = 3.0
lambda2 = 1.0 / snr ** 2

# Ring buffer of the dict keys most recently visited by _compare (debugging aid).
last_keys = [None] * 10
def read_forward_solution_meg(*args, **kwargs):
    """Read a forward solution and keep only MEG channels."""
    full_fwd = read_forward_solution(*args, **kwargs)
    return pick_types_forward(full_fwd, meg=True, eeg=False)
def read_forward_solution_eeg(*args, **kwargs):
    """Read a forward solution and keep only EEG channels."""
    full_fwd = read_forward_solution(*args, **kwargs)
    return pick_types_forward(full_fwd, meg=False, eeg=True)
def _get_evoked():
    """Load condition 0 of the test ave file, baseline-corrected and
    cropped to [0, 0.2] s."""
    ev = read_evokeds(fname_data, condition=0, baseline=(None, 0))
    ev.crop(0, 0.2)
    return ev
def _compare(a, b):
    """Recursively assert that *a* and *b* are (approximately) equal.

    Dicts, lists, sparse CSR matrices and ndarrays are compared
    structurally; keys in ``skip_types`` are ignored. On failure the
    recently visited key path (``last_keys``) is printed before re-raising.
    """
    global last_keys
    # Keys deliberately excluded from the comparison (volatile or
    # environment-dependent fields).
    skip_types = ['whitener', 'proj', 'reginv', 'noisenorm', 'nchan',
                  'command_line', 'working_dir', 'mri_file', 'mri_id']
    try:
        if isinstance(a, dict):
            assert_true(isinstance(b, dict))
            for k, v in six.iteritems(a):
                if k not in b and k not in skip_types:
                    raise ValueError('First one had one second one didn\'t:\n'
                                     '%s not in %s' % (k, b.keys()))
                if k not in skip_types:
                    # Track the key path in a fixed-size ring for diagnostics.
                    last_keys.pop()
                    last_keys = [k] + last_keys
                    _compare(v, b[k])
            for k, v in six.iteritems(b):
                if k not in a and k not in skip_types:
                    raise ValueError('Second one had one first one didn\'t:\n'
                                     '%s not in %s' % (k, a.keys()))
        elif isinstance(a, list):
            assert_true(len(a) == len(b))
            for i, j in zip(a, b):
                _compare(i, j)
        elif isinstance(a, sparse.csr.csr_matrix):
            assert_array_almost_equal(a.data, b.data)
            assert_equal(a.indices, b.indices)
            assert_equal(a.indptr, b.indptr)
        elif isinstance(a, np.ndarray):
            assert_array_almost_equal(a, b)
        else:
            assert_true(a == b)
    except Exception as exptn:
        # Surface where in the structure the mismatch happened.
        print(last_keys)
        raise exptn
def _compare_inverses_approx(inv_1, inv_2, evoked, rtol, atol,
                             check_depth=True):
    """Compare two inverse operators numerically and via dSPM application.

    Priors and source covariance are compared almost-exactly; eigen
    fields/leads only up to sign; the applied dSPM solutions within
    *rtol*/*atol*.
    """
    # depth prior
    if check_depth:
        if inv_1['depth_prior'] is not None:
            assert_array_almost_equal(inv_1['depth_prior']['data'],
                                      inv_2['depth_prior']['data'], 5)
        else:
            assert_true(inv_2['depth_prior'] is None)
    # orient prior
    if inv_1['orient_prior'] is not None:
        assert_array_almost_equal(inv_1['orient_prior']['data'],
                                  inv_2['orient_prior']['data'])
    else:
        assert_true(inv_2['orient_prior'] is None)
    # source cov
    assert_array_almost_equal(inv_1['source_cov']['data'],
                              inv_2['source_cov']['data'])

    # These are not as close as we'd like XXX
    # (sign-indeterminate decompositions: compare magnitudes, 0 decimals)
    assert_array_almost_equal(np.abs(inv_1['eigen_fields']['data']),
                              np.abs(inv_2['eigen_fields']['data']), 0)
    assert_array_almost_equal(np.abs(inv_1['eigen_leads']['data']),
                              np.abs(inv_2['eigen_leads']['data']), 0)

    # Apply both operators to the same evoked data and compare solutions.
    stc_1 = apply_inverse(evoked, inv_1, lambda2, "dSPM")
    stc_2 = apply_inverse(evoked, inv_2, lambda2, "dSPM")

    assert_true(stc_1.subject == stc_2.subject)
    assert_equal(stc_1.times, stc_2.times)
    assert_allclose(stc_1.data, stc_2.data, rtol=rtol, atol=atol)
    assert_true(inv_1['units'] == inv_2['units'])
def _compare_io(inv_op, out_file_ext='.fif'):
    """Round-trip an inverse operator through write/read and verify that
    both the file copy and the in-memory object are unchanged."""
    tempdir = _TempDir()
    name_by_ext = {'.fif': 'test-inv.fif', '.gz': 'test-inv.fif.gz'}
    if out_file_ext not in name_by_ext:
        raise ValueError('IO test could not complete')
    out_file = op.join(tempdir, name_by_ext[out_file_ext])

    # Test io operations: compare against an untouched deep copy.
    inv_init = copy.deepcopy(inv_op)
    write_inverse_operator(out_file, inv_op)
    read_inv_op = read_inverse_operator(out_file)
    _compare(inv_init, read_inv_op)
    _compare(inv_init, inv_op)
@testing.requires_testing_data
def test_warn_inverse_operator():
    """Test MNE inverse warning without average EEG projection
    """
    # Strip all projectors (incl. average EEG ref) from otherwise-valid info.
    bad_info = copy.deepcopy(_get_evoked().info)
    bad_info['projs'] = list()
    fwd_op = read_forward_solution(fname_fwd, surf_ori=True)
    noise_cov = read_cov(fname_cov)
    # Exactly one warning is expected from make_inverse_operator.
    with warnings.catch_warnings(record=True) as w:
        make_inverse_operator(bad_info, fwd_op, noise_cov)
    assert_equal(len(w), 1)
@slow_test
@testing.requires_testing_data
def test_make_inverse_operator():
    """Test MNE inverse computation (precomputed and non-precomputed)
    """
    # Test old version of inverse computation starting from forward operator
    # (limit_depth_chs=False), compared against the shipped operator.
    evoked = _get_evoked()
    noise_cov = read_cov(fname_cov)
    inverse_operator = read_inverse_operator(fname_inv)
    fwd_op = read_forward_solution_meg(fname_fwd, surf_ori=True)
    my_inv_op = make_inverse_operator(evoked.info, fwd_op, noise_cov,
                                      loose=0.2, depth=0.8,
                                      limit_depth_chs=False)
    _compare_io(my_inv_op)
    assert_true(inverse_operator['units'] == 'Am')
    # Depth priors differ between the two code paths, so skip them here.
    _compare_inverses_approx(my_inv_op, inverse_operator, evoked, 1e-2, 1e-2,
                             check_depth=False)
    # Test MNE inverse computation starting from forward operator
    my_inv_op = make_inverse_operator(evoked.info, fwd_op, noise_cov,
                                      loose=0.2, depth=0.8)
    _compare_io(my_inv_op)
    _compare_inverses_approx(my_inv_op, inverse_operator, evoked, 1e-2, 1e-2)
    assert_true('dev_head_t' in my_inv_op['info'])
    assert_true('mri_head_t' in my_inv_op)
@slow_test
@testing.requires_testing_data
def test_inverse_operator_channel_ordering():
    """Test MNE inverse computation is immune to channel reorderings
    """
    # These are with original ordering
    evoked = _get_evoked()
    noise_cov = read_cov(fname_cov)
    fwd_orig = make_forward_solution(evoked.info, fname_trans, src_fname,
                                     fname_bem, eeg=True, mindist=5.0)
    fwd_orig = convert_forward_solution(fwd_orig, surf_ori=True)
    inv_orig = make_inverse_operator(evoked.info, fwd_orig, noise_cov,
                                     loose=0.2, depth=0.8,
                                     limit_depth_chs=False)
    stc_1 = apply_inverse(evoked, inv_orig, lambda2, "dSPM")
    # Assume that a raw reordering applies to both evoked and noise_cov,
    # so we don't need to create those from scratch. Just reorder them,
    # then try to apply the original inverse operator
    new_order = np.arange(len(evoked.info['ch_names']))
    randomiser = np.random.RandomState(42)  # fixed seed for reproducibility
    randomiser.shuffle(new_order)
    # Permute the data rows and the matching info entries consistently.
    evoked.data = evoked.data[new_order]
    evoked.info['ch_names'] = [evoked.info['ch_names'][n] for n in new_order]
    evoked.info['chs'] = [evoked.info['chs'][n] for n in new_order]
    # Reorder the noise covariance to follow the new evoked channel order,
    # keeping only the channels present in both.
    cov_ch_reorder = [c for c in evoked.info['ch_names']
                      if (c in noise_cov.ch_names)]
    new_order_cov = [noise_cov.ch_names.index(name) for name in cov_ch_reorder]
    noise_cov['data'] = noise_cov.data[np.ix_(new_order_cov, new_order_cov)]
    noise_cov['names'] = [noise_cov['names'][idx] for idx in new_order_cov]
    # Rebuild forward + inverse from the reordered measurement info.
    fwd_reorder = make_forward_solution(evoked.info, fname_trans, src_fname,
                                        fname_bem, eeg=True, mindist=5.0)
    fwd_reorder = convert_forward_solution(fwd_reorder, surf_ori=True)
    inv_reorder = make_inverse_operator(evoked.info, fwd_reorder, noise_cov,
                                        loose=0.2, depth=0.8,
                                        limit_depth_chs=False)
    stc_2 = apply_inverse(evoked, inv_reorder, lambda2, "dSPM")
    # The source estimates must agree regardless of channel ordering.
    assert_equal(stc_1.subject, stc_2.subject)
    assert_array_equal(stc_1.times, stc_2.times)
    assert_allclose(stc_1.data, stc_2.data, rtol=1e-5, atol=1e-5)
    assert_true(inv_orig['units'] == inv_reorder['units'])
    # Reload with original ordering & apply reordered inverse
    evoked = _get_evoked()
    noise_cov = read_cov(fname_cov)
    stc_3 = apply_inverse(evoked, inv_reorder, lambda2, "dSPM")
    assert_allclose(stc_1.data, stc_3.data, rtol=1e-5, atol=1e-5)
@slow_test
@testing.requires_testing_data
def test_apply_inverse_operator():
    """Test MNE inverse application.

    Applies the same operator with the MNE, sLORETA and dSPM methods and
    sanity-checks the resulting value ranges; also verifies that prepared
    and non-prepared operators give identical results, that label
    restriction matches post-hoc ``in_label`` extraction, and that invalid
    reference/projection states raise.
    """
    inverse_operator = read_inverse_operator(fname_full)
    evoked = _get_evoked()
    # Inverse has 306 channels - 4 proj = 302
    # (an exact duplicate of this comment+assert pair was removed in review)
    assert_true(compute_rank_inverse(inverse_operator) == 302)
    stc = apply_inverse(evoked, inverse_operator, lambda2, "MNE")
    assert_true(stc.subject == 'sample')
    assert_true(stc.data.min() > 0)
    assert_true(stc.data.max() < 10e-9)
    assert_true(stc.data.mean() > 1e-11)
    # test if using prepared and not prepared inverse operator give the same
    # result
    inv_op = prepare_inverse_operator(inverse_operator, nave=evoked.nave,
                                      lambda2=lambda2, method="MNE")
    stc2 = apply_inverse(evoked, inv_op, lambda2, "MNE")
    assert_array_almost_equal(stc.data, stc2.data)
    assert_array_almost_equal(stc.times, stc2.times)
    stc = apply_inverse(evoked, inverse_operator, lambda2, "sLORETA")
    assert_true(stc.subject == 'sample')
    assert_true(stc.data.min() > 0)
    assert_true(stc.data.max() < 10.0)
    assert_true(stc.data.mean() > 0.1)
    stc = apply_inverse(evoked, inverse_operator, lambda2, "dSPM")
    assert_true(stc.subject == 'sample')
    assert_true(stc.data.min() > 0)
    assert_true(stc.data.max() < 35)
    assert_true(stc.data.mean() > 0.1)
    # test without using a label (so delayed computation is used)
    label = read_label(fname_label % 'Aud-lh')
    stc = apply_inverse(evoked, inv_op, lambda2, "MNE")
    stc_label = apply_inverse(evoked, inv_op, lambda2, "MNE",
                              label=label)
    assert_equal(stc_label.subject, 'sample')
    # restricting at apply time must equal extracting the label afterwards
    label_stc = stc.in_label(label)
    assert_true(label_stc.subject == 'sample')
    assert_array_almost_equal(stc_label.data, label_stc.data)
    # Test we get errors when using custom ref or no average proj is present
    evoked.info['custom_ref_applied'] = True
    assert_raises(ValueError, apply_inverse, evoked, inv_op, lambda2, "MNE")
    evoked.info['custom_ref_applied'] = False
    evoked.info['projs'] = []  # remove EEG proj
    assert_raises(ValueError, apply_inverse, evoked, inv_op, lambda2, "MNE")
@testing.requires_testing_data
def test_make_inverse_operator_fixed():
    """Test MNE inverse computation (fixed orientation)
    """
    # fwd_1: free orientation, not surface-oriented
    fwd_1 = read_forward_solution_meg(fname_fwd, surf_ori=False,
                                      force_fixed=False)
    # fwd_2: fixed orientation
    fwd_2 = read_forward_solution_meg(fname_fwd, surf_ori=False,
                                      force_fixed=True)
    evoked = _get_evoked()
    noise_cov = read_cov(fname_cov)
    # can't make depth-weighted fixed inv without surf ori fwd
    assert_raises(ValueError, make_inverse_operator, evoked.info, fwd_1,
                  noise_cov, depth=0.8, loose=None, fixed=True)
    # can't make fixed inv with depth weighting without free ori fwd
    assert_raises(ValueError, make_inverse_operator, evoked.info, fwd_2,
                  noise_cov, depth=0.8, loose=None, fixed=True)
    # now compare to C solution
    # note that the forward solution must not be surface-oriented
    # to get equivalency (surf_ori=True changes the normals)
    inv_op = make_inverse_operator(evoked.info, fwd_2, noise_cov, depth=None,
                                   loose=None, fixed=True)
    inverse_operator_nodepth = read_inverse_operator(fname_inv_fixed_nodepth)
    _compare_inverses_approx(inverse_operator_nodepth, inv_op, evoked, 0, 1e-2)
    # NOTE(review): the original comment said "306 channels - 6 proj = 302",
    # but 306 - 6 = 300; the asserted rank is 302 (= 306 - 4). Confirm the
    # actual projector count for this operator.
    assert_true(compute_rank_inverse(inverse_operator_nodepth) == 302)
@testing.requires_testing_data
def test_make_inverse_operator_free():
    """Test MNE inverse computation (free orientation)
    """
    # Three flavors of the same forward solution:
    # fwd_op: surface-oriented; fwd_1: free ori, not surf-oriented;
    # fwd_2: fixed orientation.
    fwd_op = read_forward_solution_meg(fname_fwd, surf_ori=True)
    fwd_1 = read_forward_solution_meg(fname_fwd, surf_ori=False,
                                      force_fixed=False)
    fwd_2 = read_forward_solution_meg(fname_fwd, surf_ori=False,
                                      force_fixed=True)
    evoked = _get_evoked()
    noise_cov = read_cov(fname_cov)
    # can't make free inv with fixed fwd
    assert_raises(ValueError, make_inverse_operator, evoked.info, fwd_2,
                  noise_cov, depth=None)
    # for free ori inv, loose=None and loose=1 should be equivalent
    inv_1 = make_inverse_operator(evoked.info, fwd_op, noise_cov, loose=None)
    inv_2 = make_inverse_operator(evoked.info, fwd_op, noise_cov, loose=1)
    _compare_inverses_approx(inv_1, inv_2, evoked, 0, 1e-2)
    # for depth=None, surf_ori of the fwd should not matter
    inv_3 = make_inverse_operator(evoked.info, fwd_op, noise_cov, depth=None,
                                  loose=None)
    inv_4 = make_inverse_operator(evoked.info, fwd_1, noise_cov, depth=None,
                                  loose=None)
    _compare_inverses_approx(inv_3, inv_4, evoked, 0, 1e-2)
@testing.requires_testing_data
def test_make_inverse_operator_diag():
    """Test MNE inverse computation with diagonal noise cov
    """
    fwd_surf = read_forward_solution(fname_fwd, surf_ori=True)
    diag_cov = read_cov(fname_cov).as_diag()
    evoked = _get_evoked()
    inv_computed = make_inverse_operator(evoked.info, fwd_surf, diag_cov,
                                         loose=0.2, depth=0.8)
    # must survive a write/read round trip
    _compare_io(inv_computed)
    inv_reference = read_inverse_operator(fname_inv_meeg_diag)
    # This one's only good to zero decimal places, roundoff error (?)
    _compare_inverses_approx(inv_reference, inv_computed, evoked, 0, 1e0)
    # Inverse has 366 channels - 6 proj = 360
    assert_true(compute_rank_inverse(inv_reference) == 360)
@testing.requires_testing_data
def test_inverse_operator_noise_cov_rank():
    """Test MNE inverse operator with a specified noise cov rank
    """
    evoked = _get_evoked()
    noise_cov = read_cov(fname_cov)
    # MEG forward with an explicit integer rank
    meg_fwd = read_forward_solution_meg(fname_fwd, surf_ori=True)
    inv_meg = make_inverse_operator(evoked.info, meg_fwd, noise_cov, rank=64)
    assert_true(compute_rank_inverse(inv_meg) == 64)
    # EEG forward with a per-channel-type rank dict
    eeg_fwd = read_forward_solution_eeg(fname_fwd, surf_ori=True)
    inv_eeg = make_inverse_operator(evoked.info, eeg_fwd, noise_cov,
                                    rank=dict(eeg=20))
    assert_true(compute_rank_inverse(inv_eeg) == 20)
@testing.requires_testing_data
def test_inverse_operator_volume():
    """Test MNE inverse computation on volume source space
    """
    tmp = _TempDir()
    evoked = _get_evoked()
    vol_inv = read_inverse_operator(fname_vol_inv)
    assert_true(repr(vol_inv))
    stc_vol = apply_inverse(evoked, vol_inv, lambda2, "dSPM")
    assert_true(isinstance(stc_vol, VolSourceEstimate))
    # volume inverses don't have associated subject IDs
    assert_true(stc_vol.subject is None)
    # round-trip the estimate through disk
    stc_file = op.join(tmp, 'tmp-vl.stc')
    stc_vol.save(stc_file)
    stc_loaded = read_source_estimate(stc_file)
    assert_true(np.all(stc_vol.data > 0))
    assert_true(np.all(stc_vol.data < 35))
    assert_array_almost_equal(stc_vol.data, stc_loaded.data)
    assert_array_almost_equal(stc_vol.times, stc_loaded.times)
@slow_test
@testing.requires_testing_data
def test_io_inverse_operator():
    """Test IO of inverse_operator
    """
    tempdir = _TempDir()
    inverse_operator = read_inverse_operator(fname_inv)
    x = repr(inverse_operator)
    assert_true(x)  # repr must be non-empty
    assert_true(isinstance(inverse_operator['noise_cov'], Covariance))
    # just do one example for .gz, as it should generalize
    _compare_io(inverse_operator, '.gz')
    # test warnings on bad filenames
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter('always')
        inv_badname = op.join(tempdir, 'test-bad-name.fif.gz')
        write_inverse_operator(inv_badname, inverse_operator)
        read_inverse_operator(inv_badname)
    assert_true(len(w) == 2)  # presumably one warning each for write and read
    # make sure we can write and read
    inv_fname = op.join(tempdir, 'test-inv.fif')
    args = (10, 1. / 9., 'dSPM')  # (nave, lambda2, method) for prepare
    inv_prep = prepare_inverse_operator(inverse_operator, *args)
    write_inverse_operator(inv_fname, inv_prep)
    inv_read = read_inverse_operator(inv_fname)
    _compare(inverse_operator, inv_read)
    # preparing after a round trip must equal preparing before it
    inv_read_prep = prepare_inverse_operator(inv_read, *args)
    _compare(inv_prep, inv_read_prep)
    # preparing twice must be idempotent
    inv_prep_prep = prepare_inverse_operator(inv_prep, *args)
    _compare(inv_prep, inv_prep_prep)
@testing.requires_testing_data
def test_apply_mne_inverse_raw():
    """Test MNE with precomputed inverse operator on Raw
    """
    start = 3
    stop = 10
    raw = Raw(fname_raw)
    label_lh = read_label(fname_label % 'Aud-lh')
    _, times = raw[0, start:stop]  # reference time axis for the window
    inverse_operator = read_inverse_operator(fname_full)
    inverse_operator = prepare_inverse_operator(inverse_operator, nave=1,
                                                lambda2=lambda2, method="dSPM")
    for pick_ori in [None, "normal"]:
        # buffered (buffer_size=3) and unbuffered application must agree
        stc = apply_inverse_raw(raw, inverse_operator, lambda2, "dSPM",
                                label=label_lh, start=start, stop=stop, nave=1,
                                pick_ori=pick_ori, buffer_size=None,
                                prepared=True)
        stc2 = apply_inverse_raw(raw, inverse_operator, lambda2, "dSPM",
                                 label=label_lh, start=start, stop=stop,
                                 nave=1, pick_ori=pick_ori,
                                 buffer_size=3, prepared=True)
        if pick_ori is None:
            # without orientation picking the values must be strictly positive
            assert_true(np.all(stc.data > 0))
            assert_true(np.all(stc2.data > 0))
        assert_true(stc.subject == 'sample')
        assert_true(stc2.subject == 'sample')
        assert_array_almost_equal(stc.times, times)
        assert_array_almost_equal(stc2.times, times)
        assert_array_almost_equal(stc.data, stc2.data)
@testing.requires_testing_data
def test_apply_mne_inverse_fixed_raw():
    """Test MNE with fixed-orientation inverse operator on Raw
    """
    raw = Raw(fname_raw)
    start = 3
    stop = 10
    _, times = raw[0, start:stop]  # reference time axis for the window
    label_lh = read_label(fname_label % 'Aud-lh')
    # create a fixed-orientation inverse operator
    fwd = read_forward_solution_meg(fname_fwd, force_fixed=False,
                                    surf_ori=True)
    noise_cov = read_cov(fname_cov)
    inv_op = make_inverse_operator(raw.info, fwd, noise_cov,
                                   loose=None, depth=0.8, fixed=True)
    inv_op2 = prepare_inverse_operator(inv_op, nave=1,
                                       lambda2=lambda2, method="dSPM")
    # stc: prepared operator, unbuffered; stc2: prepared, buffered;
    # stc3: unprepared operator -- all three must agree.
    stc = apply_inverse_raw(raw, inv_op2, lambda2, "dSPM",
                            label=label_lh, start=start, stop=stop, nave=1,
                            pick_ori=None, buffer_size=None, prepared=True)
    stc2 = apply_inverse_raw(raw, inv_op2, lambda2, "dSPM",
                             label=label_lh, start=start, stop=stop, nave=1,
                             pick_ori=None, buffer_size=3, prepared=True)
    stc3 = apply_inverse_raw(raw, inv_op, lambda2, "dSPM",
                             label=label_lh, start=start, stop=stop, nave=1,
                             pick_ori=None, buffer_size=None)
    assert_true(stc.subject == 'sample')
    assert_true(stc2.subject == 'sample')
    assert_array_almost_equal(stc.times, times)
    assert_array_almost_equal(stc2.times, times)
    assert_array_almost_equal(stc3.times, times)
    assert_array_almost_equal(stc.data, stc2.data)
    assert_array_almost_equal(stc.data, stc3.data)
@testing.requires_testing_data
def test_apply_mne_inverse_epochs():
    """Test MNE with precomputed inverse operator on Epochs
    """
    inverse_operator = read_inverse_operator(fname_full)
    label_lh = read_label(fname_label % 'Aud-lh')
    label_rh = read_label(fname_label % 'Aud-rh')
    event_id, tmin, tmax = 1, -0.2, 0.5
    raw = Raw(fname_raw)
    picks = pick_types(raw.info, meg=True, eeg=False, stim=True, ecg=True,
                       eog=True, include=['STI 014'], exclude='bads')
    reject = dict(grad=4000e-13, mag=4e-12, eog=150e-6)
    flat = dict(grad=1e-15, mag=1e-15)
    events = read_events(fname_event)[:15]
    epochs = Epochs(raw, events, event_id, tmin, tmax, picks=picks,
                    baseline=(None, 0), reject=reject, flat=flat)
    stcs = apply_inverse_epochs(epochs, inverse_operator, lambda2, "dSPM",
                                label=label_lh, pick_ori="normal")
    inverse_operator = prepare_inverse_operator(inverse_operator, nave=1,
                                                lambda2=lambda2, method="dSPM")
    stcs2 = apply_inverse_epochs(epochs, inverse_operator, lambda2, "dSPM",
                                 label=label_lh, pick_ori="normal",
                                 prepared=True)
    # test if using prepared and not prepared inverse operator give the same
    # result
    assert_array_almost_equal(stcs[0].data, stcs2[0].data)
    assert_array_almost_equal(stcs[0].times, stcs2[0].times)
    assert_true(len(stcs) == 2)
    assert_true(3 < stcs[0].data.max() < 10)
    assert_true(stcs[0].subject == 'sample')
    # averaging with sign flips must beat the naive mean across sources
    data = sum(stc.data for stc in stcs) / len(stcs)
    flip = label_sign_flip(label_lh, inverse_operator['src'])
    label_mean = np.mean(data, axis=0)
    label_mean_flip = np.mean(flip[:, np.newaxis] * data, axis=0)
    assert_true(label_mean.max() < label_mean_flip.max())
    # test extracting a BiHemiLabel
    stcs_rh = apply_inverse_epochs(epochs, inverse_operator, lambda2, "dSPM",
                                   label=label_rh, pick_ori="normal",
                                   prepared=True)
    stcs_bh = apply_inverse_epochs(epochs, inverse_operator, lambda2, "dSPM",
                                   label=label_lh + label_rh,
                                   pick_ori="normal",
                                   prepared=True)
    # bi-hemi result stacks left-hemisphere rows first, then right
    n_lh = len(stcs[0].data)
    assert_array_almost_equal(stcs[0].data, stcs_bh[0].data[:n_lh])
    assert_array_almost_equal(stcs_rh[0].data, stcs_bh[0].data[n_lh:])
    # test without using a label (so delayed computation is used)
    stcs = apply_inverse_epochs(epochs, inverse_operator, lambda2, "dSPM",
                                pick_ori="normal", prepared=True)
    assert_true(stcs[0].subject == 'sample')
    label_stc = stcs[0].in_label(label_rh)
    assert_true(label_stc.subject == 'sample')
    assert_array_almost_equal(stcs_rh[0].data, label_stc.data)
@testing.requires_testing_data
def test_make_inverse_operator_bads():
    """Test MNE inverse computation given a mismatch of bad channels.

    One bad channel is temporarily removed from the evoked's ``bads`` list
    before building the inverse; the resulting operator's channels and bads
    must be subsets of those common to both the noise covariance and the
    evoked data.
    """
    fwd_op = read_forward_solution_meg(fname_fwd, surf_ori=True)
    evoked = _get_evoked()
    noise_cov = read_cov(fname_cov)
    # test bads: create a bads mismatch by dropping one bad channel
    bad = evoked.info['bads'].pop()
    inv_ = make_inverse_operator(evoked.info, fwd_op, noise_cov, loose=None)
    # These are intersections (&), not unions -- the old names
    # union_good/union_bads were misleading. Compute them *before*
    # restoring the popped bad channel.
    common_good = set(noise_cov['names']) & set(evoked.ch_names)
    common_bads = set(noise_cov['bads']) & set(evoked.info['bads'])
    evoked.info['bads'].append(bad)  # restore the original bads list
    assert_true(len(set(inv_['info']['ch_names']) - common_good) == 0)
    assert_true(len(set(inv_['info']['bads']) - common_bads) == 0)
# Run this module's tests when executed directly (helper imported from the
# test utilities) -- presumably a no-op on plain import; confirm.
run_tests_if_main()
| {
"content_hash": "0c371b6a5399c9a88a3d517c93e0d4ad",
"timestamp": "",
"source": "github",
"line_count": 621,
"max_line_length": 79,
"avg_line_length": 42.11916264090177,
"alnum_prop": 0.6114849365346383,
"repo_name": "leggitta/mne-python",
"id": "22747ceda789c2b43a6b88a699c9bbbb63dbcee5",
"size": "26156",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "mne/minimum_norm/tests/test_inverse.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "3117"
},
{
"name": "PowerShell",
"bytes": "2988"
},
{
"name": "Python",
"bytes": "4324836"
},
{
"name": "Shell",
"bytes": "936"
}
],
"symlink_target": ""
} |
# PySide tutorial 3 (the header previously said "PyQt", but the code
# imports PySide): a small window with a single button that quits the app.
import sys

from PySide import QtCore, QtGui

app = QtGui.QApplication(sys.argv)

window = QtGui.QWidget()
window.resize(200, 120)

# Renamed from `quit` to avoid shadowing the `quit` builtin.
quit_button = QtGui.QPushButton("Quit", window)
quit_button.setFont(QtGui.QFont("Times", 18, QtGui.QFont.Bold))
quit_button.setGeometry(10, 40, 180, 40)

# New-style signal/slot connection: checked at connect time, unlike the
# old string-based QtCore.QObject.connect(..., SIGNAL("clicked()"), ...).
quit_button.clicked.connect(app.quit)

window.show()
sys.exit(app.exec_())
| {
"content_hash": "887c13bbdab339fbd93f543ade7fb7d9",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 56,
"avg_line_length": 21.476190476190474,
"alnum_prop": 0.647450110864745,
"repo_name": "cherry-wb/SideTools",
"id": "2d133948c7e2ea3caed6e92697b1ec00bd4162c9",
"size": "474",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/tutorial/t3.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "9501"
},
{
"name": "Python",
"bytes": "4071976"
},
{
"name": "Shell",
"bytes": "182"
},
{
"name": "TypeScript",
"bytes": "25292"
}
],
"symlink_target": ""
} |
import os
from datadog_checks.dev import get_docker_hostname, get_here
# Directory containing this file (resolved via the datadog_checks.dev helper).
HERE = get_here()
# Compose file used to spin up the containerized service for the tests.
DOCKER_COMPOSE_FILE = os.path.join(HERE, 'docker', 'docker-compose.yaml')
# Endpoint pieces of the service under test; host depends on the Docker env.
HOST = get_docker_hostname()
PORT = '8080'
SCHEME = 'http'
# Check instance configs: a valid one, one with an unreachable host/scheme,
# and an empty (invalid) one.
INSTANCE = {'scheme': SCHEME, 'host': HOST, 'port': PORT}
INSTANCE_BAD = {'scheme': 'https', 'host': 'foobar', 'port': 9000}
INSTANCE_INVALID = {}
| {
"content_hash": "327d516f6eeb876a0a714c1e6de4464a",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 73,
"avg_line_length": 28.615384615384617,
"alnum_prop": 0.6801075268817204,
"repo_name": "DataDog/integrations-extras",
"id": "d84bc60c5697b0f6aa26b1c484e152510f806ea8",
"size": "479",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "traefik/tests/common.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Dockerfile",
"bytes": "4265"
},
{
"name": "Go",
"bytes": "4119"
},
{
"name": "PHP",
"bytes": "3192"
},
{
"name": "Python",
"bytes": "1219552"
},
{
"name": "Ruby",
"bytes": "8005"
},
{
"name": "Shell",
"bytes": "4237"
}
],
"symlink_target": ""
} |
from lapostesdk.apis import apisuivi, apicontroladresse
class LaPosteApi(object):
    """Aggregate client exposing the La Poste sub-APIs (suivi, controladresse)."""
    def __init__(self, api_key):
        """Store *api_key* and instantiate each sub-API client with it."""
        self.api_key = api_key
        self.suivi = apisuivi.ApiSuivi(self.api_key)
        self.controladresse = apicontroladresse.ApiControlAdresse(self.api_key)
| {
"content_hash": "9ee100b5d5110f6b0b352e6e8a7d02ca",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 79,
"avg_line_length": 35.125,
"alnum_prop": 0.7117437722419929,
"repo_name": "geelweb/laposte-python-sdk",
"id": "c944f14fdab388b21ad0e4582fa01f0d4bc5c8eb",
"size": "281",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lapostesdk/laposteapi.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "6565"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
import datetime
import json
import os
import re
import unittest
from django.contrib.admin import AdminSite, ModelAdmin
from django.contrib.admin.helpers import ACTION_CHECKBOX_NAME
from django.contrib.admin.models import ADDITION, DELETION, LogEntry
from django.contrib.admin.options import TO_FIELD_VAR
from django.contrib.admin.templatetags.admin_static import static
from django.contrib.admin.templatetags.admin_urls import add_preserved_filters
from django.contrib.admin.tests import AdminSeleniumTestCase
from django.contrib.admin.utils import quote
from django.contrib.admin.views.main import IS_POPUP_VAR
from django.contrib.auth import REDIRECT_FIELD_NAME, get_permission_codename
from django.contrib.auth.models import Group, Permission, User
from django.contrib.contenttypes.models import ContentType
from django.contrib.staticfiles.storage import staticfiles_storage
from django.core import mail
from django.core.checks import Error
from django.core.files import temp as tempfile
from django.forms.utils import ErrorList
from django.template.loader import render_to_string
from django.template.response import TemplateResponse
from django.test import (
SimpleTestCase, TestCase, ignore_warnings, modify_settings,
override_settings, skipUnlessDBFeature,
)
from django.test.utils import override_script_prefix, patch_logger
from django.urls import NoReverseMatch, resolve, reverse
from django.utils import formats, six, translation
from django.utils._os import upath
from django.utils.cache import get_max_age
from django.utils.deprecation import RemovedInDjango20Warning
from django.utils.encoding import force_bytes, force_text, iri_to_uri
from django.utils.html import escape
from django.utils.http import urlencode
from django.utils.six.moves.urllib.parse import parse_qsl, urljoin, urlparse
from . import customadmin
from .admin import CityAdmin, site, site2
from .models import (
Actor, AdminOrderedAdminMethod, AdminOrderedCallable, AdminOrderedField,
AdminOrderedModelMethod, Answer, Article, BarAccount, Book, Bookmark,
Category, Chapter, ChapterXtra1, ChapterXtra2, Character, Child, Choice,
City, Collector, Color, ComplexSortedPerson, CoverLetter, CustomArticle,
CyclicOne, CyclicTwo, DooHickey, Employee, EmptyModel, ExternalSubscriber,
Fabric, FancyDoodad, FieldOverridePost, FilteredManager, FooAccount,
FoodDelivery, FunkyTag, Gallery, Grommet, Inquisition, Language, Link,
MainPrepopulated, ModelWithStringPrimaryKey, OtherStory, Paper, Parent,
ParentWithDependentChildren, ParentWithUUIDPK, Person, Persona, Picture,
Pizza, Plot, PlotDetails, PluggableSearchPerson, Podcast, Post,
PrePopulatedPost, Promo, Question, Recommendation, Recommender,
RelatedPrepopulated, RelatedWithUUIDPKModel, Report, Restaurant,
RowLevelChangePermissionModel, SecretHideout, Section, ShortMessage,
Simple, State, Story, Subscriber, SuperSecretHideout, SuperVillain,
Telegram, TitleTranslation, Topping, UnchangeableObject, UndeletableObject,
UnorderedObject, Villain, Vodcast, Whatsit, Widget, Worker, WorkHour,
)
# Expected admin login failure message (implicit concatenation instead of
# the previous backslash line continuation; the resulting string is identical).
ERROR_MESSAGE = (
    "Please enter the correct username and password for a staff account. "
    "Note that both fields may be case-sensitive."
)
class AdminFieldExtractionMixin(object):
    """
    Helper methods for extracting data from AdminForm.
    """
    def get_admin_form_fields(self, response):
        """
        Return a list of AdminFields for the AdminForm in the response.
        """
        admin_form = response.context['adminform']
        # Flatten: fieldsets -> field lines -> individual fields.
        return [
            field
            for fieldset in admin_form
            for field_line in fieldset
            for field in field_line
        ]

    def get_admin_readonly_fields(self, response):
        """
        Return the readonly fields for the response's AdminForm.
        """
        all_fields = self.get_admin_form_fields(response)
        return [field for field in all_fields if field.is_readonly]

    def get_admin_readonly_field(self, response, field_name):
        """
        Return the readonly field for the given field_name.
        """
        # Returns None implicitly when no readonly field matches.
        for candidate in self.get_admin_readonly_fields(response):
            if candidate.field['name'] == field_name:
                return candidate
@override_settings(ROOT_URLCONF='admin_views.urls', USE_I18N=True, USE_L10N=False, LANGUAGE_CODE='en')
class AdminViewBasicTestCase(TestCase):
    """Shared fixture data and helper assertions for the admin view tests."""
    @classmethod
    def setUpTestData(cls):
        # A superuser to log in with, plus a small object graph covering the
        # models exercised by the subclassed tests.
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')
        cls.s1 = Section.objects.create(name='Test section')
        # Three articles with distinct dates so sorting tests can tell
        # oldest/middle/newest apart.
        cls.a1 = Article.objects.create(
            content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a2 = Article.objects.create(
            content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a3 = Article.objects.create(
            content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
        cls.color1 = Color.objects.create(value='Red', warm=True)
        cls.color2 = Color.objects.create(value='Orange', warm=True)
        cls.color3 = Color.objects.create(value='Blue', warm=False)
        cls.color4 = Color.objects.create(value='Green', warm=False)
        cls.fab1 = Fabric.objects.create(surface='x')
        cls.fab2 = Fabric.objects.create(surface='y')
        cls.fab3 = Fabric.objects.create(surface='plain')
        cls.b1 = Book.objects.create(name='Book 1')
        cls.b2 = Book.objects.create(name='Book 2')
        cls.pro1 = Promo.objects.create(name='Promo 1', book=cls.b1)
        # NOTE(review): this rebinds cls.pro1, leaving 'Promo 1' without a
        # class-level reference -- possibly meant to be cls.pro2; confirm
        # before relying on cls.pro1 in tests.
        cls.pro1 = Promo.objects.create(name='Promo 2', book=cls.b2)
        cls.chap1 = Chapter.objects.create(title='Chapter 1', content='[ insert contents here ]', book=cls.b1)
        cls.chap2 = Chapter.objects.create(title='Chapter 2', content='[ insert contents here ]', book=cls.b1)
        cls.chap3 = Chapter.objects.create(title='Chapter 1', content='[ insert contents here ]', book=cls.b2)
        cls.chap4 = Chapter.objects.create(title='Chapter 2', content='[ insert contents here ]', book=cls.b2)
        cls.cx1 = ChapterXtra1.objects.create(chap=cls.chap1, xtra='ChapterXtra1 1')
        cls.cx2 = ChapterXtra1.objects.create(chap=cls.chap3, xtra='ChapterXtra1 2')
        # Post data for edit inline
        cls.inline_post_data = {
            "name": "Test section",
            # inline data
            "article_set-TOTAL_FORMS": "6",
            "article_set-INITIAL_FORMS": "3",
            "article_set-MAX_NUM_FORMS": "0",
            "article_set-0-id": cls.a1.pk,
            # there is no title in database, give one here or formset will fail.
            "article_set-0-title": "Norske bostaver æøå skaper problemer",
            "article_set-0-content": "<p>Middle content</p>",
            "article_set-0-date_0": "2008-03-18",
            "article_set-0-date_1": "11:54:58",
            "article_set-0-section": cls.s1.pk,
            "article_set-1-id": cls.a2.pk,
            "article_set-1-title": "Need a title.",
            "article_set-1-content": "<p>Oldest content</p>",
            "article_set-1-date_0": "2000-03-18",
            "article_set-1-date_1": "11:54:58",
            "article_set-2-id": cls.a3.pk,
            "article_set-2-title": "Need a title.",
            "article_set-2-content": "<p>Newest content</p>",
            "article_set-2-date_0": "2009-03-18",
            "article_set-2-date_1": "11:54:58",
            # three blank extra forms (slots 3-5) round out TOTAL_FORMS=6
            "article_set-3-id": "",
            "article_set-3-title": "",
            "article_set-3-content": "",
            "article_set-3-date_0": "",
            "article_set-3-date_1": "",
            "article_set-4-id": "",
            "article_set-4-title": "",
            "article_set-4-content": "",
            "article_set-4-date_0": "",
            "article_set-4-date_1": "",
            "article_set-5-id": "",
            "article_set-5-title": "",
            "article_set-5-content": "",
            "article_set-5-date_0": "",
            "article_set-5-date_1": "",
        }
    def setUp(self):
        # Every test runs authenticated as the superuser.
        self.client.force_login(self.superuser)
    def tearDown(self):
        # Localization tests may mutate format caches; reset between tests.
        formats.reset_format_cache()
    def assertContentBefore(self, response, text1, text2, failing_msg=None):
        """
        Testing utility asserting that text1 appears before text2 in response
        content.
        """
        self.assertEqual(response.status_code, 200)
        self.assertLess(
            response.content.index(force_bytes(text1)),
            response.content.index(force_bytes(text2)),
            (failing_msg or '') + '\nResponse:\n' + response.content.decode(response.charset)
        )
class AdminViewBasicTest(AdminViewBasicTestCase):
    def test_trailing_slash_required(self):
        """
        If you leave off the trailing slash, app should redirect and add it.
        """
        add_url = reverse('admin:admin_views_article_add')
        # Request the URL minus its trailing slash; expect a permanent (301)
        # redirect back to the canonical URL.
        response = self.client.get(add_url[:-1])
        self.assertRedirects(response, add_url, status_code=301)
def test_admin_static_template_tag(self):
"""
Test that admin_static.static is pointing to the collectstatic version
(as django.contrib.collectstatic is in installed apps).
"""
old_url = staticfiles_storage.base_url
staticfiles_storage.base_url = '/test/'
try:
self.assertEqual(static('path'), '/test/path')
finally:
staticfiles_storage.base_url = old_url
    def test_basic_add_GET(self):
        """
        A smoke test to ensure GET on the add_view works.
        """
        response = self.client.get(reverse('admin:admin_views_section_add'))
        # The admin renders lazily via TemplateResponse.
        self.assertIsInstance(response, TemplateResponse)
        self.assertEqual(response.status_code, 200)
    def test_add_with_GET_args(self):
        """GET parameters should prepopulate the matching add-form fields."""
        response = self.client.get(reverse('admin:admin_views_section_add'), {'name': 'My Section'})
        self.assertContains(
            response, 'value="My Section"',
            msg_prefix="Couldn't find an input with the right value in the response"
        )
    def test_basic_edit_GET(self):
        """
        A smoke test to ensure GET on the change_view works.
        """
        response = self.client.get(reverse('admin:admin_views_section_change', args=(self.s1.pk,)))
        # The admin renders lazily via TemplateResponse.
        self.assertIsInstance(response, TemplateResponse)
        self.assertEqual(response.status_code, 200)
    def test_basic_edit_GET_string_PK(self):
        """
        Ensure GET on the change_view works (returns an HTTP 404 error, see
        #11191) when passing a string as the PK argument for a model with an
        integer PK field.
        """
        # 'abc' can never match an integer PK, so the view must 404 rather
        # than raise an unhandled error.
        response = self.client.get(reverse('admin:admin_views_section_change', args=('abc',)))
        self.assertEqual(response.status_code, 404)
    def test_basic_edit_GET_old_url_redirect(self):
        """
        The change URL changed in Django 1.9, but the old one still redirects.
        """
        # Strip the modern 'change/' suffix to simulate the pre-1.9 URL.
        response = self.client.get(
            reverse('admin:admin_views_section_change', args=(self.s1.pk,)).replace('change/', '')
        )
        self.assertRedirects(response, reverse('admin:admin_views_section_change', args=(self.s1.pk,)))
    def test_basic_inheritance_GET_string_PK(self):
        """
        Ensure GET on the change_view works on inherited models (returns an
        HTTP 404 error, see #19951) when passing a string as the PK argument
        for a model with an integer PK field.
        """
        # Same as the string-PK test above, but against an inherited model.
        response = self.client.get(reverse('admin:admin_views_supervillain_change', args=('abc',)))
        self.assertEqual(response.status_code, 404)
def test_basic_add_POST(self):
"""
A smoke test to ensure POST on add_view works.
"""
post_data = {
"name": "Another Section",
# inline data
"article_set-TOTAL_FORMS": "3",
"article_set-INITIAL_FORMS": "0",
"article_set-MAX_NUM_FORMS": "0",
}
response = self.client.post(reverse('admin:admin_views_section_add'), post_data)
self.assertEqual(response.status_code, 302) # redirect somewhere
    def test_popup_add_POST(self):
        """
        Ensure http response from a popup is properly escaped.
        """
        post_data = {
            '_popup': '1',  # flag the request as coming from a popup window
            'title': 'title with a new\nline',  # raw newline to be escaped
            'content': 'some content',
            'date_0': '2010-09-10',
            'date_1': '14:55:39',
        }
        response = self.client.post(reverse('admin:admin_views_article_add'), post_data)
        # The newline must appear backslash-escaped in the popup response.
        self.assertContains(response, 'title with a new\\nline')
    def test_basic_edit_POST(self):
        """
        A smoke test to ensure POST on edit_view works.
        """
        url = reverse('admin:admin_views_section_change', args=(self.s1.pk,))
        # inline_post_data (from the base class) carries the inline formset.
        response = self.client.post(url, self.inline_post_data)
        self.assertEqual(response.status_code, 302)  # redirect somewhere
def test_edit_save_as(self):
"""
Test "save as".
"""
post_data = self.inline_post_data.copy()
post_data.update({
'_saveasnew': 'Save+as+new',
"article_set-1-section": "1",
"article_set-2-section": "1",
"article_set-3-section": "1",
"article_set-4-section": "1",
"article_set-5-section": "1",
})
response = self.client.post(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), post_data)
self.assertEqual(response.status_code, 302) # redirect somewhere
    def test_edit_save_as_delete_inline(self):
        """
        Should be able to "Save as new" while also deleting an inline.
        """
        post_data = self.inline_post_data.copy()
        post_data.update({
            '_saveasnew': 'Save+as+new',
            "article_set-1-section": "1",
            "article_set-2-section": "1",
            "article_set-2-DELETE": "1",  # delete this inline during save-as
            "article_set-3-section": "1",
        })
        response = self.client.post(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), post_data)
        self.assertEqual(response.status_code, 302)
        # started with 3 articles, one was deleted.
        self.assertEqual(Section.objects.latest('id').article_set.count(), 2)
    def test_change_list_column_field_classes(self):
        """Changelist markup carries CSS classes named after list_display entries."""
        response = self.client.get(reverse('admin:admin_views_article_changelist'))
        # callables display the callable name.
        self.assertContains(response, 'column-callable_year')
        self.assertContains(response, 'field-callable_year')
        # lambdas display as "lambda" + index that they appear in list_display.
        self.assertContains(response, 'column-lambda8')
        self.assertContains(response, 'field-lambda8')
def test_change_list_sorting_callable(self):
"""
Ensure we can sort on a list_display field that is a callable
(column 2 is callable_year in ArticleAdmin)
"""
response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': 2})
self.assertContentBefore(
response, 'Oldest content', 'Middle content',
"Results of sorting on callable are out of order."
)
self.assertContentBefore(
response, 'Middle content', 'Newest content',
"Results of sorting on callable are out of order."
)
def test_change_list_sorting_model(self):
    """
    Ensure we can sort on a list_display field that is a Model method
    (column 3 is 'model_year' in ArticleAdmin)
    """
    # '-3' requests descending order on column 3.
    response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': '-3'})
    for earlier, later in (('Newest content', 'Middle content'),
                           ('Middle content', 'Oldest content')):
        self.assertContentBefore(
            response, earlier, later,
            "Results of sorting on Model method are out of order."
        )
def test_change_list_sorting_model_admin(self):
    """
    Ensure we can sort on a list_display field that is a ModelAdmin method
    (column 4 is 'modeladmin_year' in ArticleAdmin)
    """
    response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': '4'})
    for earlier, later in (('Oldest content', 'Middle content'),
                           ('Middle content', 'Newest content')):
        self.assertContentBefore(
            response, earlier, later,
            "Results of sorting on ModelAdmin method are out of order."
        )
def test_change_list_sorting_model_admin_reverse(self):
    """
    Ensure we can sort on a list_display field that is a ModelAdmin
    method in reverse order (i.e. admin_order_field uses the '-' prefix)
    (column 6 is 'model_year_reverse' in ArticleAdmin)
    """
    changelist_url = reverse('admin:admin_views_article_changelist')
    response = self.client.get(changelist_url, {'o': '6'})
    for earlier, later in (('2009', '2008'), ('2008', '2000')):
        self.assertContentBefore(
            response, earlier, later,
            "Results of sorting on ModelAdmin method are out of order."
        )
    # Let's make sure the ordering is right and that we don't get a
    # FieldError when we change to descending order
    response = self.client.get(changelist_url, {'o': '-6'})
    for earlier, later in (('2000', '2008'), ('2008', '2009')):
        self.assertContentBefore(
            response, earlier, later,
            "Results of sorting on ModelAdmin method are out of order."
        )
def test_change_list_sorting_multiple(self):
    """Sorting on several columns at once via a '.'-separated 'o' parameter."""
    people = [
        Person.objects.create(name="Chris", gender=1, alive=True),
        Person.objects.create(name="Chris", gender=2, alive=True),
        Person.objects.create(name="Bob", gender=1, alive=True),
    ]
    links = [reverse('admin:admin_views_person_change', args=(person.pk,)) for person in people]
    changelist_url = reverse('admin:admin_views_person_changelist')
    # Sort by name, gender
    response = self.client.get(changelist_url, {'o': '1.2'})
    self.assertContentBefore(response, links[2], links[0])
    self.assertContentBefore(response, links[0], links[1])
    # Sort by gender descending, name
    response = self.client.get(changelist_url, {'o': '-2.1'})
    self.assertContentBefore(response, links[1], links[2])
    self.assertContentBefore(response, links[2], links[0])
def test_change_list_sorting_preserve_queryset_ordering(self):
    """
    If no ordering is defined in `ModelAdmin.ordering` or in the query
    string, then the underlying order of the queryset should not be
    changed, even if it is defined in `ModelAdmin.get_queryset()`.
    Refs #11868, #7309.
    """
    people = [
        Person.objects.create(name="Amy", gender=1, alive=True, age=80),
        Person.objects.create(name="Bob", gender=1, alive=True, age=70),
        Person.objects.create(name="Chris", gender=2, alive=False, age=60),
    ]
    links = [reverse('admin:admin_views_person_change', args=(person.pk,)) for person in people]
    response = self.client.get(reverse('admin:admin_views_person_changelist'), {})
    self.assertContentBefore(response, links[2], links[1])
    self.assertContentBefore(response, links[1], links[0])
def test_change_list_sorting_model_meta(self):
    # Test ordering on Model Meta is respected
    urdu = Language.objects.create(iso='ur', name='Urdu')
    arabic = Language.objects.create(iso='ar', name='Arabic')
    urdu_link = reverse('admin:admin_views_language_change', args=(quote(urdu.pk),))
    arabic_link = reverse('admin:admin_views_language_change', args=(quote(arabic.pk),))
    changelist_url = reverse('admin:admin_views_language_changelist')
    response = self.client.get(changelist_url, {})
    self.assertContentBefore(response, arabic_link, urdu_link)
    # Test we can override with query string
    response = self.client.get(changelist_url, {'o': '-1'})
    self.assertContentBefore(response, urdu_link, arabic_link)
def test_change_list_sorting_override_model_admin(self):
    # Test ordering on Model Admin is respected, and overrides Model Meta
    now = datetime.datetime.now()
    newer = Podcast.objects.create(name="A", release_date=now)
    older = Podcast.objects.create(name="B", release_date=now - datetime.timedelta(10))
    newer_link = reverse('admin:admin_views_podcast_change', args=(newer.pk,))
    older_link = reverse('admin:admin_views_podcast_change', args=(older.pk,))
    response = self.client.get(reverse('admin:admin_views_podcast_changelist'), {})
    self.assertContentBefore(response, newer_link, older_link)
def test_multiple_sort_same_field(self):
    # Check that we get the columns we expect if we have two columns
    # that correspond to the same ordering field
    now = datetime.datetime.now()
    podcast_a = Podcast.objects.create(name="A", release_date=now)
    podcast_b = Podcast.objects.create(name="B", release_date=now - datetime.timedelta(10))
    link_a = reverse('admin:admin_views_podcast_change', args=(quote(podcast_a.pk),))
    link_b = reverse('admin:admin_views_podcast_change', args=(quote(podcast_b.pk),))
    response = self.client.get(reverse('admin:admin_views_podcast_changelist'), {})
    self.assertContentBefore(response, link_a, link_b)

    bob = ComplexSortedPerson.objects.create(name="Bob", age=10)
    amy = ComplexSortedPerson.objects.create(name="Amy", age=20)
    bob_link = reverse('admin:admin_views_complexsortedperson_change', args=(bob.pk,))
    amy_link = reverse('admin:admin_views_complexsortedperson_change', args=(amy.pk,))
    response = self.client.get(reverse('admin:admin_views_complexsortedperson_changelist'), {})
    # Should have 5 columns (including action checkbox col)
    self.assertContains(response, '<th scope="col"', count=5)
    self.assertContains(response, 'Name')
    self.assertContains(response, 'Colored name')
    # Check order
    self.assertContentBefore(response, 'Name', 'Colored name')
    # Check sorting - should be by name
    self.assertContentBefore(response, amy_link, bob_link)
def test_sort_indicators_admin_order(self):
    """
    Ensures that the admin shows default sort indicators for all
    kinds of 'ordering' fields: field names, method on the model
    admin and model itself, and other callables. See #17252.
    """
    ordered_admins = (
        (AdminOrderedField, 'adminorderedfield'),
        (AdminOrderedModelMethod, 'adminorderedmodelmethod'),
        (AdminOrderedAdminMethod, 'adminorderedadminmethod'),
        (AdminOrderedCallable, 'adminorderedcallable'),
    )
    for model, url in ordered_admins:
        # Create out of order so the sort indicator test is meaningful.
        model.objects.create(stuff='The Last Item', order=3)
        model.objects.create(stuff='The First Item', order=1)
        model.objects.create(stuff='The Middle Item', order=2)
        response = self.client.get(reverse('admin:admin_views_%s_changelist' % url), {})
        self.assertEqual(response.status_code, 200)
        # Should have 3 columns including action checkbox col.
        self.assertContains(response, '<th scope="col"', count=3, msg_prefix=url)
        # Check if the correct column was selected. 2 is the index of the
        # 'order' column in the model admin's 'list_display' with 0 being
        # the implicit 'action_checkbox' and 1 being the column 'stuff'.
        self.assertEqual(response.context['cl'].get_ordering_field_columns(), {2: 'asc'})
        # Check order of records.
        self.assertContentBefore(response, 'The First Item', 'The Middle Item')
        self.assertContentBefore(response, 'The Middle Item', 'The Last Item')
def test_limited_filter(self):
    """Ensure admin changelist filters do not contain objects excluded via limit_choices_to.
    This also tests relation-spanning filters (e.g. 'color__value').
    """
    response = self.client.get(reverse('admin:admin_views_thing_changelist'))
    filter_div = '<div id="changelist-filter">'
    excluded_choice_link = '<a href="?color__id__exact=3">Blue</a>'
    self.assertContains(
        response, filter_div,
        msg_prefix="Expected filter not found in changelist view"
    )
    self.assertNotContains(
        response, excluded_choice_link,
        msg_prefix="Changelist filter not correctly limited by limit_choices_to"
    )
def test_relation_spanning_filters(self):
    """Filters spanning relations (e.g. 'chap__book__name') render links that work."""
    changelist_url = reverse('admin:admin_views_chapterxtra1_changelist')
    response = self.client.get(changelist_url)
    self.assertContains(response, '<div id="changelist-filter">')
    # Map each filter path to the candidate values and a predicate every
    # filtered result must satisfy.
    filters = {
        'chap__id__exact': {
            'values': [c.id for c in Chapter.objects.all()],
            'test': lambda obj, value: obj.chap.id == value,
        },
        'chap__title': {
            'values': [c.title for c in Chapter.objects.all()],
            'test': lambda obj, value: obj.chap.title == value,
        },
        'chap__book__id__exact': {
            'values': [b.id for b in Book.objects.all()],
            'test': lambda obj, value: obj.chap.book.id == value,
        },
        'chap__book__name': {
            'values': [b.name for b in Book.objects.all()],
            'test': lambda obj, value: obj.chap.book.name == value,
        },
        'chap__book__promo__id__exact': {
            'values': [p.id for p in Promo.objects.all()],
            'test': lambda obj, value: obj.chap.book.promo_set.filter(id=value).exists(),
        },
        'chap__book__promo__name': {
            'values': [p.name for p in Promo.objects.all()],
            'test': lambda obj, value: obj.chap.book.promo_set.filter(name=value).exists(),
        },
    }
    for filter_path, spec in filters.items():
        for value in spec['values']:
            query_string = urlencode({filter_path: value})
            # ensure filter link exists
            self.assertContains(response, '<a href="?%s">' % query_string)
            # ensure link works
            filtered_response = self.client.get('%s?%s' % (changelist_url, query_string))
            self.assertEqual(filtered_response.status_code, 200)
            # ensure changelist contains only valid objects
            for obj in filtered_response.context['cl'].queryset.all():
                self.assertTrue(spec['test'](obj, value))
def test_incorrect_lookup_parameters(self):
    """Ensure incorrect lookup parameters are handled gracefully."""
    changelist_url = reverse('admin:admin_views_thing_changelist')
    invalid_lookups = (
        {'notarealfield': '5'},
        # Spanning relationships through a nonexistent related object (Refs #16716)
        {'notarealfield__whatever': '5'},
        {'color__id__exact': 'StringNotInteger!'},
        # Regression test for #18530
        {'pub_date__gte': 'foo'},
    )
    for params in invalid_lookups:
        response = self.client.get(changelist_url, params)
        self.assertRedirects(response, '%s?e=1' % changelist_url)
def test_isnull_lookups(self):
    """Ensure `isnull` lookups are handled correctly."""
    Article.objects.create(title="I Could Go Anywhere", content="Versatile", date=datetime.datetime.now())
    changelist_url = reverse('admin:admin_views_article_changelist')
    response = self.client.get(changelist_url)
    self.assertContains(response, '4 articles')
    # Both spellings of each boolean must filter identically.
    for lookup_value, expected_count in (
        ('false', '3 articles'),
        ('0', '3 articles'),
        ('true', '1 article'),
        ('1', '1 article'),
    ):
        response = self.client.get(changelist_url, {'section__isnull': lookup_value})
        self.assertContains(response, expected_count)
def test_logout_and_password_change_URLs(self):
    """The changelist links to the logout and password-change views."""
    response = self.client.get(reverse('admin:admin_views_article_changelist'))
    for url_name in ('admin:logout', 'admin:password_change'):
        self.assertContains(response, '<a href="%s">' % reverse(url_name))
def test_named_group_field_choices_change_list(self):
    """
    Ensures the admin changelist shows correct values in the relevant column
    for rows corresponding to instances of a model in which a named group
    has been used in the choices option of a field.
    """
    fail_msg = (
        "Changelist table isn't showing the right human-readable values "
        "set by a model field 'choices' option named group."
    )
    response = self.client.get(reverse('admin:admin_views_fabric_changelist'))
    for fabric, label in ((self.fab1, 'Horizontal'), (self.fab2, 'Vertical')):
        link = reverse('admin:admin_views_fabric_change', args=(fabric.pk,))
        self.assertContains(
            response, '<a href="%s">%s</a>' % (link, label),
            msg_prefix=fail_msg, html=True
        )
def test_named_group_field_choices_filter(self):
    """
    Ensures the filter UI shows correctly when at least one named group has
    been used in the choices option of a model field.
    """
    fail_msg = (
        "Changelist filter isn't showing options contained inside a model "
        "field 'choices' option named group."
    )
    response = self.client.get(reverse('admin:admin_views_fabric_changelist'))
    self.assertContains(response, '<div id="changelist-filter">')
    for filter_link in ('<a href="?surface__exact=x">Horizontal</a>',
                        '<a href="?surface__exact=y">Vertical</a>'):
        self.assertContains(response, filter_link, msg_prefix=fail_msg, html=True)
def test_change_list_null_boolean_display(self):
    """A null boolean value is rendered with the "unknown" icon in the changelist."""
    Post.objects.create(public=None)
    changelist_url = reverse('admin:admin_views_post_changelist')
    response = self.client.get(changelist_url)
    self.assertContains(response, 'icon-unknown.svg')
def test_i18n_language_non_english_default(self):
    """
    Check if the JavaScript i18n view returns an empty language catalog
    if the default language is non-English but the selected language
    is English. See #13388 and #3594 for more details.
    """
    with self.settings(LANGUAGE_CODE='fr'):
        with translation.override('en-us'):
            response = self.client.get(reverse('admin:jsi18n'))
            self.assertNotContains(response, 'Choisir une heure')
def test_i18n_language_non_english_fallback(self):
    """
    Makes sure that the fallback language is still working properly
    in cases where the selected language cannot be found.
    """
    with self.settings(LANGUAGE_CODE='fr'):
        with translation.override('none'):
            response = self.client.get(reverse('admin:jsi18n'))
            self.assertContains(response, 'Choisir une heure')
def test_L10N_deactivated(self):
    """
    Check if L10N is deactivated, the JavaScript i18n view doesn't
    return localized date/time formats. Refs #14824.
    """
    with self.settings(LANGUAGE_CODE='ru', USE_L10N=False):
        with translation.override('none'):
            response = self.client.get(reverse('admin:jsi18n'))
            self.assertNotContains(response, '%d.%m.%Y %H:%M:%S')
            self.assertContains(response, '%Y-%m-%d %H:%M:%S')
def test_disallowed_filtering(self):
    """Lookups not allowed by list_filter are rejected with a 400 and logged."""
    with patch_logger('django.security.DisallowedModelAdminLookup', 'error') as calls:
        response = self.client.get(
            "%s?owner__email__startswith=fuzzy" % reverse('admin:admin_views_album_changelist')
        )
        self.assertEqual(response.status_code, 400)
        self.assertEqual(len(calls), 1)
    # Filters are allowed if explicitly included in list_filter
    thing_changelist = reverse('admin:admin_views_thing_changelist')
    for query in ('color__value__startswith=red', 'color__value=red'):
        response = self.client.get("%s?%s" % (thing_changelist, query))
        self.assertEqual(response.status_code, 200)
    # Filters should be allowed if they involve a local field without the
    # need to whitelist them in list_filter or date_hierarchy.
    response = self.client.get("%s?age__gt=30" % reverse('admin:admin_views_person_changelist'))
    self.assertEqual(response.status_code, 200)
    e1 = Employee.objects.create(name='Anonymous', gender=1, age=22, alive=True, code='123')
    e2 = Employee.objects.create(name='Visitor', gender=2, age=19, alive=True, code='124')
    WorkHour.objects.create(datum=datetime.datetime.now(), employee=e1)
    WorkHour.objects.create(datum=datetime.datetime.now(), employee=e2)
    workhour_changelist = reverse('admin:admin_views_workhour_changelist')
    response = self.client.get(workhour_changelist)
    self.assertContains(response, 'employee__person_ptr__exact')
    response = self.client.get("%s?employee__person_ptr__exact=%d" % (workhour_changelist, e1.pk))
    self.assertEqual(response.status_code, 200)
def test_disallowed_to_field(self):
    """
    Requests naming a "to field" that no registered model legitimately
    references must be rejected with HTTP 400 and logged via
    django.security.DisallowedModelAdminToField, while fields referenced
    directly, through m2m/reverse-m2m, inheritance, inlines, or the
    primary key must be accepted with HTTP 200.
    """
    # Unknown field name: rejected and logged exactly once.
    with patch_logger('django.security.DisallowedModelAdminToField', 'error') as calls:
        url = reverse('admin:admin_views_section_changelist')
        response = self.client.get(url, {TO_FIELD_VAR: 'missing_field'})
        self.assertEqual(response.status_code, 400)
        self.assertEqual(len(calls), 1)
    # Specifying a field that is not referred by any other model registered
    # to this admin site should raise an exception.
    with patch_logger('django.security.DisallowedModelAdminToField', 'error') as calls:
        response = self.client.get(reverse('admin:admin_views_section_changelist'), {TO_FIELD_VAR: 'name'})
        self.assertEqual(response.status_code, 400)
        self.assertEqual(len(calls), 1)
    # #23839 - Primary key should always be allowed, even if the referenced model isn't registered.
    response = self.client.get(reverse('admin:admin_views_notreferenced_changelist'), {TO_FIELD_VAR: 'id'})
    self.assertEqual(response.status_code, 200)
    # #23915 - Specifying a field referenced by another model though a m2m should be allowed.
    response = self.client.get(reverse('admin:admin_views_recipe_changelist'), {TO_FIELD_VAR: 'rname'})
    self.assertEqual(response.status_code, 200)
    # #23604, #23915 - Specifying a field referenced through a reverse m2m relationship should be allowed.
    response = self.client.get(reverse('admin:admin_views_ingredient_changelist'), {TO_FIELD_VAR: 'iname'})
    self.assertEqual(response.status_code, 200)
    # #23329 - Specifying a field that is not referred by any other model directly registered
    # to this admin site but registered through inheritance should be allowed.
    response = self.client.get(reverse('admin:admin_views_referencedbyparent_changelist'), {TO_FIELD_VAR: 'name'})
    self.assertEqual(response.status_code, 200)
    # #23431 - Specifying a field that is only referred to by a inline of a registered
    # model should be allowed.
    response = self.client.get(reverse('admin:admin_views_referencedbyinline_changelist'), {TO_FIELD_VAR: 'name'})
    self.assertEqual(response.status_code, 200)
    # #25622 - Specifying a field of a model only referred by a generic
    # relation should raise DisallowedModelAdminToField.
    url = reverse('admin:admin_views_referencedbygenrel_changelist')
    with patch_logger('django.security.DisallowedModelAdminToField', 'error') as calls:
        response = self.client.get(url, {TO_FIELD_VAR: 'object_id'})
        self.assertEqual(response.status_code, 400)
        self.assertEqual(len(calls), 1)
    # We also want to prevent the add, change, and delete views from
    # leaking a disallowed field value.
    with patch_logger('django.security.DisallowedModelAdminToField', 'error') as calls:
        response = self.client.post(reverse('admin:admin_views_section_add'), {TO_FIELD_VAR: 'name'})
        self.assertEqual(response.status_code, 400)
        self.assertEqual(len(calls), 1)
    section = Section.objects.create()
    with patch_logger('django.security.DisallowedModelAdminToField', 'error') as calls:
        url = reverse('admin:admin_views_section_change', args=(section.pk,))
        response = self.client.post(url, {TO_FIELD_VAR: 'name'})
        self.assertEqual(response.status_code, 400)
        self.assertEqual(len(calls), 1)
    with patch_logger('django.security.DisallowedModelAdminToField', 'error') as calls:
        url = reverse('admin:admin_views_section_delete', args=(section.pk,))
        response = self.client.post(url, {TO_FIELD_VAR: 'name'})
        self.assertEqual(response.status_code, 400)
        self.assertEqual(len(calls), 1)
def test_allowed_filtering_15103(self):
    """
    Regression test for ticket 15103 - filtering on fields defined in a
    ForeignKey 'limit_choices_to' should be allowed, otherwise raw_id_fields
    can break.
    """
    # Filters should be allowed if they are defined on a ForeignKey pointing to this model
    changelist_url = reverse('admin:admin_views_inquisition_changelist')
    response = self.client.get("%s?leader__name=Palin&leader__age=27" % changelist_url)
    self.assertEqual(response.status_code, 200)
def test_popup_dismiss_related(self):
    """
    Regression test for ticket 20664 - ensure the pk is properly quoted.
    """
    actor = Actor.objects.create(name="Palin", age=27)
    popup_url = "%s?%s" % (reverse('admin:admin_views_actor_changelist'), IS_POPUP_VAR)
    response = self.client.get(popup_url)
    self.assertContains(response, 'data-popup-opener="%s"' % actor.pk)
def test_hide_change_password(self):
    """
    Tests if the "change password" link in the admin is hidden if the User
    does not have a usable password set.
    (against 9bea85795705d015cdadc82c68b99196a8554f5c)
    """
    superuser = User.objects.get(username='super')
    superuser.set_unusable_password()
    superuser.save()
    # Re-authenticate so the index reflects the unusable password.
    self.client.force_login(superuser)
    response = self.client.get(reverse('admin:index'))
    self.assertNotContains(
        response, reverse('admin:password_change'),
        msg_prefix='The "change password" link should not be displayed if a user does not have a usable password.'
    )
def test_change_view_with_show_delete_extra_context(self):
    """
    Ensured that the 'show_delete' context variable in the admin's change
    view actually controls the display of the delete button.
    Refs #10057.
    """
    undeletable = UndeletableObject.objects.create(name='foo')
    change_url = reverse('admin:admin_views_undeletableobject_change', args=(undeletable.pk,))
    response = self.client.get(change_url)
    self.assertNotContains(response, 'deletelink')
def test_allows_attributeerror_to_bubble_up(self):
    """
    Ensure that AttributeErrors are allowed to bubble when raised inside
    a change list view.
    Requires a model to be created so there's something to be displayed
    Refs: #16655, #18593, and #18747
    """
    Simple.objects.create()
    changelist_url = reverse('admin:admin_views_simple_changelist')
    with self.assertRaises(AttributeError):
        self.client.get(changelist_url)
def test_changelist_with_no_change_url(self):
    """
    ModelAdmin.changelist_view shouldn't result in a NoReverseMatch if url
    for change_view is removed from get_urls
    Regression test for #20934
    """
    UnchangeableObject.objects.create()
    changelist_url = reverse('admin:admin_views_unchangeableobject_changelist')
    response = self.client.get(changelist_url)
    self.assertEqual(response.status_code, 200)
    # Check the format of the shown object -- shouldn't contain a change link
    self.assertContains(response, '<th class="field-__str__">UnchangeableObject object</th>', html=True)
def test_invalid_appindex_url(self):
    """
    #21056 -- URL reversing shouldn't work for nonexistent apps.
    """
    good_url = '/test_admin/admin/admin_views/'
    self.assertEqual(
        good_url,
        reverse('admin:app_list', kwargs={'app_label': 'admin_views'}),
    )
    with self.assertRaises(NoReverseMatch):
        reverse('admin:app_list', kwargs={'app_label': 'this_should_fail'})
    with self.assertRaises(NoReverseMatch):
        reverse('admin:app_list', args=('admin_views2',))
def test_resolve_admin_views(self):
    """Resolved admin views expose their site and ModelAdmin on the view function."""
    index_match = resolve('/test_admin/admin4/')
    self.assertIs(index_match.func.admin_site, customadmin.simple_site)
    changelist_match = resolve('/test_admin/admin4/auth/user/')
    self.assertIsInstance(changelist_match.func.model_admin, customadmin.CustomPwdTemplateUserAdmin)
def test_adminsite_display_site_url(self):
    """
    #13749 - Admin should display link to front-end site 'View site'
    """
    response = self.client.get(reverse('admin:index'))
    self.assertEqual(response.context['site_url'], '/my-site-url/')
    self.assertContains(response, '<a href="/my-site-url/">View site</a>')
@override_settings(TEMPLATES=[{
    'BACKEND': 'django.template.backends.django.DjangoTemplates',
    # Put this app's and the shared tests templates dirs in DIRS to take precedence
    # over the admin's templates dir.
    'DIRS': [
        os.path.join(os.path.dirname(upath(__file__)), 'templates'),
        os.path.join(os.path.dirname(os.path.dirname(upath(__file__))), 'templates'),
    ],
    'APP_DIRS': True,
    'OPTIONS': {
        'context_processors': [
            'django.template.context_processors.debug',
            'django.template.context_processors.request',
            'django.contrib.auth.context_processors.auth',
            'django.contrib.messages.context_processors.messages',
        ],
    },
}])
class AdminCustomTemplateTests(AdminViewBasicTestCase):
    """Verify that admin templates overridden in this app's template dirs are used."""

    def _assert_bodyclass_check(self, response):
        # The overridden templates prepend 'bodyclass_consistency_check ' via
        # block.super in their bodyclass block; its presence proves the
        # override extended rather than replaced the block.
        self.assertContains(response, 'bodyclass_consistency_check ')

    def test_custom_model_admin_templates(self):
        # Test custom change list template with custom extra context
        response = self.client.get(reverse('admin:admin_views_customarticle_changelist'))
        self.assertContains(response, "var hello = 'Hello!';")
        self.assertTemplateUsed(response, 'custom_admin/change_list.html')
        # Test custom add form template
        response = self.client.get(reverse('admin:admin_views_customarticle_add'))
        self.assertTemplateUsed(response, 'custom_admin/add_form.html')
        # Add an article so we can test delete, change, and history views
        post = self.client.post(reverse('admin:admin_views_customarticle_add'), {
            'content': '<p>great article</p>',
            'date_0': '2008-03-18',
            'date_1': '10:54:39'
        })
        self.assertRedirects(post, reverse('admin:admin_views_customarticle_changelist'))
        self.assertEqual(CustomArticle.objects.all().count(), 1)
        article_pk = CustomArticle.objects.all()[0].pk
        # Test custom delete, change, and object history templates
        for url_name, template in (
            ('admin:admin_views_customarticle_change', 'custom_admin/change_form.html'),
            ('admin:admin_views_customarticle_delete', 'custom_admin/delete_confirmation.html'),
        ):
            response = self.client.get(reverse(url_name, args=(article_pk,)))
            self.assertTemplateUsed(response, template)
        response = self.client.post(reverse('admin:admin_views_customarticle_changelist'), data={
            'index': 0,
            'action': ['delete_selected'],
            '_selected_action': ['1'],
        })
        self.assertTemplateUsed(response, 'custom_admin/delete_selected_confirmation.html')
        response = self.client.get(reverse('admin:admin_views_customarticle_history', args=(article_pk,)))
        self.assertTemplateUsed(response, 'custom_admin/object_history.html')

    def test_extended_bodyclass_template_change_form(self):
        """admin/change_form.html uses block.super in the bodyclass block."""
        self._assert_bodyclass_check(self.client.get(reverse('admin:admin_views_section_add')))

    def test_extended_bodyclass_template_change_password(self):
        """auth/user/change_password.html uses block.super in the bodyclass block."""
        superuser = User.objects.get(username='super')
        password_url = reverse('admin:auth_user_password_change', args=(superuser.id,))
        self._assert_bodyclass_check(self.client.get(password_url))

    def test_extended_bodyclass_template_index(self):
        """admin/index.html uses block.super in the bodyclass block."""
        self._assert_bodyclass_check(self.client.get(reverse('admin:index')))

    def test_extended_bodyclass_change_list(self):
        """admin/change_list.html uses block.super in the bodyclass block."""
        self._assert_bodyclass_check(self.client.get(reverse('admin:admin_views_article_changelist')))

    def test_extended_bodyclass_template_login(self):
        """admin/login.html uses block.super in the bodyclass block."""
        self.client.logout()
        self._assert_bodyclass_check(self.client.get(reverse('admin:login')))

    def test_extended_bodyclass_template_delete_confirmation(self):
        """admin/delete_confirmation.html uses block.super in the bodyclass block."""
        group = Group.objects.create(name="foogroup")
        delete_url = reverse('admin:auth_group_delete', args=(group.id,))
        self._assert_bodyclass_check(self.client.get(delete_url))

    def test_extended_bodyclass_template_delete_selected_confirmation(self):
        """admin/delete_selected_confirmation.html uses block.super in the bodyclass block."""
        group = Group.objects.create(name="foogroup")
        post_data = {
            'action': 'delete_selected',
            'selected_across': '0',
            'index': '0',
            '_selected_action': group.id
        }
        response = self.client.post(reverse('admin:auth_group_changelist'), post_data)
        self.assertEqual(response.context['site_header'], 'Django administration')
        self._assert_bodyclass_check(response)

    def test_filter_with_custom_template(self):
        """
        Ensure that one can use a custom template to render an admin filter.
        Refs #17515.
        """
        response = self.client.get(reverse('admin:admin_views_color2_changelist'))
        self.assertTemplateUsed(response, 'custom_filter_template.html')
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminViewFormUrlTest(TestCase):
    """Tests around form_url / initial-data handling on the 'admin3' site."""

    current_app = "admin3"

    @classmethod
    def setUpTestData(cls):
        # A superuser to authenticate as, one section, and three dated articles.
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')
        cls.s1 = Section.objects.create(name='Test section')
        cls.a1, cls.a2, cls.a3 = (
            Article.objects.create(content=content, date=date, section=cls.s1)
            for content, date in (
                ('<p>Middle content</p>', datetime.datetime(2008, 3, 18, 11, 54, 58)),
                ('<p>Oldest content</p>', datetime.datetime(2000, 3, 18, 11, 54, 58)),
                ('<p>Newest content</p>', datetime.datetime(2009, 3, 18, 11, 54, 58)),
            )
        )
        cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')

    def setUp(self):
        self.client.force_login(self.superuser)

    def test_change_form_URL_has_correct_value(self):
        """
        Tests whether change_view has form_url in response.context
        """
        change_url = reverse(
            'admin:admin_views_section_change', args=(self.s1.pk,), current_app=self.current_app
        )
        response = self.client.get(change_url)
        self.assertIn('form_url', response.context, msg='form_url not present in response.context')
        self.assertEqual(response.context['form_url'], 'pony')

    def test_initial_data_can_be_overridden(self):
        """
        Tests that the behavior for setting initial
        form data can be overridden in the ModelAdmin class.
        Usually, the initial value is set via the GET params.
        """
        add_url = reverse('admin:admin_views_restaurant_add', current_app=self.current_app)
        response = self.client.get(add_url, {'name': 'test_value'})
        # this would be the usual behaviour
        self.assertNotContains(response, 'value="test_value"')
        # this is the overridden behaviour
        self.assertContains(response, 'value="overridden_value"')
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminJavaScriptTest(TestCase):
    """Checks which JavaScript assets the admin serves depending on DEBUG."""

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')

    def setUp(self):
        self.client.force_login(self.superuser)

    def test_js_minified_only_if_debug_is_false(self):
        """
        Ensure that the minified versions of the JS files are only used when
        DEBUG is False.
        Refs #17521.
        """
        # (unminified, minified) pairs the add form references.
        script_pairs = (
            ('vendor/jquery/jquery.js', 'vendor/jquery/jquery.min.js'),
            ('prepopulate.js', 'prepopulate.min.js'),
            ('actions.js', 'actions.min.js'),
            ('collapse.js', 'collapse.min.js'),
            ('inlines.js', 'inlines.min.js'),
        )
        with override_settings(DEBUG=False):
            response = self.client.get(reverse('admin:admin_views_section_add'))
            for plain, minified in script_pairs:
                self.assertNotContains(response, plain)
                self.assertContains(response, minified)
        with override_settings(DEBUG=True):
            response = self.client.get(reverse('admin:admin_views_section_add'))
            for plain, minified in script_pairs:
                self.assertContains(response, plain)
                self.assertNotContains(response, minified)
@override_settings(ROOT_URLCONF='admin_views.urls')
class SaveAsTests(TestCase):
@classmethod
def setUpTestData(cls):
    # Shared fixtures for the "save as" tests: a superuser to log in with
    # and one Person record to duplicate via "Save as new".
    cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')
    cls.per1 = Person.objects.create(name='John Mauchly', gender=1, alive=True)
def setUp(self):
    # Every test runs authenticated as the superuser from setUpTestData().
    self.client.force_login(self.superuser)
def test_save_as_duplication(self):
"""Ensure save as actually creates a new person"""
post_data = {'_saveasnew': '', 'name': 'John M', 'gender': 1, 'age': 42}
self.client.post(reverse('admin:admin_views_person_change', args=(self.per1.pk,)), post_data)
self.assertEqual(len(Person.objects.filter(name='John M')), 1)
self.assertEqual(len(Person.objects.filter(id=self.per1.pk)), 1)
def test_save_as_new_with_validation_errors(self):
"""
Ensure that when you click "Save as new" and have a validation error,
you only see the "Save as new" button and not the other save buttons,
and that only the "Save as" button is visible.
"""
response = self.client.post(reverse('admin:admin_views_person_change', args=(self.per1.pk,)), {
'_saveasnew': '',
'gender': 'invalid',
'_addanother': 'fail',
})
self.assertContains(response, 'Please correct the errors below.')
self.assertFalse(response.context['show_save_and_add_another'])
self.assertFalse(response.context['show_save_and_continue'])
self.assertTrue(response.context['show_save_as_new'])
def test_save_as_new_with_validation_errors_with_inlines(self):
parent = Parent.objects.create(name='Father')
child = Child.objects.create(parent=parent, name='Child')
response = self.client.post(reverse('admin:admin_views_parent_change', args=(parent.pk,)), {
'_saveasnew': 'Save as new',
'child_set-0-parent': parent.pk,
'child_set-0-id': child.pk,
'child_set-0-name': 'Child',
'child_set-INITIAL_FORMS': 1,
'child_set-MAX_NUM_FORMS': 1000,
'child_set-MIN_NUM_FORMS': 0,
'child_set-TOTAL_FORMS': 4,
'name': '_invalid',
})
self.assertContains(response, 'Please correct the error below.')
self.assertFalse(response.context['show_save_and_add_another'])
self.assertFalse(response.context['show_save_and_continue'])
self.assertTrue(response.context['show_save_as_new'])
def test_save_as_new_with_inlines_with_validation_errors(self):
parent = Parent.objects.create(name='Father')
child = Child.objects.create(parent=parent, name='Child')
response = self.client.post(reverse('admin:admin_views_parent_change', args=(parent.pk,)), {
'_saveasnew': 'Save as new',
'child_set-0-parent': parent.pk,
'child_set-0-id': child.pk,
'child_set-0-name': '_invalid',
'child_set-INITIAL_FORMS': 1,
'child_set-MAX_NUM_FORMS': 1000,
'child_set-MIN_NUM_FORMS': 0,
'child_set-TOTAL_FORMS': 4,
'name': 'Father',
})
self.assertContains(response, 'Please correct the error below.')
self.assertFalse(response.context['show_save_and_add_another'])
self.assertFalse(response.context['show_save_and_continue'])
self.assertTrue(response.context['show_save_as_new'])
@override_settings(ROOT_URLCONF='admin_views.urls')
class CustomModelAdminTest(AdminViewBasicTestCase):
    """Exercises custom AdminSite instances ('admin2', 'admin4') and their templates."""

    def test_custom_admin_site_login_form(self):
        self.client.logout()
        resp = self.client.get(reverse('admin2:index'), follow=True)
        self.assertIsInstance(resp, TemplateResponse)
        self.assertEqual(resp.status_code, 200)
        login = self.client.post(reverse('admin2:login'), {
            REDIRECT_FIELD_NAME: reverse('admin2:index'),
            'username': 'customform',
            'password': 'secret',
        }, follow=True)
        self.assertIsInstance(login, TemplateResponse)
        self.assertEqual(login.status_code, 200)
        # The custom login form contributes its own error text and media.
        self.assertContains(login, 'custom form error')
        self.assertContains(login, 'path/to/media.css')

    def test_custom_admin_site_login_template(self):
        self.client.logout()
        resp = self.client.get(reverse('admin2:index'), follow=True)
        self.assertIsInstance(resp, TemplateResponse)
        self.assertTemplateUsed(resp, 'custom_admin/login.html')
        self.assertContains(resp, 'Hello from a custom login template')

    def test_custom_admin_site_logout_template(self):
        resp = self.client.get(reverse('admin2:logout'))
        self.assertIsInstance(resp, TemplateResponse)
        self.assertTemplateUsed(resp, 'custom_admin/logout.html')
        self.assertContains(resp, 'Hello from a custom logout template')

    def test_custom_admin_site_index_view_and_template(self):
        try:
            resp = self.client.get(reverse('admin2:index'))
        except TypeError:
            self.fail('AdminSite.index_template should accept a list of template paths')
        self.assertIsInstance(resp, TemplateResponse)
        self.assertTemplateUsed(resp, 'custom_admin/index.html')
        self.assertContains(resp, 'Hello from a custom index template *bar*')

    def test_custom_admin_site_app_index_view_and_template(self):
        resp = self.client.get(reverse('admin2:app_list', args=('admin_views',)))
        self.assertIsInstance(resp, TemplateResponse)
        self.assertTemplateUsed(resp, 'custom_admin/app_index.html')
        self.assertContains(resp, 'Hello from a custom app_index template')

    def test_custom_admin_site_password_change_template(self):
        resp = self.client.get(reverse('admin2:password_change'))
        self.assertIsInstance(resp, TemplateResponse)
        self.assertTemplateUsed(resp, 'custom_admin/password_change_form.html')
        self.assertContains(resp, 'Hello from a custom password change form template')

    def test_custom_admin_site_password_change_with_extra_context(self):
        resp = self.client.get(reverse('admin2:password_change'))
        self.assertIsInstance(resp, TemplateResponse)
        self.assertTemplateUsed(resp, 'custom_admin/password_change_form.html')
        # The custom view passes extra_context through to the template.
        self.assertContains(resp, 'eggs')

    def test_custom_admin_site_password_change_done_template(self):
        resp = self.client.get(reverse('admin2:password_change_done'))
        self.assertIsInstance(resp, TemplateResponse)
        self.assertTemplateUsed(resp, 'custom_admin/password_change_done.html')
        self.assertContains(resp, 'Hello from a custom password change done template')

    def test_custom_admin_site_view(self):
        self.client.force_login(self.superuser)
        resp = self.client.get(reverse('admin2:my_view'))
        self.assertEqual(resp.content, b"Django is a magical pony!")

    def test_pwd_change_custom_template(self):
        self.client.force_login(self.superuser)
        su = User.objects.get(username='super')
        try:
            resp = self.client.get(
                reverse('admin4:auth_user_password_change', args=(su.pk,))
            )
        except TypeError:
            self.fail('ModelAdmin.change_user_password_template should accept a list of template paths')
        self.assertEqual(resp.status_code, 200)
def get_perm(Model, perm):
    """Look up the Permission with codename *perm* for *Model*'s content type."""
    content_type = ContentType.objects.get_for_model(Model)
    return Permission.objects.get(content_type=content_type, codename=perm)
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminViewPermissionsTest(TestCase):
"""Tests for Admin Views Permissions."""
@classmethod
def setUpTestData(cls):
    """Create the users, fixtures, permissions, and login POST dicts shared by all tests."""
    # One user per permission level exercised below.
    cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')
    cls.adduser = User.objects.create_user(username='adduser', password='secret', is_staff=True)
    cls.changeuser = User.objects.create_user(username='changeuser', password='secret', is_staff=True)
    cls.deleteuser = User.objects.create_user(username='deleteuser', password='secret', is_staff=True)
    # Non-staff users: joepublic gets no permissions at all; nostaff is
    # granted a change permission below but has is_staff=False.
    cls.joepublicuser = User.objects.create_user(username='joepublic', password='secret')
    cls.nostaffuser = User.objects.create_user(username='nostaff', password='secret')
    cls.s1 = Section.objects.create(name='Test section')
    # Three articles with distinct dates (oldest/middle/newest), all in s1.
    cls.a1 = Article.objects.create(
        content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1,
        another_section=cls.s1,
    )
    cls.a2 = Article.objects.create(
        content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
    )
    cls.a3 = Article.objects.create(
        content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
    )
    cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')

    # Setup permissions, for our users who can add, change, and delete.
    opts = Article._meta

    # User who can add Articles
    cls.adduser.user_permissions.add(get_perm(Article, get_permission_codename('add', opts)))
    # User who can change Articles
    cls.changeuser.user_permissions.add(get_perm(Article, get_permission_codename('change', opts)))
    cls.nostaffuser.user_permissions.add(get_perm(Article, get_permission_codename('change', opts)))

    # User who can delete Articles
    cls.deleteuser.user_permissions.add(get_perm(Article, get_permission_codename('delete', opts)))
    # deleteuser can also delete Sections (used by test_delete_view).
    cls.deleteuser.user_permissions.add(get_perm(Section, get_permission_codename('delete', Section._meta)))

    # login POST dicts
    cls.index_url = reverse('admin:index')
    cls.super_login = {
        REDIRECT_FIELD_NAME: cls.index_url,
        'username': 'super',
        'password': 'secret',
    }
    # Login by email address instead of username.
    cls.super_email_login = {
        REDIRECT_FIELD_NAME: cls.index_url,
        'username': 'super@example.com',
        'password': 'secret',
    }
    # Same email, deliberately wrong password.
    cls.super_email_bad_login = {
        REDIRECT_FIELD_NAME: cls.index_url,
        'username': 'super@example.com',
        'password': 'notsecret',
    }
    cls.adduser_login = {
        REDIRECT_FIELD_NAME: cls.index_url,
        'username': 'adduser',
        'password': 'secret',
    }
    cls.changeuser_login = {
        REDIRECT_FIELD_NAME: cls.index_url,
        'username': 'changeuser',
        'password': 'secret',
    }
    cls.deleteuser_login = {
        REDIRECT_FIELD_NAME: cls.index_url,
        'username': 'deleteuser',
        'password': 'secret',
    }
    # Targets the custom site that allows non-staff users in.
    cls.nostaff_login = {
        REDIRECT_FIELD_NAME: reverse('has_permission_admin:index'),
        'username': 'nostaff',
        'password': 'secret',
    }
    cls.joepublic_login = {
        REDIRECT_FIELD_NAME: cls.index_url,
        'username': 'joepublic',
        'password': 'secret',
    }
    # Missing 'username' key on purpose (form validation test).
    cls.no_username_login = {
        REDIRECT_FIELD_NAME: cls.index_url,
        'password': 'secret',
    }
def test_login(self):
    """
    Make sure only staff members can log in.

    Successful posts to the login page will redirect to the original url.
    Unsuccessful attempts will continue to render the login page with
    a 200 status code.
    """
    login_url = '%s?next=%s' % (reverse('admin:login'), reverse('admin:index'))
    # Super User
    response = self.client.get(self.index_url)
    self.assertRedirects(response, login_url)
    login = self.client.post(login_url, self.super_login)
    self.assertRedirects(login, self.index_url)
    # A successful login is a plain redirect, so no template context exists.
    self.assertFalse(login.context)
    self.client.get(reverse('admin:logout'))

    # Test if user enters email address
    response = self.client.get(self.index_url)
    self.assertEqual(response.status_code, 302)
    login = self.client.post(login_url, self.super_email_login)
    self.assertContains(login, ERROR_MESSAGE)
    # only correct passwords get a username hint
    login = self.client.post(login_url, self.super_email_bad_login)
    self.assertContains(login, ERROR_MESSAGE)
    new_user = User(username='jondoe', password='secret', email='super@example.com')
    new_user.save()
    # check to ensure if there are multiple email addresses a user doesn't get a 500
    login = self.client.post(login_url, self.super_email_login)
    self.assertContains(login, ERROR_MESSAGE)

    # Add User
    response = self.client.get(self.index_url)
    self.assertEqual(response.status_code, 302)
    login = self.client.post(login_url, self.adduser_login)
    self.assertRedirects(login, self.index_url)
    self.assertFalse(login.context)
    self.client.get(reverse('admin:logout'))

    # Change User
    response = self.client.get(self.index_url)
    self.assertEqual(response.status_code, 302)
    login = self.client.post(login_url, self.changeuser_login)
    self.assertRedirects(login, self.index_url)
    self.assertFalse(login.context)
    self.client.get(reverse('admin:logout'))

    # Delete User
    response = self.client.get(self.index_url)
    self.assertEqual(response.status_code, 302)
    login = self.client.post(login_url, self.deleteuser_login)
    self.assertRedirects(login, self.index_url)
    self.assertFalse(login.context)
    self.client.get(reverse('admin:logout'))

    # Regular User should not be able to login.
    response = self.client.get(self.index_url)
    self.assertEqual(response.status_code, 302)
    login = self.client.post(login_url, self.joepublic_login)
    self.assertEqual(login.status_code, 200)
    self.assertContains(login, ERROR_MESSAGE)

    # Requests without username should not return 500 errors.
    response = self.client.get(self.index_url)
    self.assertEqual(response.status_code, 302)
    login = self.client.post(login_url, self.no_username_login)
    self.assertEqual(login.status_code, 200)
    form = login.context[0].get('form')
    self.assertEqual(form.errors['username'][0], 'This field is required.')
def test_login_redirect_for_direct_get(self):
    """
    Login redirect should be to the admin index page when going directly to
    /admin/login/.
    """
    resp = self.client.get(reverse('admin:login'))
    self.assertEqual(resp.status_code, 200)
    # With no explicit "next" parameter, the redirect target in the
    # rendered context falls back to the admin index.
    self.assertEqual(resp.context[REDIRECT_FIELD_NAME], reverse('admin:index'))
def test_login_has_permission(self):
    """Login against the site whose has_permission() admits permissioned non-staff users."""
    # Regular User should not be able to login.
    response = self.client.get(reverse('has_permission_admin:index'))
    self.assertEqual(response.status_code, 302)
    login = self.client.post(reverse('has_permission_admin:login'), self.joepublic_login)
    self.assertEqual(login.status_code, 200)
    self.assertContains(login, 'permission denied')

    # User with permissions should be able to login.
    response = self.client.get(reverse('has_permission_admin:index'))
    self.assertEqual(response.status_code, 302)
    login = self.client.post(reverse('has_permission_admin:login'), self.nostaff_login)
    self.assertRedirects(login, reverse('has_permission_admin:index'))
    # Successful login is a plain redirect; no template context exists.
    self.assertFalse(login.context)
    self.client.get(reverse('has_permission_admin:logout'))

    # Staff should be able to login.
    response = self.client.get(reverse('has_permission_admin:index'))
    self.assertEqual(response.status_code, 302)
    login = self.client.post(reverse('has_permission_admin:login'), {
        REDIRECT_FIELD_NAME: reverse('has_permission_admin:index'),
        'username': 'deleteuser',
        'password': 'secret',
    })
    self.assertRedirects(login, reverse('has_permission_admin:index'))
    self.assertFalse(login.context)
    self.client.get(reverse('has_permission_admin:logout'))
def test_login_successfully_redirects_to_original_URL(self):
    """After login, the user lands on the URL (incl. query string) they originally requested."""
    resp = self.client.get(self.index_url)
    self.assertEqual(resp.status_code, 302)
    # Target URL carries a query string that must survive the round trip.
    redirect_url = '%s?%s' % (self.index_url, 'the-answer=42')
    credentials = self.super_login.copy()
    # Drop the redirect field from the body; pass it via the URL instead.
    credentials.pop(REDIRECT_FIELD_NAME)
    login = self.client.post(
        '%s?%s' % (reverse('admin:login'), urlencode({REDIRECT_FIELD_NAME: redirect_url})),
        credentials)
    self.assertRedirects(login, redirect_url)
def test_double_login_is_not_allowed(self):
    """Regression test for #19327"""
    login_url = '%s?next=%s' % (reverse('admin:login'), reverse('admin:index'))

    response = self.client.get(self.index_url)
    self.assertEqual(response.status_code, 302)

    # Establish a valid admin session
    login = self.client.post(login_url, self.super_login)
    self.assertRedirects(login, self.index_url)
    # Redirect responses render no template, so there is no context.
    self.assertFalse(login.context)

    # Logging in with non-admin user fails
    login = self.client.post(login_url, self.joepublic_login)
    self.assertEqual(login.status_code, 200)
    self.assertContains(login, ERROR_MESSAGE)

    # Establish a valid admin session
    login = self.client.post(login_url, self.super_login)
    self.assertRedirects(login, self.index_url)
    self.assertFalse(login.context)

    # Logging in with admin user while already logged in
    login = self.client.post(login_url, self.super_login)
    self.assertRedirects(login, self.index_url)
    self.assertFalse(login.context)
    self.client.get(reverse('admin:logout'))
def test_login_page_notice_for_non_staff_users(self):
    """
    A logged-in non-staff user trying to access the admin index should be
    presented with the login page and a hint indicating that the current
    user doesn't have access to it.
    """
    hint_template = 'You are authenticated as {}'

    # Anonymous user should not be shown the hint
    response = self.client.get(self.index_url, follow=True)
    self.assertContains(response, 'login-form')
    self.assertNotContains(response, hint_template.format(''), status_code=200)

    # Non-staff user should be shown the hint
    self.client.force_login(self.nostaffuser)
    response = self.client.get(self.index_url, follow=True)
    self.assertContains(response, 'login-form')
    self.assertContains(response, hint_template.format(self.nostaffuser.username), status_code=200)
def test_add_view(self):
    """Test add view restricts access and actually adds items."""
    add_dict = {'title': 'Døm ikke',
                'content': '<p>great article</p>',
                'date_0': '2008-03-18', 'date_1': '10:54:39',
                'section': self.s1.pk}

    # Change User should not have access to add articles
    self.client.force_login(self.changeuser)
    # make sure the view removes test cookie
    self.assertEqual(self.client.session.test_cookie_worked(), False)
    response = self.client.get(reverse('admin:admin_views_article_add'))
    self.assertEqual(response.status_code, 403)
    # Try POST just to make sure
    post = self.client.post(reverse('admin:admin_views_article_add'), add_dict)
    self.assertEqual(post.status_code, 403)
    # Still only the three fixture articles.
    self.assertEqual(Article.objects.count(), 3)
    self.client.get(reverse('admin:logout'))

    # Add user may login and POST to add view, then redirect to admin root
    self.client.force_login(self.adduser)
    addpage = self.client.get(reverse('admin:admin_views_article_add'))
    change_list_link = '› <a href="%s">Articles</a>' % reverse('admin:admin_views_article_changelist')
    self.assertNotContains(
        addpage, change_list_link,
        msg_prefix='User restricted to add permission is given link to change list view in breadcrumbs.'
    )
    post = self.client.post(reverse('admin:admin_views_article_add'), add_dict)
    self.assertRedirects(post, self.index_url)
    self.assertEqual(Article.objects.count(), 4)
    # The add produced exactly one notification email.
    self.assertEqual(len(mail.outbox), 1)
    self.assertEqual(mail.outbox[0].subject, 'Greetings from a created object')
    self.client.get(reverse('admin:logout'))

    # Check that the addition was logged correctly
    addition_log = LogEntry.objects.all()[0]
    new_article = Article.objects.last()
    article_ct = ContentType.objects.get_for_model(Article)
    self.assertEqual(addition_log.user_id, self.adduser.pk)
    self.assertEqual(addition_log.content_type_id, article_ct.pk)
    self.assertEqual(addition_log.object_id, str(new_article.pk))
    self.assertEqual(addition_log.object_repr, "Døm ikke")
    self.assertEqual(addition_log.action_flag, ADDITION)
    self.assertEqual(addition_log.get_change_message(), "Added.")

    # Super can add too, but is redirected to the change list view
    self.client.force_login(self.superuser)
    addpage = self.client.get(reverse('admin:admin_views_article_add'))
    self.assertContains(
        addpage, change_list_link,
        msg_prefix='Unrestricted user is not given link to change list view in breadcrumbs.'
    )
    post = self.client.post(reverse('admin:admin_views_article_add'), add_dict)
    self.assertRedirects(post, reverse('admin:admin_views_article_changelist'))
    self.assertEqual(Article.objects.count(), 5)
    self.client.get(reverse('admin:logout'))

    # 8509 - if a normal user is already logged in, it is possible
    # to change user into the superuser without error
    self.client.force_login(self.joepublicuser)
    # Check and make sure that if user expires, data still persists
    self.client.force_login(self.superuser)
    # make sure the view removes test cookie
    self.assertEqual(self.client.session.test_cookie_worked(), False)
def test_change_view(self):
    """Change view should restrict access and allow users to edit items."""
    change_dict = {'title': 'Ikke fordømt',
                   'content': '<p>edited article</p>',
                   'date_0': '2008-03-18', 'date_1': '10:54:39',
                   'section': self.s1.pk}
    article_change_url = reverse('admin:admin_views_article_change', args=(self.a1.pk,))
    article_changelist_url = reverse('admin:admin_views_article_changelist')

    # add user should not be able to view the list of article or change any of them
    self.client.force_login(self.adduser)
    response = self.client.get(article_changelist_url)
    self.assertEqual(response.status_code, 403)
    response = self.client.get(article_change_url)
    self.assertEqual(response.status_code, 403)
    post = self.client.post(article_change_url, change_dict)
    self.assertEqual(post.status_code, 403)
    self.client.get(reverse('admin:logout'))

    # change user can view all items and edit them
    self.client.force_login(self.changeuser)
    response = self.client.get(article_changelist_url)
    self.assertEqual(response.status_code, 200)
    response = self.client.get(article_change_url)
    self.assertEqual(response.status_code, 200)
    post = self.client.post(article_change_url, change_dict)
    self.assertRedirects(post, article_changelist_url)
    self.assertEqual(Article.objects.get(pk=self.a1.pk).content, '<p>edited article</p>')

    # one error in form should produce singular error message, multiple errors plural
    change_dict['title'] = ''
    post = self.client.post(article_change_url, change_dict)
    self.assertContains(
        post, 'Please correct the error below.',
        msg_prefix='Singular error message not found in response to post with one error'
    )
    change_dict['content'] = ''
    post = self.client.post(article_change_url, change_dict)
    self.assertContains(
        post, 'Please correct the errors below.',
        msg_prefix='Plural error message not found in response to post with multiple errors'
    )
    self.client.get(reverse('admin:logout'))

    # Test redirection when using row-level change permissions. Refs #11513.
    r1 = RowLevelChangePermissionModel.objects.create(id=1, name="odd id")
    r2 = RowLevelChangePermissionModel.objects.create(id=2, name="even id")
    change_url_1 = reverse('admin:admin_views_rowlevelchangepermissionmodel_change', args=(r1.pk,))
    change_url_2 = reverse('admin:admin_views_rowlevelchangepermissionmodel_change', args=(r2.pk,))
    # Staff users: the id=1 object is forbidden (403, unchanged), the id=2
    # object can be changed (200, then redirect to the index).
    for login_user in [self.superuser, self.adduser, self.changeuser, self.deleteuser]:
        self.client.force_login(login_user)
        response = self.client.get(change_url_1)
        self.assertEqual(response.status_code, 403)
        response = self.client.post(change_url_1, {'name': 'changed'})
        self.assertEqual(RowLevelChangePermissionModel.objects.get(id=1).name, 'odd id')
        self.assertEqual(response.status_code, 403)
        response = self.client.get(change_url_2)
        self.assertEqual(response.status_code, 200)
        response = self.client.post(change_url_2, {'name': 'changed'})
        self.assertEqual(RowLevelChangePermissionModel.objects.get(id=2).name, 'changed')
        self.assertRedirects(response, self.index_url)
        self.client.get(reverse('admin:logout'))
    # Non-staff users are always bounced to the login form; no data changes.
    for login_user in [self.joepublicuser, self.nostaffuser]:
        self.client.force_login(login_user)
        response = self.client.get(change_url_1, follow=True)
        self.assertContains(response, 'login-form')
        response = self.client.post(change_url_1, {'name': 'changed'}, follow=True)
        self.assertEqual(RowLevelChangePermissionModel.objects.get(id=1).name, 'odd id')
        self.assertContains(response, 'login-form')
        response = self.client.get(change_url_2, follow=True)
        self.assertContains(response, 'login-form')
        response = self.client.post(change_url_2, {'name': 'changed again'}, follow=True)
        self.assertEqual(RowLevelChangePermissionModel.objects.get(id=2).name, 'changed')
        self.assertContains(response, 'login-form')
        self.client.get(reverse('admin:logout'))
def test_change_view_save_as_new(self):
    """
    'Save as new' should raise PermissionDenied for users without the 'add'
    permission.
    """
    change_dict_save_as_new = {
        '_saveasnew': 'Save as new',
        'title': 'Ikke fordømt',
        'content': '<p>edited article</p>',
        'date_0': '2008-03-18', 'date_1': '10:54:39',
        'section': self.s1.pk,
    }
    article_change_url = reverse('admin:admin_views_article_change', args=(self.a1.pk,))

    # Add user can perform "Save as new".
    article_count = Article.objects.count()
    self.client.force_login(self.adduser)
    post = self.client.post(article_change_url, change_dict_save_as_new)
    self.assertRedirects(post, self.index_url)
    # A new Article was created.
    self.assertEqual(Article.objects.count(), article_count + 1)
    self.client.logout()

    # Change user cannot perform "Save as new" (no 'add' permission).
    article_count = Article.objects.count()
    self.client.force_login(self.changeuser)
    post = self.client.post(article_change_url, change_dict_save_as_new)
    self.assertEqual(post.status_code, 403)
    # Nothing was created.
    self.assertEqual(Article.objects.count(), article_count)
def test_delete_view(self):
    """Delete view should restrict access and actually delete items."""
    delete_dict = {'post': 'yes'}
    delete_url = reverse('admin:admin_views_article_delete', args=(self.a1.pk,))

    # add user should not be able to delete articles
    self.client.force_login(self.adduser)
    response = self.client.get(delete_url)
    self.assertEqual(response.status_code, 403)
    post = self.client.post(delete_url, delete_dict)
    self.assertEqual(post.status_code, 403)
    # All three fixture articles still exist.
    self.assertEqual(Article.objects.count(), 3)
    self.client.logout()

    # Delete user can delete
    self.client.force_login(self.deleteuser)
    response = self.client.get(reverse('admin:admin_views_section_delete', args=(self.s1.pk,)))
    self.assertContains(response, "<h2>Summary</h2>")
    # Deleting the section would cascade to its three articles.
    self.assertContains(response, "<li>Articles: 3</li>")
    # test response contains link to related Article
    self.assertContains(response, "admin_views/article/%s/" % self.a1.pk)

    response = self.client.get(delete_url)
    self.assertContains(response, "admin_views/article/%s/" % self.a1.pk)
    self.assertContains(response, "<h2>Summary</h2>")
    self.assertContains(response, "<li>Articles: 1</li>")
    self.assertEqual(response.status_code, 200)
    post = self.client.post(delete_url, delete_dict)
    self.assertRedirects(post, self.index_url)
    self.assertEqual(Article.objects.count(), 2)
    # The deletion produced exactly one notification email.
    self.assertEqual(len(mail.outbox), 1)
    self.assertEqual(mail.outbox[0].subject, 'Greetings from a deleted object')
    # The deletion was recorded in the admin log.
    article_ct = ContentType.objects.get_for_model(Article)
    logged = LogEntry.objects.get(content_type=article_ct, action_flag=DELETION)
    self.assertEqual(logged.object_id, str(self.a1.pk))
def test_history_view(self):
    """History view should restrict access."""
    # add user should not be able to view the list of article or change any of them
    self.client.force_login(self.adduser)
    response = self.client.get(reverse('admin:admin_views_article_history', args=(self.a1.pk,)))
    self.assertEqual(response.status_code, 403)
    self.client.get(reverse('admin:logout'))

    # change user can view all items and edit them
    self.client.force_login(self.changeuser)
    response = self.client.get(reverse('admin:admin_views_article_history', args=(self.a1.pk,)))
    self.assertEqual(response.status_code, 200)

    # Test redirection when using row-level change permissions. Refs #11513.
    rl1 = RowLevelChangePermissionModel.objects.create(name="odd id")
    rl2 = RowLevelChangePermissionModel.objects.create(name="even id")
    # Staff users: rl1's history is forbidden, rl2's is visible.
    for login_user in [self.superuser, self.adduser, self.changeuser, self.deleteuser]:
        self.client.force_login(login_user)
        url = reverse('admin:admin_views_rowlevelchangepermissionmodel_history', args=(rl1.pk,))
        response = self.client.get(url)
        self.assertEqual(response.status_code, 403)
        url = reverse('admin:admin_views_rowlevelchangepermissionmodel_history', args=(rl2.pk,))
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        self.client.get(reverse('admin:logout'))
    # Non-staff users are redirected to the login form for both objects.
    for login_user in [self.joepublicuser, self.nostaffuser]:
        self.client.force_login(login_user)
        url = reverse('admin:admin_views_rowlevelchangepermissionmodel_history', args=(rl1.pk,))
        response = self.client.get(url, follow=True)
        self.assertContains(response, 'login-form')
        url = reverse('admin:admin_views_rowlevelchangepermissionmodel_history', args=(rl2.pk,))
        response = self.client.get(url, follow=True)
        self.assertContains(response, 'login-form')
        self.client.get(reverse('admin:logout'))
def test_history_view_bad_url(self):
    """A non-numeric object id in the history URL yields a 404, not a crash."""
    self.client.force_login(self.changeuser)
    resp = self.client.get(reverse('admin:admin_views_article_history', args=('foo',)))
    self.assertEqual(resp.status_code, 404)
def test_conditionally_show_add_section_link(self):
    """
    The foreign key widget should only show the "add related" button if the
    user has permission to add that related item.
    """
    self.client.force_login(self.adduser)
    url = reverse('admin:admin_views_article_add')
    add_link_text = 'add_id_section'
    # Without the Section "add" permission the link must be absent.
    self.assertNotContains(self.client.get(url), add_link_text)
    # Grant the permission; the link now appears.
    User.objects.get(username='adduser').user_permissions.add(
        get_perm(Section, get_permission_codename('add', Section._meta))
    )
    self.assertContains(self.client.get(url), add_link_text)
def test_conditionally_show_change_section_link(self):
    """
    The foreign key widget should only show the "change related" button if
    the user has permission to change that related item.
    """
    def widget_can_change(resp):
        return resp.context['adminform'].form.fields['section'].widget.can_change_related

    self.client.force_login(self.adduser)
    url = reverse('admin:admin_views_article_add')
    change_link_text = 'change_id_section'
    # Without the Section "change" permission: no widget flag, no link.
    resp = self.client.get(url)
    self.assertFalse(widget_can_change(resp))
    self.assertNotContains(resp, change_link_text)
    # Grant the permission; flag and link both appear.
    User.objects.get(username='adduser').user_permissions.add(
        get_perm(Section, get_permission_codename('change', Section._meta))
    )
    resp = self.client.get(url)
    self.assertTrue(widget_can_change(resp))
    self.assertContains(resp, change_link_text)
def test_conditionally_show_delete_section_link(self):
    """
    The foreign key widget should only show the "delete related" button if
    the user has permission to delete that related item.
    """
    def widget_can_delete(resp):
        return resp.context['adminform'].form.fields['sub_section'].widget.can_delete_related

    self.client.force_login(self.adduser)
    url = reverse('admin:admin_views_article_add')
    delete_link_text = 'delete_id_sub_section'
    # Without the Section "delete" permission: no widget flag, no link.
    resp = self.client.get(url)
    self.assertFalse(widget_can_delete(resp))
    self.assertNotContains(resp, delete_link_text)
    # Grant the permission; flag and link both appear.
    User.objects.get(username='adduser').user_permissions.add(
        get_perm(Section, get_permission_codename('delete', Section._meta))
    )
    resp = self.client.get(url)
    self.assertTrue(widget_can_delete(resp))
    self.assertContains(resp, delete_link_text)
def test_disabled_permissions_when_logged_in(self):
    """Deactivating a logged-in superuser drops them back to the login form."""
    self.client.force_login(self.superuser)
    account = User.objects.get(username='super')
    account.is_active = False
    account.save()
    # The admin index now renders the login form instead of the dashboard.
    resp = self.client.get(self.index_url, follow=True)
    self.assertContains(resp, 'id="login-form"')
    self.assertNotContains(resp, 'Log out')
    # A login-protected view is equally inaccessible.
    resp = self.client.get(reverse('secure_view'), follow=True)
    self.assertContains(resp, 'id="login-form"')
def test_disabled_staff_permissions_when_logged_in(self):
    """Clearing is_staff on a logged-in superuser locks them out of the admin."""
    self.client.force_login(self.superuser)
    account = User.objects.get(username='super')
    account.is_staff = False
    account.save()
    # The admin index now renders the login form instead of the dashboard.
    resp = self.client.get(self.index_url, follow=True)
    self.assertContains(resp, 'id="login-form"')
    self.assertNotContains(resp, 'Log out')
    # A login-protected view is equally inaccessible.
    resp = self.client.get(reverse('secure_view'), follow=True)
    self.assertContains(resp, 'id="login-form"')
def test_app_index_fail_early(self):
    """
    If a user has no module perms, avoid iterating over all the modeladmins
    in the registry.
    """
    change_user = User.objects.get(username='changeuser')
    perm = get_perm(Article, get_permission_codename('change', Article._meta))
    self.client.force_login(self.changeuser)
    app_url = reverse('admin:app_list', args=('admin_views',))
    # Stripped of its only permission, the user gets an early 403.
    change_user.user_permissions.remove(perm)
    self.assertEqual(self.client.get(app_url).status_code, 403)
    # With the permission restored, the app index renders normally.
    change_user.user_permissions.add(perm)
    self.assertEqual(self.client.get(app_url).status_code, 200)
def test_shortcut_view_only_available_to_staff(self):
    """
    Only admin users should be able to use the admin shortcut view.
    """
    model_ctype = ContentType.objects.get_for_model(ModelWithStringPrimaryKey)
    obj = ModelWithStringPrimaryKey.objects.create(string_pk='foo')
    shortcut_url = reverse('admin:view_on_site', args=(model_ctype.pk, obj.pk))

    # Not logged in: we should see the login page.
    response = self.client.get(shortcut_url, follow=True)
    self.assertTemplateUsed(response, 'admin/login.html')

    # Logged in? Redirect.
    self.client.force_login(self.superuser)
    response = self.client.get(shortcut_url, follow=False)
    # Can't use self.assertRedirects() because User.get_absolute_url() is silly.
    self.assertEqual(response.status_code, 302)
    # Domain may depend on contrib.sites tests also run
    six.assertRegex(self, response.url, 'http://(testserver|example.com)/dummy/foo/')
def test_has_module_permission(self):
"""
Ensure that has_module_permission() returns True for all users who
have any permission for that module (add, change, or delete), so that
the module is displayed on the admin index page.
"""
self.client.force_login(self.superuser)
response = self.client.get(self.index_url)
self.assertContains(response, 'admin_views')
self.assertContains(response, 'Articles')
self.client.logout()
self.client.force_login(self.adduser)
response = self.client.get(self.index_url)
self.assertContains(response, 'admin_views')
self.assertContains(response, 'Articles')
self.client.logout()
self.client.force_login(self.changeuser)
response = self.client.get(self.index_url)
self.assertContains(response, 'admin_views')
self.assertContains(response, 'Articles')
self.client.logout()
self.client.force_login(self.deleteuser)
response = self.client.get(self.index_url)
self.assertContains(response, 'admin_views')
self.assertContains(response, 'Articles')
def test_overriding_has_module_permission(self):
"""
Ensure that overriding has_module_permission() has the desired effect.
In this case, it always returns False, so the module should not be
displayed on the admin index page for any users.
"""
index_url = reverse('admin7:index')
self.client.force_login(self.superuser)
response = self.client.get(index_url)
self.assertNotContains(response, 'admin_views')
self.assertNotContains(response, 'Articles')
self.client.logout()
self.client.force_login(self.adduser)
response = self.client.get(index_url)
self.assertNotContains(response, 'admin_views')
self.assertNotContains(response, 'Articles')
self.client.logout()
self.client.force_login(self.changeuser)
response = self.client.get(index_url)
self.assertNotContains(response, 'admin_views')
self.assertNotContains(response, 'Articles')
self.client.logout()
self.client.force_login(self.deleteuser)
response = self.client.get(index_url)
self.assertNotContains(response, 'admin_views')
self.assertNotContains(response, 'Articles')
    def test_post_save_message_no_forbidden_links_visible(self):
        """
        Post-save message shouldn't contain a link to the change form if the
        user doesn't have the change permission.
        """
        self.client.force_login(self.adduser)
        # Emulate Article creation for user with add-only permission.
        post_data = {
            "title": "Fun & games",
            "content": "Some content",
            "date_0": "2015-10-31",
            "date_1": "16:35:00",
            "_save": "Save",
        }
        response = self.client.post(reverse('admin:admin_views_article_add'), post_data, follow=True)
        # The success message must render the title as plain text, with no
        # <a> link back to the change form (the user can't change articles).
        self.assertContains(
            response,
            '<li class="success">The article "Fun & games" was added successfully.</li>',
            html=True
        )
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminViewsNoUrlTest(TestCase):
    """Regression test for #17333"""
    @classmethod
    def setUpTestData(cls):
        # A staff user whose only permission is changing Reports.
        cls.changeuser = User.objects.create_user(username='changeuser', password='secret', is_staff=True)
        codename = get_permission_codename('change', Report._meta)
        cls.changeuser.user_permissions.add(get_perm(Report, codename))
    def test_no_standard_modeladmin_urls(self):
        """Admin index views don't break when user's ModelAdmin removes standard urls"""
        self.client.force_login(self.changeuser)
        response = self.client.get(reverse('admin:index'))
        # A NoReverseMatch while rendering the index would surface as a 500.
        self.assertEqual(response.status_code, 200)
        self.client.get(reverse('admin:logout'))
@skipUnlessDBFeature('can_defer_constraint_checks')
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminViewDeletedObjectsTest(TestCase):
    """
    Tests of the "objects to be deleted" summary rendered on the admin
    delete-confirmation page: nesting, cycles, permissions, protected
    relations, generic relations, and model inheritance.
    """
    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')
        cls.deleteuser = User.objects.create_user(username='deleteuser', password='secret', is_staff=True)
        cls.s1 = Section.objects.create(name='Test section')
        cls.a1 = Article.objects.create(
            content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a2 = Article.objects.create(
            content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a3 = Article.objects.create(
            content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
        # Villains, plots, and hideouts form a small object graph used to
        # exercise the nested deleted-objects listing.
        cls.v1 = Villain.objects.create(name='Adam')
        cls.v2 = Villain.objects.create(name='Sue')
        cls.sv1 = SuperVillain.objects.create(name='Bob')
        cls.pl1 = Plot.objects.create(name='World Domination', team_leader=cls.v1, contact=cls.v2)
        cls.pl2 = Plot.objects.create(name='World Peace', team_leader=cls.v2, contact=cls.v2)
        cls.pl3 = Plot.objects.create(name='Corn Conspiracy', team_leader=cls.v1, contact=cls.v1)
        cls.pd1 = PlotDetails.objects.create(details='almost finished', plot=cls.pl1)
        cls.sh1 = SecretHideout.objects.create(location='underground bunker', villain=cls.v1)
        cls.sh2 = SecretHideout.objects.create(location='floating castle', villain=cls.sv1)
        cls.ssh1 = SuperSecretHideout.objects.create(location='super floating castle!', supervillain=cls.sv1)
        # Mutually referencing rows -- this is why the class requires
        # can_defer_constraint_checks.
        cls.cy1 = CyclicOne.objects.create(name='I am recursive', two_id=1)
        cls.cy2 = CyclicTwo.objects.create(name='I am recursive too', one_id=1)
    def setUp(self):
        # Every test in this class runs as the superuser unless it
        # explicitly logs in as somebody else.
        self.client.force_login(self.superuser)
    def test_nesting(self):
        """
        Objects should be nested to display the relationships that
        cause them to be scheduled for deletion.
        """
        # response.content is bytes, hence force_bytes() on the pattern.
        pattern = re.compile(
            force_bytes(
                r'<li>Plot: <a href="%s">World Domination</a>\s*<ul>\s*'
                r'<li>Plot details: <a href="%s">almost finished</a>' % (
                    reverse('admin:admin_views_plot_change', args=(self.pl1.pk,)),
                    reverse('admin:admin_views_plotdetails_change', args=(self.pd1.pk,)),
                )
            )
        )
        response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.v1.pk,)))
        six.assertRegex(self, response.content, pattern)
    def test_cyclic(self):
        """
        Cyclic relationships should still cause each object to only be
        listed once.
        """
        one = '<li>Cyclic one: <a href="%s">I am recursive</a>' % (
            reverse('admin:admin_views_cyclicone_change', args=(self.cy1.pk,)),
        )
        two = '<li>Cyclic two: <a href="%s">I am recursive too</a>' % (
            reverse('admin:admin_views_cyclictwo_change', args=(self.cy2.pk,)),
        )
        response = self.client.get(reverse('admin:admin_views_cyclicone_delete', args=(self.cy1.pk,)))
        # Each object appears exactly once despite the reference cycle.
        self.assertContains(response, one, 1)
        self.assertContains(response, two, 1)
    def test_perms_needed(self):
        """
        A user who may delete a Plot but not its PlotDetails is told which
        related object types block the deletion.
        """
        self.client.logout()
        delete_user = User.objects.get(username='deleteuser')
        # Grant delete on Plot only -- not on the related PlotDetails.
        delete_user.user_permissions.add(get_perm(Plot, get_permission_codename('delete', Plot._meta)))
        self.client.force_login(self.deleteuser)
        response = self.client.get(reverse('admin:admin_views_plot_delete', args=(self.pl1.pk,)))
        self.assertContains(response, "your account doesn't have permission to delete the following types of objects")
        self.assertContains(response, "<li>plot details</li>")
    def test_protected(self):
        """
        PROTECT-related objects are listed as blocking the deletion.
        """
        q = Question.objects.create(question="Why?")
        a1 = Answer.objects.create(question=q, answer="Because.")
        a2 = Answer.objects.create(question=q, answer="Yes.")
        response = self.client.get(reverse('admin:admin_views_question_delete', args=(q.pk,)))
        self.assertContains(response, "would require deleting the following protected related objects")
        self.assertContains(
            response,
            '<li>Answer: <a href="%s">Because.</a></li>' % reverse('admin:admin_views_answer_change', args=(a1.pk,))
        )
        self.assertContains(
            response,
            '<li>Answer: <a href="%s">Yes.</a></li>' % reverse('admin:admin_views_answer_change', args=(a2.pk,))
        )
    def test_post_delete_protected(self):
        """
        A POST request to delete protected objects should display the page
        which says the deletion is prohibited.
        """
        q = Question.objects.create(question='Why?')
        Answer.objects.create(question=q, answer='Because.')
        response = self.client.post(reverse('admin:admin_views_question_delete', args=(q.pk,)), {'post': 'yes'})
        # The Question must NOT have been deleted.
        self.assertEqual(Question.objects.count(), 1)
        self.assertContains(response, "would require deleting the following protected related objects")
    def test_not_registered(self):
        """
        Related objects whose model isn't registered in this admin site are
        listed as plain text (no change-form link).
        """
        should_contain = """<li>Secret hideout: underground bunker"""
        response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.v1.pk,)))
        self.assertContains(response, should_contain, 1)
    def test_multiple_fkeys_to_same_model(self):
        """
        If a deleted object has two relationships from another model,
        both of those should be followed in looking for related
        objects to delete.
        """
        should_contain = '<li>Plot: <a href="%s">World Domination</a>' % reverse(
            'admin:admin_views_plot_change', args=(self.pl1.pk,)
        )
        # pl1 references v1 as team_leader and v2 as contact; deleting
        # either villain must list the plot.
        response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.v1.pk,)))
        self.assertContains(response, should_contain)
        response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.v2.pk,)))
        self.assertContains(response, should_contain)
    def test_multiple_fkeys_to_same_instance(self):
        """
        If a deleted object has two relationships pointing to it from
        another object, the other object should still only be listed
        once.
        """
        # pl2 references v2 through both team_leader and contact.
        should_contain = '<li>Plot: <a href="%s">World Peace</a></li>' % reverse(
            'admin:admin_views_plot_change', args=(self.pl2.pk,)
        )
        response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.v2.pk,)))
        self.assertContains(response, should_contain, 1)
    def test_inheritance(self):
        """
        In the case of an inherited model, if either the child or
        parent-model instance is deleted, both instances are listed
        for deletion, as well as any relationships they have.
        """
        should_contain = [
            '<li>Villain: <a href="%s">Bob</a>' % reverse('admin:admin_views_villain_change', args=(self.sv1.pk,)),
            '<li>Super villain: <a href="%s">Bob</a>' % reverse(
                'admin:admin_views_supervillain_change', args=(self.sv1.pk,)
            ),
            '<li>Secret hideout: floating castle',
            '<li>Super secret hideout: super floating castle!',
        ]
        # Deleting the parent (Villain) row...
        response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.sv1.pk,)))
        for should in should_contain:
            self.assertContains(response, should, 1)
        # ...and deleting the child (SuperVillain) row list the same objects.
        response = self.client.get(reverse('admin:admin_views_supervillain_delete', args=(self.sv1.pk,)))
        for should in should_contain:
            self.assertContains(response, should, 1)
    def test_generic_relations(self):
        """
        If a deleted object has GenericForeignKeys pointing to it,
        those objects should be listed for deletion.
        """
        plot = self.pl3
        tag = FunkyTag.objects.create(content_object=plot, name='hott')
        should_contain = '<li>Funky tag: <a href="%s">hott' % reverse(
            'admin:admin_views_funkytag_change', args=(tag.id,))
        response = self.client.get(reverse('admin:admin_views_plot_delete', args=(plot.pk,)))
        self.assertContains(response, should_contain)
    def test_generic_relations_with_related_query_name(self):
        """
        If a deleted object has GenericForeignKey with
        GenericRelation(related_query_name='...') pointing to it, those objects
        should be listed for deletion.
        """
        bookmark = Bookmark.objects.create(name='djangoproject')
        tag = FunkyTag.objects.create(content_object=bookmark, name='django')
        tag_url = reverse('admin:admin_views_funkytag_change', args=(tag.id,))
        should_contain = '<li>Funky tag: <a href="%s">django' % tag_url
        response = self.client.get(reverse('admin:admin_views_bookmark_delete', args=(bookmark.pk,)))
        self.assertContains(response, should_contain)
@override_settings(ROOT_URLCONF='admin_views.urls')
class TestGenericRelations(TestCase):
    """Rendering of generic relations in admin changelists."""
    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')
        cls.v1 = Villain.objects.create(name='Adam')
        cls.pl3 = Plot.objects.create(name='Corn Conspiracy', team_leader=cls.v1, contact=cls.v1)
    def setUp(self):
        self.client.force_login(self.superuser)
    def test_generic_content_object_in_list_display(self):
        # The tag's generic content object should be rendered in its cell.
        FunkyTag.objects.create(content_object=self.pl3, name='hott')
        changelist_url = reverse('admin:admin_views_funkytag_changelist')
        response = self.client.get(changelist_url)
        self.assertContains(response, "%s</td>" % self.pl3)
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminViewStringPrimaryKeyTest(TestCase):
    """
    Admin views for a model whose string primary key contains characters
    that require URL quoting and HTML escaping.
    """
    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')
        cls.s1 = Section.objects.create(name='Test section')
        cls.a1 = Article.objects.create(
            content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a2 = Article.objects.create(
            content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a3 = Article.objects.create(
            content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
        # A primary key covering alphanumerics plus characters that need
        # quoting/escaping in URLs and HTML.
        cls.pk = (
            "abcdefghijklmnopqrstuvwxyz ABCDEFGHIJKLMNOPQRSTUVWXYZ 1234567890 "
            """-_.!~*'() ;/?:@&=+$, <>#%" {}|\^[]`"""
        )
        cls.m1 = ModelWithStringPrimaryKey.objects.create(string_pk=cls.pk)
        content_type_pk = ContentType.objects.get_for_model(ModelWithStringPrimaryKey).pk
        user_pk = cls.superuser.pk
        # Action flag 2 is CHANGE; gives the object a history entry.
        LogEntry.objects.log_action(user_pk, content_type_pk, cls.pk, cls.pk, 2, change_message='Changed something')
    def setUp(self):
        # All tests run as the superuser.
        self.client.force_login(self.superuser)
    def test_get_history_view(self):
        """
        Retrieving the history for an object using urlencoded form of primary
        key should work.
        Refs #12349, #18550.
        """
        response = self.client.get(reverse('admin:admin_views_modelwithstringprimarykey_history', args=(self.pk,)))
        self.assertContains(response, escape(self.pk))
        self.assertContains(response, 'Changed something')
        self.assertEqual(response.status_code, 200)
    def test_get_change_view(self):
        "Retrieving the object using urlencoded form of primary key should work"
        response = self.client.get(reverse('admin:admin_views_modelwithstringprimarykey_change', args=(self.pk,)))
        self.assertContains(response, escape(self.pk))
        self.assertEqual(response.status_code, 200)
    def test_changelist_to_changeform_link(self):
        "Link to the changeform of the object in changelist should use reverse() and be quoted -- #18072"
        response = self.client.get(reverse('admin:admin_views_modelwithstringprimarykey_changelist'))
        # this URL now comes through reverse(), thus url quoting and iri_to_uri encoding
        pk_final_url = escape(iri_to_uri(quote(self.pk)))
        change_url = reverse(
            'admin:admin_views_modelwithstringprimarykey_change', args=('__fk__',)
        ).replace('__fk__', pk_final_url)
        should_contain = '<th class="field-__str__"><a href="%s">%s</a></th>' % (change_url, escape(self.pk))
        self.assertContains(response, should_contain)
    def test_recentactions_link(self):
        "The link from the recent actions list referring to the changeform of the object should be quoted"
        response = self.client.get(reverse('admin:index'))
        link = reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(self.pk),))
        should_contain = """<a href="%s">%s</a>""" % (escape(link), escape(self.pk))
        self.assertContains(response, should_contain)
    def test_deleteconfirmation_link(self):
        "The link from the delete confirmation page referring back to the changeform of the object should be quoted"
        url = reverse('admin:admin_views_modelwithstringprimarykey_delete', args=(quote(self.pk),))
        response = self.client.get(url)
        # this URL now comes through reverse(), thus url quoting and iri_to_uri encoding
        change_url = reverse(
            'admin:admin_views_modelwithstringprimarykey_change', args=('__fk__',)
        ).replace('__fk__', escape(iri_to_uri(quote(self.pk))))
        should_contain = '<a href="%s">%s</a>' % (change_url, escape(self.pk))
        self.assertContains(response, should_contain)
    def test_url_conflicts_with_add(self):
        "A model with a primary key that ends with add or is `add` should be visible"
        add_model = ModelWithStringPrimaryKey.objects.create(pk="i have something to add")
        add_model.save()
        response = self.client.get(
            reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(add_model.pk),))
        )
        should_contain = """<h1>Change model with string primary key</h1>"""
        self.assertContains(response, should_contain)
        # A pk that is exactly "add" must not collide with the add view URL.
        add_model2 = ModelWithStringPrimaryKey.objects.create(pk="add")
        add_url = reverse('admin:admin_views_modelwithstringprimarykey_add')
        change_url = reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(add_model2.pk),))
        self.assertNotEqual(add_url, change_url)
    def test_url_conflicts_with_delete(self):
        "A model with a primary key that ends with delete should be visible"
        delete_model = ModelWithStringPrimaryKey(pk="delete")
        delete_model.save()
        response = self.client.get(
            reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(delete_model.pk),))
        )
        should_contain = """<h1>Change model with string primary key</h1>"""
        self.assertContains(response, should_contain)
    def test_url_conflicts_with_history(self):
        "A model with a primary key that ends with history should be visible"
        history_model = ModelWithStringPrimaryKey(pk="history")
        history_model.save()
        response = self.client.get(
            reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(history_model.pk),))
        )
        should_contain = """<h1>Change model with string primary key</h1>"""
        self.assertContains(response, should_contain)
    def test_shortcut_view_with_escaping(self):
        "'View on site should' work properly with char fields"
        model = ModelWithStringPrimaryKey(pk='abc_123')
        model.save()
        response = self.client.get(
            reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(model.pk),))
        )
        should_contain = '/%s/" class="viewsitelink">' % model.pk
        self.assertContains(response, should_contain)
    def test_change_view_history_link(self):
        """Object history button link should work and contain the pk value quoted."""
        url = reverse(
            'admin:%s_modelwithstringprimarykey_change' % ModelWithStringPrimaryKey._meta.app_label,
            args=(quote(self.pk),)
        )
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        expected_link = reverse(
            'admin:%s_modelwithstringprimarykey_history' % ModelWithStringPrimaryKey._meta.app_label,
            args=(quote(self.pk),)
        )
        self.assertContains(response, '<a href="%s" class="historylink"' % escape(expected_link))
    def test_redirect_on_add_view_continue_button(self):
        """As soon as an object is added using "Save and continue editing"
        button, the user should be redirected to the object's change_view.
        In case primary key is a string containing some special characters
        like slash or underscore, these characters must be escaped (see #22266)
        """
        response = self.client.post(
            reverse('admin:admin_views_modelwithstringprimarykey_add'),
            {
                'string_pk': '123/history',
                "_continue": "1",  # Save and continue editing
            }
        )
        self.assertEqual(response.status_code, 302)  # temporary redirect
        self.assertIn('/123_2Fhistory/', response['location'])  # PK is quoted
@override_settings(ROOT_URLCONF='admin_views.urls')
class SecureViewTests(TestCase):
    """
    Behavior of views protected by the staff_member_required decorator.
    """
    def test_secure_view_shows_login_if_not_logged_in(self):
        """An anonymous request is redirected to the admin login form."""
        secure_url = reverse('secure_view')
        response = self.client.get(secure_url)
        expected_redirect = '%s?next=%s' % (reverse('admin:login'), secure_url)
        self.assertRedirects(response, expected_redirect)
        # Following the redirect lands on the login template, with the
        # originally requested URL preserved in the redirect field.
        response = self.client.get(secure_url, follow=True)
        self.assertTemplateUsed(response, 'admin/login.html')
        self.assertEqual(response.context[REDIRECT_FIELD_NAME], secure_url)
    def test_staff_member_required_decorator_works_with_argument(self):
        """
        staff_member_required honors a custom redirect_field_name argument.
        """
        secure_url = '/test_admin/admin/secure-view2/'
        response = self.client.get(secure_url)
        expected_redirect = '%s?myfield=%s' % (reverse('admin:login'), secure_url)
        self.assertRedirects(response, expected_redirect)
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminViewUnicodeTest(TestCase):
    """
    Admin edit and delete views handle model data containing non-ASCII
    characters.
    """
    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')
        # A book with Norwegian characters plus chapters and extra rows
        # holding more non-ASCII content.
        cls.b1 = Book.objects.create(name='Lærdommer')
        cls.p1 = Promo.objects.create(name='<Promo for Lærdommer>', book=cls.b1)
        cls.chap1 = Chapter.objects.create(
            title='Norske bostaver æøå skaper problemer', content='<p>Svært frustrerende med UnicodeDecodeErro</p>',
            book=cls.b1
        )
        cls.chap2 = Chapter.objects.create(
            title='Kjærlighet', content='<p>La kjærligheten til de lidende seire.</p>', book=cls.b1)
        cls.chap3 = Chapter.objects.create(title='Kjærlighet', content='<p>Noe innhold</p>', book=cls.b1)
        cls.chap4 = ChapterXtra1.objects.create(chap=cls.chap1, xtra='<Xtra(1) Norske bostaver æøå skaper problemer>')
        cls.chap5 = ChapterXtra1.objects.create(chap=cls.chap2, xtra='<Xtra(1) Kjærlighet>')
        cls.chap6 = ChapterXtra1.objects.create(chap=cls.chap3, xtra='<Xtra(1) Kjærlighet>')
        cls.chap7 = ChapterXtra2.objects.create(chap=cls.chap1, xtra='<Xtra(2) Norske bostaver æøå skaper problemer>')
        cls.chap8 = ChapterXtra2.objects.create(chap=cls.chap2, xtra='<Xtra(2) Kjærlighet>')
        cls.chap9 = ChapterXtra2.objects.create(chap=cls.chap3, xtra='<Xtra(2) Kjærlighet>')
    def setUp(self):
        # All tests run as the superuser.
        self.client.force_login(self.superuser)
    def test_unicode_edit(self):
        """
        A test to ensure that POST on edit_view handles non-ASCII characters.
        """
        post_data = {
            "name": "Test lærdommer",
            # inline data
            "chapter_set-TOTAL_FORMS": "6",
            "chapter_set-INITIAL_FORMS": "3",
            "chapter_set-MAX_NUM_FORMS": "0",
            "chapter_set-0-id": self.chap1.pk,
            "chapter_set-0-title": "Norske bostaver æøå skaper problemer",
            "chapter_set-0-content": "<p>Svært frustrerende med UnicodeDecodeError</p>",
            "chapter_set-1-id": self.chap2.id,
            "chapter_set-1-title": "Kjærlighet.",
            "chapter_set-1-content": "<p>La kjærligheten til de lidende seire.</p>",
            "chapter_set-2-id": self.chap3.id,
            "chapter_set-2-title": "Need a title.",
            "chapter_set-2-content": "<p>Newest content</p>",
            "chapter_set-3-id": "",
            "chapter_set-3-title": "",
            "chapter_set-3-content": "",
            "chapter_set-4-id": "",
            "chapter_set-4-title": "",
            "chapter_set-4-content": "",
            "chapter_set-5-id": "",
            "chapter_set-5-title": "",
            "chapter_set-5-content": "",
        }
        response = self.client.post(reverse('admin:admin_views_book_change', args=(self.b1.pk,)), post_data)
        self.assertEqual(response.status_code, 302)  # redirect somewhere
    def test_unicode_delete(self):
        """
        Ensure that the delete_view handles non-ASCII characters
        """
        delete_dict = {'post': 'yes'}
        delete_url = reverse('admin:admin_views_book_delete', args=(self.b1.pk,))
        # GET shows the confirmation page; POST performs the deletion.
        response = self.client.get(delete_url)
        self.assertEqual(response.status_code, 200)
        response = self.client.post(delete_url, delete_dict)
        self.assertRedirects(response, reverse('admin:admin_views_book_changelist'))
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminViewListEditable(TestCase):
    @classmethod
    def setUpTestData(cls):
        # Superuser performing the list-editable POSTs.
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')
        cls.s1 = Section.objects.create(name='Test section')
        cls.a1 = Article.objects.create(
            content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a2 = Article.objects.create(
            content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a3 = Article.objects.create(
            content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
        # Three people edited via the list-editable changelist in the tests.
        cls.per1 = Person.objects.create(name='John Mauchly', gender=1, alive=True)
        cls.per2 = Person.objects.create(name='Grace Hopper', gender=1, alive=False)
        cls.per3 = Person.objects.create(name='Guido van Rossum', gender=1, alive=True)
    def setUp(self):
        # Every test in this class runs as the superuser.
        self.client.force_login(self.superuser)
def test_inheritance(self):
Podcast.objects.create(name="This Week in Django", release_date=datetime.date.today())
response = self.client.get(reverse('admin:admin_views_podcast_changelist'))
self.assertEqual(response.status_code, 200)
def test_inheritance_2(self):
Vodcast.objects.create(name="This Week in Django", released=True)
response = self.client.get(reverse('admin:admin_views_vodcast_changelist'))
self.assertEqual(response.status_code, 200)
def test_custom_pk(self):
Language.objects.create(iso='en', name='English', english_name='English')
response = self.client.get(reverse('admin:admin_views_language_changelist'))
self.assertEqual(response.status_code, 200)
    def test_changelist_input_html(self):
        """The changelist renders the expected number of form controls."""
        response = self.client.get(reverse('admin:admin_views_person_changelist'))
        # 2 inputs per object (the field and the hidden id field) = 6
        # 4 management hidden fields = 4
        # 4 action inputs (3 regular checkboxes, 1 checkbox to select all)
        # main form submit button = 1
        # search field and search submit button = 2
        # CSRF field = 1
        # field to track 'select all' across paginated views = 1
        # 6 + 4 + 4 + 1 + 2 + 1 + 1 = 19 inputs
        self.assertContains(response, "<input", count=19)
        # 1 select per object = 3, plus one more select (presumably the
        # actions dropdown) = 4
        self.assertContains(response, "<select", count=4)
def test_post_messages(self):
# Ticket 12707: Saving inline editable should not show admin
# action warnings
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-gender": "1",
"form-0-id": "%s" % self.per1.pk,
"form-1-gender": "2",
"form-1-id": "%s" % self.per2.pk,
"form-2-alive": "checked",
"form-2-gender": "1",
"form-2-id": "%s" % self.per3.pk,
"_save": "Save",
}
response = self.client.post(reverse('admin:admin_views_person_changelist'),
data, follow=True)
self.assertEqual(len(response.context['messages']), 1)
    def test_post_submission(self):
        """
        Posting the list-editable formset saves the edited values, on the
        plain changelist as well as on filtered and searched pages.
        """
        data = {
            "form-TOTAL_FORMS": "3",
            "form-INITIAL_FORMS": "3",
            "form-MAX_NUM_FORMS": "0",
            "form-0-gender": "1",
            "form-0-id": "%s" % self.per1.pk,
            "form-1-gender": "2",
            "form-1-id": "%s" % self.per2.pk,
            "form-2-alive": "checked",
            "form-2-gender": "1",
            "form-2-id": "%s" % self.per3.pk,
            "_save": "Save",
        }
        self.client.post(reverse('admin:admin_views_person_changelist'), data)
        # per1's 'alive' checkbox was absent from the POST, so it's unchecked.
        self.assertEqual(Person.objects.get(name="John Mauchly").alive, False)
        self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 2)
        # test a filtered page
        data = {
            "form-TOTAL_FORMS": "2",
            "form-INITIAL_FORMS": "2",
            "form-MAX_NUM_FORMS": "0",
            "form-0-id": "%s" % self.per1.pk,
            "form-0-gender": "1",
            "form-0-alive": "checked",
            "form-1-id": "%s" % self.per3.pk,
            "form-1-gender": "1",
            "form-1-alive": "checked",
            "_save": "Save",
        }
        self.client.post(reverse('admin:admin_views_person_changelist') + '?gender__exact=1', data)
        self.assertEqual(Person.objects.get(name="John Mauchly").alive, True)
        # test a searched page
        data = {
            "form-TOTAL_FORMS": "1",
            "form-INITIAL_FORMS": "1",
            "form-MAX_NUM_FORMS": "0",
            "form-0-id": "%s" % self.per1.pk,
            "form-0-gender": "1",
            "_save": "Save",
        }
        self.client.post(reverse('admin:admin_views_person_changelist') + '?q=john', data)
        self.assertEqual(Person.objects.get(name="John Mauchly").alive, False)
    def test_non_field_errors(self):
        ''' Ensure that non field errors are displayed for each of the
        forms in the changelist's formset. Refs #13126.
        '''
        fd1 = FoodDelivery.objects.create(reference='123', driver='bill', restaurant='thai')
        fd2 = FoodDelivery.objects.create(reference='456', driver='bill', restaurant='india')
        fd3 = FoodDelivery.objects.create(reference='789', driver='bill', restaurant='pizza')
        data = {
            "form-TOTAL_FORMS": "3",
            "form-INITIAL_FORMS": "3",
            "form-MAX_NUM_FORMS": "0",
            "form-0-id": str(fd1.id),
            "form-0-reference": "123",
            "form-0-driver": "bill",
            "form-0-restaurant": "thai",
            # Same data as above: Forbidden because of unique_together!
            "form-1-id": str(fd2.id),
            "form-1-reference": "456",
            "form-1-driver": "bill",
            "form-1-restaurant": "thai",
            "form-2-id": str(fd3.id),
            "form-2-reference": "789",
            "form-2-driver": "bill",
            "form-2-restaurant": "pizza",
            "_save": "Save",
        }
        response = self.client.post(reverse('admin:admin_views_fooddelivery_changelist'), data)
        # One conflicting row -> exactly one non-field error row is rendered.
        self.assertContains(
            response,
            '<tr><td colspan="4"><ul class="errorlist nonfield"><li>Food delivery '
            'with this Driver and Restaurant already exists.</li></ul></td></tr>',
            1,
            html=True
        )
        data = {
            "form-TOTAL_FORMS": "3",
            "form-INITIAL_FORMS": "3",
            "form-MAX_NUM_FORMS": "0",
            "form-0-id": str(fd1.id),
            "form-0-reference": "123",
            "form-0-driver": "bill",
            "form-0-restaurant": "thai",
            # Same data as above: Forbidden because of unique_together!
            "form-1-id": str(fd2.id),
            "form-1-reference": "456",
            "form-1-driver": "bill",
            "form-1-restaurant": "thai",
            # Same data also.
            "form-2-id": str(fd3.id),
            "form-2-reference": "789",
            "form-2-driver": "bill",
            "form-2-restaurant": "thai",
            "_save": "Save",
        }
        response = self.client.post(reverse('admin:admin_views_fooddelivery_changelist'), data)
        # Two conflicting rows -> the error is rendered once per form.
        self.assertContains(
            response,
            '<tr><td colspan="4"><ul class="errorlist nonfield"><li>Food delivery '
            'with this Driver and Restaurant already exists.</li></ul></td></tr>',
            2,
            html=True
        )
def test_non_form_errors(self):
# test if non-form errors are handled; ticket #12716
data = {
"form-TOTAL_FORMS": "1",
"form-INITIAL_FORMS": "1",
"form-MAX_NUM_FORMS": "0",
"form-0-id": "%s" % self.per2.pk,
"form-0-alive": "1",
"form-0-gender": "2",
# Ensure that the form processing understands this as a list_editable "Save"
# and not an action "Go".
"_save": "Save",
}
response = self.client.post(reverse('admin:admin_views_person_changelist'), data)
self.assertContains(response, "Grace is not a Zombie")
def test_non_form_errors_is_errorlist(self):
# test if non-form errors are correctly handled; ticket #12878
data = {
"form-TOTAL_FORMS": "1",
"form-INITIAL_FORMS": "1",
"form-MAX_NUM_FORMS": "0",
"form-0-id": "%s" % self.per2.pk,
"form-0-alive": "1",
"form-0-gender": "2",
"_save": "Save",
}
response = self.client.post(reverse('admin:admin_views_person_changelist'), data)
non_form_errors = response.context['cl'].formset.non_form_errors()
self.assertIsInstance(non_form_errors, ErrorList)
self.assertEqual(str(non_form_errors), str(ErrorList(["Grace is not a Zombie"])))
    def test_list_editable_ordering(self):
        """
        Saving new 'order' values through the list-editable changelist
        applies each value to the matching object.
        """
        collector = Collector.objects.create(id=1, name="Frederick Clegg")
        Category.objects.create(id=1, order=1, collector=collector)
        Category.objects.create(id=2, order=2, collector=collector)
        Category.objects.create(id=3, order=0, collector=collector)
        Category.objects.create(id=4, order=0, collector=collector)
        # NB: The order values must be changed so that the items are reordered.
        data = {
            "form-TOTAL_FORMS": "4",
            "form-INITIAL_FORMS": "4",
            "form-MAX_NUM_FORMS": "0",
            "form-0-order": "14",
            "form-0-id": "1",
            "form-0-collector": "1",
            "form-1-order": "13",
            "form-1-id": "2",
            "form-1-collector": "1",
            "form-2-order": "1",
            "form-2-id": "3",
            "form-2-collector": "1",
            "form-3-order": "0",
            "form-3-id": "4",
            "form-3-collector": "1",
            # Ensure that the form processing understands this as a list_editable "Save"
            # and not an action "Go".
            "_save": "Save",
        }
        response = self.client.post(reverse('admin:admin_views_category_changelist'), data)
        # Successful post will redirect
        self.assertEqual(response.status_code, 302)
        # Check that the order values have been applied to the right objects
        self.assertEqual(Category.objects.get(id=1).order, 14)
        self.assertEqual(Category.objects.get(id=2).order, 13)
        self.assertEqual(Category.objects.get(id=3).order, 1)
        self.assertEqual(Category.objects.get(id=4).order, 0)
def test_list_editable_pagination(self):
"""
Ensure that pagination works for list_editable items.
Refs #16819.
"""
UnorderedObject.objects.create(id=1, name='Unordered object #1')
UnorderedObject.objects.create(id=2, name='Unordered object #2')
UnorderedObject.objects.create(id=3, name='Unordered object #3')
response = self.client.get(reverse('admin:admin_views_unorderedobject_changelist'))
self.assertContains(response, 'Unordered object #3')
self.assertContains(response, 'Unordered object #2')
self.assertNotContains(response, 'Unordered object #1')
response = self.client.get(reverse('admin:admin_views_unorderedobject_changelist') + '?p=1')
self.assertNotContains(response, 'Unordered object #3')
self.assertNotContains(response, 'Unordered object #2')
self.assertContains(response, 'Unordered object #1')
def test_list_editable_action_submit(self):
    # List editable changes should not be executed if the action "Go" button is
    # used to submit the form.
    # NOTE(review): relies on Person objects with pks 1-3 existing —
    # presumably created in this class's setUpTestData; confirm.
    data = {
        "form-TOTAL_FORMS": "3",
        "form-INITIAL_FORMS": "3",
        "form-MAX_NUM_FORMS": "0",
        "form-0-gender": "1",
        "form-0-id": "1",
        "form-1-gender": "2",
        "form-1-id": "2",
        "form-2-alive": "checked",
        "form-2-gender": "1",
        "form-2-id": "3",
        # "index" marks which action form's "Go" button was clicked,
        # so the POST is treated as an action submission, not a save.
        "index": "0",
        "_selected_action": ['3'],
        "action": ['', 'delete_selected'],
    }
    self.client.post(reverse('admin:admin_views_person_changelist'), data)
    # The list_editable changes above must NOT have been applied.
    self.assertEqual(Person.objects.get(name="John Mauchly").alive, True)
    self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 1)
def test_list_editable_action_choices(self):
    # List editable changes should be executed if the "Save" button is
    # used to submit the form - any action choices should be ignored.
    data = {
        "form-TOTAL_FORMS": "3",
        "form-INITIAL_FORMS": "3",
        "form-MAX_NUM_FORMS": "0",
        "form-0-gender": "1",
        "form-0-id": "%s" % self.per1.pk,
        "form-1-gender": "2",
        "form-1-id": "%s" % self.per2.pk,
        "form-2-alive": "checked",
        "form-2-gender": "1",
        "form-2-id": "%s" % self.per3.pk,
        # "_save" marks this POST as a list_editable save, so the
        # selected action below must be ignored.
        "_save": "Save",
        "_selected_action": ['1'],
        "action": ['', 'delete_selected'],
    }
    self.client.post(reverse('admin:admin_views_person_changelist'), data)
    # The list_editable changes have been applied (and nothing was deleted).
    self.assertEqual(Person.objects.get(name="John Mauchly").alive, False)
    self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 2)
def test_list_editable_popup(self):
    """
    Fields should not be list-editable in popups.
    """
    changelist_url = reverse('admin:admin_views_person_changelist')
    # Regular changelist: list_editable is in effect.
    response = self.client.get(changelist_url)
    self.assertNotEqual(response.context['cl'].list_editable, ())
    # Popup changelist: list_editable is emptied out.
    response = self.client.get('%s?%s' % (changelist_url, IS_POPUP_VAR))
    self.assertEqual(response.context['cl'].list_editable, ())
def test_pk_hidden_fields(self):
    """ Ensure that hidden pk fields aren't displayed in the table body and
    that their corresponding human-readable value is displayed instead.
    Note that the hidden pk fields are in fact displayed, but
    separately (not in the table), and only once.
    Refs #12475.
    """
    story1 = Story.objects.create(title='The adventures of Guido', content='Once upon a time in Djangoland...')
    story2 = Story.objects.create(
        title='Crouching Tiger, Hidden Python',
        content='The Python was sneaking into...',
    )
    response = self.client.get(reverse('admin:admin_views_story_changelist'))
    # Only one hidden field, in a separate place than the table.
    self.assertContains(response, 'id="id_form-0-id"', 1)
    self.assertContains(response, 'id="id_form-1-id"', 1)
    # NOTE: story2 appears first in the hiddenfields div — presumably the
    # changelist orders newest first; confirm against the ModelAdmin ordering.
    self.assertContains(
        response,
        '<div class="hiddenfields">\n'
        '<input type="hidden" name="form-0-id" value="%d" id="id_form-0-id" />'
        '<input type="hidden" name="form-1-id" value="%d" id="id_form-1-id" />\n</div>'
        % (story2.id, story1.id),
        html=True
    )
    # The human-readable pk value appears exactly once per row in the table.
    self.assertContains(response, '<td class="field-id">%d</td>' % story1.id, 1)
    self.assertContains(response, '<td class="field-id">%d</td>' % story2.id, 1)
def test_pk_hidden_fields_with_list_display_links(self):
    """ Similarly as test_pk_hidden_fields, but when the hidden pk fields are
    referenced in list_display_links.
    Refs #12475.
    """
    story1 = OtherStory.objects.create(
        title='The adventures of Guido',
        content='Once upon a time in Djangoland...',
    )
    story2 = OtherStory.objects.create(
        title='Crouching Tiger, Hidden Python',
        content='The Python was sneaking into...',
    )
    link1 = reverse('admin:admin_views_otherstory_change', args=(story1.pk,))
    link2 = reverse('admin:admin_views_otherstory_change', args=(story2.pk,))
    response = self.client.get(reverse('admin:admin_views_otherstory_changelist'))
    # Only one hidden field, in a separate place than the table.
    self.assertContains(response, 'id="id_form-0-id"', 1)
    self.assertContains(response, 'id="id_form-1-id"', 1)
    self.assertContains(
        response,
        '<div class="hiddenfields">\n'
        '<input type="hidden" name="form-0-id" value="%d" id="id_form-0-id" />'
        '<input type="hidden" name="form-1-id" value="%d" id="id_form-1-id" />\n</div>'
        % (story2.id, story1.id),
        html=True
    )
    # With list_display_links the pk cell is a <th> containing a change link.
    self.assertContains(response, '<th class="field-id"><a href="%s">%d</a></th>' % (link1, story1.id), 1)
    self.assertContains(response, '<th class="field-id"><a href="%s">%d</a></th>' % (link2, story2.id), 1)
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminSearchTest(TestCase):
    """Tests for the changelist search box (?q=...) behavior."""

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')
        cls.joepublicuser = User.objects.create_user(username='joepublic', password='secret')
        cls.s1 = Section.objects.create(name='Test section')
        cls.a1 = Article.objects.create(
            content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a2 = Article.objects.create(
            content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a3 = Article.objects.create(
            content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
        cls.per1 = Person.objects.create(name='John Mauchly', gender=1, alive=True)
        cls.per2 = Person.objects.create(name='Grace Hopper', gender=1, alive=False)
        cls.per3 = Person.objects.create(name='Guido van Rossum', gender=1, alive=True)
        # Recommender/Recommendation + TitleTranslation exercise searches
        # that traverse related ("sibling") models.
        cls.t1 = Recommender.objects.create()
        cls.t2 = Recommendation.objects.create(recommender=cls.t1)
        cls.t3 = Recommender.objects.create()
        cls.t4 = Recommendation.objects.create(recommender=cls.t3)
        cls.tt1 = TitleTranslation.objects.create(title=cls.t1, text='Bar')
        cls.tt2 = TitleTranslation.objects.create(title=cls.t2, text='Foo')
        cls.tt3 = TitleTranslation.objects.create(title=cls.t3, text='Few')
        cls.tt4 = TitleTranslation.objects.create(title=cls.t4, text='Bas')

    def setUp(self):
        self.client.force_login(self.superuser)

    def test_search_on_sibling_models(self):
        "Check that a search that mentions sibling models"
        response = self.client.get(reverse('admin:admin_views_recommendation_changelist') + '?q=bar')
        # confirm the search returned 1 object
        self.assertContains(response, "\n1 recommendation\n")

    def test_with_fk_to_field(self):
        """
        Ensure that the to_field GET parameter is preserved when a search
        is performed. Refs #10918.
        """
        response = self.client.get(reverse('admin:auth_user_changelist') + '?q=joe&%s=id' % TO_FIELD_VAR)
        self.assertContains(response, "\n1 user\n")
        # The to_field value is round-tripped through a hidden input.
        self.assertContains(response, '<input type="hidden" name="%s" value="id"/>' % TO_FIELD_VAR, html=True)

    def test_exact_matches(self):
        response = self.client.get(reverse('admin:admin_views_recommendation_changelist') + '?q=bar')
        # confirm the search returned one object
        self.assertContains(response, "\n1 recommendation\n")
        response = self.client.get(reverse('admin:admin_views_recommendation_changelist') + '?q=ba')
        # confirm the search returned zero objects
        self.assertContains(response, "\n0 recommendations\n")

    def test_beginning_matches(self):
        response = self.client.get(reverse('admin:admin_views_person_changelist') + '?q=Gui')
        # confirm the search returned one object
        self.assertContains(response, "\n1 person\n")
        self.assertContains(response, "Guido")
        response = self.client.get(reverse('admin:admin_views_person_changelist') + '?q=uido')
        # confirm the search returned zero objects
        self.assertContains(response, "\n0 persons\n")
        self.assertNotContains(response, "Guido")

    def test_pluggable_search(self):
        PluggableSearchPerson.objects.create(name="Bob", age=10)
        PluggableSearchPerson.objects.create(name="Amy", age=20)
        response = self.client.get(reverse('admin:admin_views_pluggablesearchperson_changelist') + '?q=Bob')
        # confirm the search returned one object
        self.assertContains(response, "\n1 pluggable search person\n")
        self.assertContains(response, "Bob")
        response = self.client.get(reverse('admin:admin_views_pluggablesearchperson_changelist') + '?q=20')
        # confirm the search returned one object
        self.assertContains(response, "\n1 pluggable search person\n")
        self.assertContains(response, "Amy")

    def test_reset_link(self):
        """
        Test presence of reset link in search bar ("1 result (_x total_)").
        """
        # 1 query for session + 1 for fetching user
        # + 1 for filtered result + 1 for filtered count
        # + 1 for total count
        with self.assertNumQueries(5):
            response = self.client.get(reverse('admin:admin_views_person_changelist') + '?q=Gui')
        self.assertContains(
            response,
            """<span class="small quiet">1 result (<a href="?">3 total</a>)</span>""",
            html=True
        )

    def test_no_total_count(self):
        """
        #8408 -- "Show all" should be displayed instead of the total count if
        ModelAdmin.show_full_result_count is False.
        """
        # 1 query for session + 1 for fetching user
        # + 1 for filtered result + 1 for filtered count
        with self.assertNumQueries(4):
            response = self.client.get(reverse('admin:admin_views_recommendation_changelist') + '?q=bar')
        self.assertContains(
            response,
            """<span class="small quiet">1 result (<a href="?">Show all</a>)</span>""",
            html=True
        )
        self.assertTrue(response.context['cl'].show_admin_actions)
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminInheritedInlinesTest(TestCase):
    """Tests for inline models that inherit from a common parent model."""

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')

    def setUp(self):
        self.client.force_login(self.superuser)

    def test_inline(self):
        "Ensure that inline models which inherit from a common parent are correctly handled by admin."
        foo_user = "foo username"
        bar_user = "bar username"
        # response.content is bytes, so the pattern must be bytes too.
        name_re = re.compile(b'name="(.*?)"')

        # test the add case
        response = self.client.get(reverse('admin:admin_views_persona_add'))
        names = name_re.findall(response.content)
        # make sure we have no duplicate HTML names
        self.assertEqual(len(names), len(set(names)))

        # test the add case
        post_data = {
            "name": "Test Name",
            # inline data
            "accounts-TOTAL_FORMS": "1",
            "accounts-INITIAL_FORMS": "0",
            "accounts-MAX_NUM_FORMS": "0",
            "accounts-0-username": foo_user,
            # "accounts-2-*" is the second inline's formset prefix —
            # both inherit from Account, hence the numbered prefixes.
            "accounts-2-TOTAL_FORMS": "1",
            "accounts-2-INITIAL_FORMS": "0",
            "accounts-2-MAX_NUM_FORMS": "0",
            "accounts-2-0-username": bar_user,
        }
        response = self.client.post(reverse('admin:admin_views_persona_add'), post_data)
        self.assertEqual(response.status_code, 302)  # redirect somewhere
        self.assertEqual(Persona.objects.count(), 1)
        self.assertEqual(FooAccount.objects.count(), 1)
        self.assertEqual(BarAccount.objects.count(), 1)
        self.assertEqual(FooAccount.objects.all()[0].username, foo_user)
        self.assertEqual(BarAccount.objects.all()[0].username, bar_user)
        self.assertEqual(Persona.objects.all()[0].accounts.count(), 2)

        persona_id = Persona.objects.all()[0].id
        foo_id = FooAccount.objects.all()[0].id
        bar_id = BarAccount.objects.all()[0].id

        # test the edit case
        response = self.client.get(reverse('admin:admin_views_persona_change', args=(persona_id,)))
        names = name_re.findall(response.content)
        # make sure we have no duplicate HTML names
        self.assertEqual(len(names), len(set(names)))

        post_data = {
            "name": "Test Name",
            "accounts-TOTAL_FORMS": "2",
            "accounts-INITIAL_FORMS": "1",
            "accounts-MAX_NUM_FORMS": "0",
            "accounts-0-username": "%s-1" % foo_user,
            "accounts-0-account_ptr": str(foo_id),
            "accounts-0-persona": str(persona_id),
            "accounts-2-TOTAL_FORMS": "2",
            "accounts-2-INITIAL_FORMS": "1",
            "accounts-2-MAX_NUM_FORMS": "0",
            "accounts-2-0-username": "%s-1" % bar_user,
            "accounts-2-0-account_ptr": str(bar_id),
            "accounts-2-0-persona": str(persona_id),
        }
        response = self.client.post(reverse('admin:admin_views_persona_change', args=(persona_id,)), post_data)
        self.assertEqual(response.status_code, 302)
        # Editing must update the existing accounts, not create new ones.
        self.assertEqual(Persona.objects.count(), 1)
        self.assertEqual(FooAccount.objects.count(), 1)
        self.assertEqual(BarAccount.objects.count(), 1)
        self.assertEqual(FooAccount.objects.all()[0].username, "%s-1" % foo_user)
        self.assertEqual(BarAccount.objects.all()[0].username, "%s-1" % bar_user)
        self.assertEqual(Persona.objects.all()[0].accounts.count(), 2)
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminActionsTest(TestCase):
    """Tests for admin changelist actions (custom, default delete, popups)."""

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')
        cls.s1 = ExternalSubscriber.objects.create(name='John Doe', email='john@example.org')
        cls.s2 = Subscriber.objects.create(name='Max Mustermann', email='max@example.org')

    def setUp(self):
        self.client.force_login(self.superuser)

    def test_model_admin_custom_action(self):
        "Tests a custom action defined in a ModelAdmin method"
        action_data = {
            ACTION_CHECKBOX_NAME: [1],
            'action': 'mail_admin',
            'index': 0,
        }
        self.client.post(reverse('admin:admin_views_subscriber_changelist'), action_data)
        self.assertEqual(len(mail.outbox), 1)
        self.assertEqual(mail.outbox[0].subject, 'Greetings from a ModelAdmin action')

    def test_model_admin_default_delete_action(self):
        "Tests the default delete action defined as a ModelAdmin method"
        action_data = {
            ACTION_CHECKBOX_NAME: [1, 2],
            'action': 'delete_selected',
            'index': 0,
        }
        # 'post': 'yes' confirms the deletion on the confirmation page.
        delete_confirmation_data = {
            ACTION_CHECKBOX_NAME: [1, 2],
            'action': 'delete_selected',
            'post': 'yes',
        }
        confirmation = self.client.post(reverse('admin:admin_views_subscriber_changelist'), action_data)
        self.assertIsInstance(confirmation, TemplateResponse)
        self.assertContains(confirmation, "Are you sure you want to delete the selected subscribers?")
        self.assertContains(confirmation, "<h2>Summary</h2>")
        self.assertContains(confirmation, "<li>Subscribers: 2</li>")
        self.assertContains(confirmation, "<li>External subscribers: 1</li>")
        self.assertContains(confirmation, ACTION_CHECKBOX_NAME, count=2)
        self.client.post(reverse('admin:admin_views_subscriber_changelist'), delete_confirmation_data)
        self.assertEqual(Subscriber.objects.count(), 0)

    @override_settings(USE_THOUSAND_SEPARATOR=True, USE_L10N=True)
    def test_non_localized_pk(self):
        """If USE_THOUSAND_SEPARATOR is set, make sure that the ids for
        the objects selected for deletion are rendered without separators.
        Refs #14895.
        """
        subscriber = Subscriber.objects.get(id=1)
        subscriber.id = 9999
        subscriber.save()
        action_data = {
            ACTION_CHECKBOX_NAME: [9999, 2],
            'action': 'delete_selected',
            'index': 0,
        }
        response = self.client.post(reverse('admin:admin_views_subscriber_changelist'), action_data)
        self.assertTemplateUsed(response, 'admin/delete_selected_confirmation.html')
        self.assertContains(response, 'value="9999"')  # Instead of 9,999
        self.assertContains(response, 'value="2"')

    def test_model_admin_default_delete_action_protected(self):
        """
        Tests the default delete action defined as a ModelAdmin method in the
        case where some related objects are protected from deletion.
        """
        q1 = Question.objects.create(question="Why?")
        a1 = Answer.objects.create(question=q1, answer="Because.")
        a2 = Answer.objects.create(question=q1, answer="Yes.")
        q2 = Question.objects.create(question="Wherefore?")
        action_data = {
            ACTION_CHECKBOX_NAME: [q1.pk, q2.pk],
            'action': 'delete_selected',
            'index': 0,
        }
        delete_confirmation_data = action_data.copy()
        delete_confirmation_data['post'] = 'yes'
        response = self.client.post(reverse('admin:admin_views_question_changelist'), action_data)
        self.assertContains(response, "would require deleting the following protected related objects")
        self.assertContains(
            response,
            '<li>Answer: <a href="%s">Because.</a></li>' % reverse('admin:admin_views_answer_change', args=(a1.pk,)),
            html=True
        )
        self.assertContains(
            response,
            '<li>Answer: <a href="%s">Yes.</a></li>' % reverse('admin:admin_views_answer_change', args=(a2.pk,)),
            html=True
        )
        # A POST request to delete protected objects should display the page
        # which says the deletion is prohibited.
        response = self.client.post(reverse('admin:admin_views_question_changelist'), delete_confirmation_data)
        self.assertContains(response, "would require deleting the following protected related objects")
        # Nothing was deleted.
        self.assertEqual(Question.objects.count(), 2)

    def test_model_admin_default_delete_action_no_change_url(self):
        """
        Default delete action shouldn't break if a user's ModelAdmin removes the url for change_view.
        Regression test for #20640
        """
        obj = UnchangeableObject.objects.create()
        action_data = {
            ACTION_CHECKBOX_NAME: obj.pk,
            "action": "delete_selected",
            "index": "0",
        }
        response = self.client.post(reverse('admin:admin_views_unchangeableobject_changelist'), action_data)
        # No 500 caused by NoReverseMatch
        self.assertEqual(response.status_code, 200)
        # The page shouldn't display a link to the nonexistent change page
        self.assertContains(response, "<li>Unchangeable object: UnchangeableObject object</li>", 1, html=True)

    def test_custom_function_mail_action(self):
        "Tests a custom action defined in a function"
        action_data = {
            ACTION_CHECKBOX_NAME: [1],
            'action': 'external_mail',
            'index': 0,
        }
        self.client.post(reverse('admin:admin_views_externalsubscriber_changelist'), action_data)
        self.assertEqual(len(mail.outbox), 1)
        self.assertEqual(mail.outbox[0].subject, 'Greetings from a function action')

    def test_custom_function_action_with_redirect(self):
        "Tests a custom action defined in a function"
        action_data = {
            ACTION_CHECKBOX_NAME: [1],
            'action': 'redirect_to',
            'index': 0,
        }
        response = self.client.post(reverse('admin:admin_views_externalsubscriber_changelist'), action_data)
        self.assertEqual(response.status_code, 302)

    def test_default_redirect(self):
        """
        Test that actions which don't return an HttpResponse are redirected to
        the same page, retaining the querystring (which may contain changelist
        information).
        """
        action_data = {
            ACTION_CHECKBOX_NAME: [1],
            'action': 'external_mail',
            'index': 0,
        }
        url = reverse('admin:admin_views_externalsubscriber_changelist') + '?o=1'
        response = self.client.post(url, action_data)
        self.assertRedirects(response, url)

    def test_custom_function_action_streaming_response(self):
        """Tests a custom action that returns a StreamingHttpResponse."""
        action_data = {
            ACTION_CHECKBOX_NAME: [1],
            'action': 'download',
            'index': 0,
        }
        response = self.client.post(reverse('admin:admin_views_externalsubscriber_changelist'), action_data)
        content = b''.join(response.streaming_content)
        self.assertEqual(content, b'This is the content of the file')
        self.assertEqual(response.status_code, 200)

    def test_custom_function_action_no_perm_response(self):
        """Tests a custom action that returns an HttpResponse with 403 code."""
        action_data = {
            ACTION_CHECKBOX_NAME: [1],
            'action': 'no_perm',
            'index': 0,
        }
        response = self.client.post(reverse('admin:admin_views_externalsubscriber_changelist'), action_data)
        self.assertEqual(response.status_code, 403)
        self.assertEqual(response.content, b'No permission to perform this action')

    def test_actions_ordering(self):
        """
        Ensure that actions are ordered as expected.
        Refs #15964.
        """
        response = self.client.get(reverse('admin:admin_views_externalsubscriber_changelist'))
        self.assertContains(response, '''<label>Action: <select name="action">
<option value="" selected="selected">---------</option>
<option value="delete_selected">Delete selected external
subscribers</option>
<option value="redirect_to">Redirect to (Awesome action)</option>
<option value="external_mail">External mail (Another awesome
action)</option>
<option value="download">Download subscription</option>
<option value="no_perm">No permission to run</option>
</select>''', html=True)

    def test_model_without_action(self):
        "Tests a ModelAdmin without any action"
        response = self.client.get(reverse('admin:admin_views_oldsubscriber_changelist'))
        self.assertEqual(response.context["action_form"], None)
        self.assertNotContains(
            response, '<input type="checkbox" class="action-select"',
            msg_prefix="Found an unexpected action toggle checkboxbox in response"
        )
        self.assertNotContains(response, '<input type="checkbox" class="action-select"')

    def test_model_without_action_still_has_jquery(self):
        "Tests that a ModelAdmin without any actions still gets jQuery included in page"
        response = self.client.get(reverse('admin:admin_views_oldsubscriber_changelist'))
        self.assertEqual(response.context["action_form"], None)
        self.assertContains(
            response, 'jquery.min.js',
            msg_prefix="jQuery missing from admin pages for model with no admin actions"
        )

    def test_action_column_class(self):
        "Tests that the checkbox column class is present in the response"
        response = self.client.get(reverse('admin:admin_views_subscriber_changelist'))
        self.assertNotEqual(response.context["action_form"], None)
        self.assertContains(response, 'action-checkbox-column')

    def test_multiple_actions_form(self):
        """
        Test that actions come from the form whose submit button was pressed (#10618).
        """
        action_data = {
            ACTION_CHECKBOX_NAME: [1],
            # Two different actions selected on the two forms...
            'action': ['external_mail', 'delete_selected'],
            # ...but we clicked "go" on the top form.
            'index': 0
        }
        self.client.post(reverse('admin:admin_views_externalsubscriber_changelist'), action_data)
        # Send mail, don't delete.
        self.assertEqual(len(mail.outbox), 1)
        self.assertEqual(mail.outbox[0].subject, 'Greetings from a function action')

    def test_user_message_on_none_selected(self):
        """
        User should see a warning when 'Go' is pressed and no items are selected.
        """
        action_data = {
            ACTION_CHECKBOX_NAME: [],
            'action': 'delete_selected',
            'index': 0,
        }
        response = self.client.post(reverse('admin:admin_views_subscriber_changelist'), action_data)
        msg = """Items must be selected in order to perform actions on them. No items have been changed."""
        self.assertContains(response, msg)
        self.assertEqual(Subscriber.objects.count(), 2)

    def test_user_message_on_no_action(self):
        """
        User should see a warning when 'Go' is pressed and no action is selected.
        """
        action_data = {
            ACTION_CHECKBOX_NAME: [1, 2],
            'action': '',
            'index': 0,
        }
        response = self.client.post(reverse('admin:admin_views_subscriber_changelist'), action_data)
        msg = """No action selected."""
        self.assertContains(response, msg)
        self.assertEqual(Subscriber.objects.count(), 2)

    def test_selection_counter(self):
        """
        Check if the selection counter is there.
        """
        response = self.client.get(reverse('admin:admin_views_subscriber_changelist'))
        self.assertContains(response, '0 of 2 selected')

    def test_popup_actions(self):
        """ Actions should not be shown in popups. """
        response = self.client.get(reverse('admin:admin_views_subscriber_changelist'))
        self.assertNotEqual(response.context["action_form"], None)
        response = self.client.get(
            reverse('admin:admin_views_subscriber_changelist') + '?%s' % IS_POPUP_VAR)
        self.assertEqual(response.context["action_form"], None)

    def test_popup_template_response(self):
        """
        Success on popups shall be rendered from template in order to allow
        easy customization.
        """
        response = self.client.post(
            reverse('admin:admin_views_actor_add') + '?%s=1' % IS_POPUP_VAR,
            {'name': 'Troy McClure', 'age': '55', IS_POPUP_VAR: '1'})
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.template_name, 'admin/popup_response.html')

    def test_popup_template_escaping(self):
        # Trailing backslashes in the data must survive JSON-in-template
        # escaping (each '\\' literal is a single backslash at runtime).
        popup_response_data = json.dumps({
            'new_value': 'new_value\\',
            'obj': 'obj\\',
            'value': 'value\\',
        })
        context = {
            'popup_response_data': popup_response_data,
        }
        output = render_to_string('admin/popup_response.html', context)
        self.assertIn(
            r'"value\\"', output
        )
        self.assertIn(
            r'"new_value\\"', output
        )
        self.assertIn(
            r'"obj\\"', output
        )
@override_settings(ROOT_URLCONF='admin_views.urls')
class TestCustomChangeList(TestCase):
    """Tests for a ModelAdmin using a custom ChangeList class."""

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')

    def setUp(self):
        self.client.force_login(self.superuser)

    def test_custom_changelist(self):
        """
        Validate that a custom ChangeList class can be used (#9749)
        """
        # Insert some data through the admin add view.
        add_response = self.client.post(reverse('admin:admin_views_gadget_add'), {"name": "First Gadget"})
        self.assertEqual(add_response.status_code, 302)  # redirect somewhere
        changelist_url = reverse('admin:admin_views_gadget_changelist')
        # Hit the page once to get messages out of the queue message list.
        self.client.get(changelist_url)
        # The object must still not be visible on the changelist page.
        response = self.client.get(changelist_url)
        self.assertEqual(response.status_code, 200)
        self.assertNotContains(response, 'First Gadget')
@override_settings(ROOT_URLCONF='admin_views.urls')
class TestInlineNotEditable(TestCase):
    """Smoke test: a parent add view with inlines must render."""

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')

    def setUp(self):
        self.client.force_login(self.superuser)

    def test_GET_parent_add(self):
        """
        InlineModelAdmin broken?
        """
        parent_add_url = reverse('admin:admin_views_parent_add')
        self.assertEqual(self.client.get(parent_add_url).status_code, 200)
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminCustomQuerysetTest(TestCase):
@classmethod
def setUpTestData(cls):
    # A single superuser is enough; each test logs in via setUp().
    cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')
def setUp(self):
    self.client.force_login(self.superuser)
    # Three EmptyModel rows; tests check which ones the custom
    # queryset exposes on the changelist/change views.
    self.pks = [EmptyModel.objects.create().id for _ in range(3)]
    self.super_login = {
        REDIRECT_FIELD_NAME: reverse('admin:index'),
        'username': 'super',
        'password': 'secret',
    }
def test_changelist_view(self):
    # Rows with pk <= 1 are expected to be filtered out of the
    # changelist by the ModelAdmin's custom queryset.
    response = self.client.get(reverse('admin:admin_views_emptymodel_changelist'))
    for pk in self.pks:
        if pk <= 1:
            self.assertNotContains(response, 'Primary key = %s' % pk)
        else:
            self.assertContains(response, 'Primary key = %s' % pk)
def test_changelist_view_count_queries(self):
    # create 2 Person objects
    Person.objects.create(name='person1', gender=1)
    Person.objects.create(name='person2', gender=2)
    changelist_url = reverse('admin:admin_views_person_changelist')

    # 5 queries are expected: 1 for the session, 1 for the user,
    # 2 for the counts and 1 for the objects on the page
    with self.assertNumQueries(5):
        resp = self.client.get(changelist_url)
        self.assertEqual(resp.context['selection_note'], '0 of 2 selected')
        self.assertEqual(resp.context['selection_note_all'], 'All 2 selected')
    # A search matching nothing still costs the same number of queries.
    with self.assertNumQueries(5):
        extra = {'q': 'not_in_name'}
        resp = self.client.get(changelist_url, extra)
        self.assertEqual(resp.context['selection_note'], '0 of 0 selected')
        self.assertEqual(resp.context['selection_note_all'], 'All 0 selected')
    with self.assertNumQueries(5):
        extra = {'q': 'person'}
        resp = self.client.get(changelist_url, extra)
        self.assertEqual(resp.context['selection_note'], '0 of 2 selected')
        self.assertEqual(resp.context['selection_note_all'], 'All 2 selected')
    # Filtering by a field value; note the singular '1 selected' text.
    with self.assertNumQueries(5):
        extra = {'gender__exact': '1'}
        resp = self.client.get(changelist_url, extra)
        self.assertEqual(resp.context['selection_note'], '0 of 1 selected')
        self.assertEqual(resp.context['selection_note_all'], '1 selected')
def test_change_view(self):
    # Objects hidden by the custom queryset (pk <= 1) must 404 in the
    # change view; the rest render normally.
    for pk in self.pks:
        url = reverse('admin:admin_views_emptymodel_change', args=(pk,))
        expected_status = 200 if pk > 1 else 404
        self.assertEqual(self.client.get(url).status_code, expected_status)
def test_add_model_modeladmin_defer_qs(self):
    # Test for #14529. defer() is used in ModelAdmin.get_queryset()

    # model has __str__ method
    self.assertEqual(CoverLetter.objects.count(), 0)
    # Emulate model instance creation via the admin
    post_data = {
        "author": "Candidate, Best",
        "_save": "Save",
    }
    response = self.client.post(reverse('admin:admin_views_coverletter_add'), post_data, follow=True)
    self.assertEqual(response.status_code, 200)
    self.assertEqual(CoverLetter.objects.count(), 1)
    # Message should contain non-ugly model verbose name
    pk = CoverLetter.objects.all()[0].pk
    self.assertContains(
        response,
        '<li class="success">The cover letter "<a href="%s">'
        'Candidate, Best</a>" was added successfully.</li>' %
        reverse('admin:admin_views_coverletter_change', args=(pk,)), html=True
    )

    # model has no __str__ method
    self.assertEqual(ShortMessage.objects.count(), 0)
    # Emulate model instance creation via the admin
    post_data = {
        "content": "What's this SMS thing?",
        "_save": "Save",
    }
    response = self.client.post(reverse('admin:admin_views_shortmessage_add'), post_data, follow=True)
    self.assertEqual(response.status_code, 200)
    self.assertEqual(ShortMessage.objects.count(), 1)
    # Message should contain non-ugly model verbose name
    # (default "ShortMessage object" repr since the model lacks __str__).
    pk = ShortMessage.objects.all()[0].pk
    self.assertContains(
        response,
        '<li class="success">The short message "<a href="%s">'
        'ShortMessage object</a>" was added successfully.</li>' %
        reverse('admin:admin_views_shortmessage_change', args=(pk,)), html=True
    )
def test_add_model_modeladmin_only_qs(self):
    # Test for #14529. only() is used in ModelAdmin.get_queryset()

    # model has __str__ method
    self.assertEqual(Telegram.objects.count(), 0)
    # Emulate model instance creation via the admin
    post_data = {
        "title": "Urgent telegram",
        "_save": "Save",
    }
    response = self.client.post(reverse('admin:admin_views_telegram_add'), post_data, follow=True)
    self.assertEqual(response.status_code, 200)
    self.assertEqual(Telegram.objects.count(), 1)
    # Message should contain non-ugly model verbose name
    pk = Telegram.objects.all()[0].pk
    self.assertContains(
        response,
        '<li class="success">The telegram "<a href="%s">'
        'Urgent telegram</a>" was added successfully.</li>' %
        reverse('admin:admin_views_telegram_change', args=(pk,)), html=True
    )

    # model has no __str__ method
    self.assertEqual(Paper.objects.count(), 0)
    # Emulate model instance creation via the admin
    post_data = {
        "title": "My Modified Paper Title",
        "_save": "Save",
    }
    response = self.client.post(reverse('admin:admin_views_paper_add'), post_data, follow=True)
    self.assertEqual(response.status_code, 200)
    self.assertEqual(Paper.objects.count(), 1)
    # Message should contain non-ugly model verbose name
    # (default "Paper object" repr since the model lacks __str__).
    pk = Paper.objects.all()[0].pk
    self.assertContains(
        response,
        '<li class="success">The paper "<a href="%s">'
        'Paper object</a>" was added successfully.</li>' %
        reverse('admin:admin_views_paper_change', args=(pk,)), html=True
    )
def test_edit_model_modeladmin_defer_qs(self):
    # Test for #14529. defer() is used in ModelAdmin.get_queryset()

    # model has __str__ method
    cl = CoverLetter.objects.create(author="John Doe")
    self.assertEqual(CoverLetter.objects.count(), 1)
    response = self.client.get(reverse('admin:admin_views_coverletter_change', args=(cl.pk,)))
    self.assertEqual(response.status_code, 200)
    # Emulate model instance edit via the admin
    post_data = {
        "author": "John Doe II",
        "_save": "Save",
    }
    url = reverse('admin:admin_views_coverletter_change', args=(cl.pk,))
    response = self.client.post(url, post_data, follow=True)
    self.assertEqual(response.status_code, 200)
    self.assertEqual(CoverLetter.objects.count(), 1)
    # Message should contain non-ugly model verbose name. Instance
    # representation is set by model's __str__()
    self.assertContains(
        response,
        '<li class="success">The cover letter "<a href="%s">'
        'John Doe II</a>" was changed successfully.</li>' %
        reverse('admin:admin_views_coverletter_change', args=(cl.pk,)), html=True
    )

    # model has no __str__ method
    sm = ShortMessage.objects.create(content="This is expensive")
    self.assertEqual(ShortMessage.objects.count(), 1)
    response = self.client.get(reverse('admin:admin_views_shortmessage_change', args=(sm.pk,)))
    self.assertEqual(response.status_code, 200)
    # Emulate model instance edit via the admin
    post_data = {
        "content": "Too expensive",
        "_save": "Save",
    }
    url = reverse('admin:admin_views_shortmessage_change', args=(sm.pk,))
    response = self.client.post(url, post_data, follow=True)
    self.assertEqual(response.status_code, 200)
    self.assertEqual(ShortMessage.objects.count(), 1)
    # Message should contain non-ugly model verbose name. The ugly(!)
    # instance representation is set by six.text_type()
    # ("ShortMessage_Deferred_timestamp" is the deferred proxy class name
    # produced by defer() in get_queryset()).
    self.assertContains(
        response,
        '<li class="success">The short message "<a href="%s">'
        'ShortMessage_Deferred_timestamp object</a>" was changed successfully.</li>' %
        reverse('admin:admin_views_shortmessage_change', args=(sm.pk,)), html=True
    )
def test_edit_model_modeladmin_only_qs(self):
    """
    Editing objects via the admin works when ModelAdmin.get_queryset()
    uses only(), for a model with and a model without __str__().
    Refs #14529.
    """
    # --- Model WITH a __str__ method -------------------------------
    # "Frist" is an intentional typo; the edit below fixes it.
    t = Telegram.objects.create(title="Frist Telegram")
    self.assertEqual(Telegram.objects.count(), 1)
    response = self.client.get(reverse('admin:admin_views_telegram_change', args=(t.pk,)))
    self.assertEqual(response.status_code, 200)
    # Emulate model instance edit via the admin
    post_data = {
        "title": "Telegram without typo",
        "_save": "Save",
    }
    response = self.client.post(reverse('admin:admin_views_telegram_change', args=(t.pk,)), post_data, follow=True)
    self.assertEqual(response.status_code, 200)
    # Edit, not add: the object count must stay at one.
    self.assertEqual(Telegram.objects.count(), 1)
    # Message should contain non-ugly model verbose name. The instance
    # representation is set by model's __str__()
    self.assertContains(
        response,
        '<li class="success">The telegram "<a href="%s">'
        'Telegram without typo</a>" was changed successfully.</li>' %
        reverse('admin:admin_views_telegram_change', args=(t.pk,)), html=True
    )
    # --- Model WITHOUT a __str__ method ----------------------------
    p = Paper.objects.create(title="My Paper Title")
    self.assertEqual(Paper.objects.count(), 1)
    response = self.client.get(reverse('admin:admin_views_paper_change', args=(p.pk,)))
    self.assertEqual(response.status_code, 200)
    # Emulate model instance edit via the admin
    post_data = {
        "title": "My Modified Paper Title",
        "_save": "Save",
    }
    response = self.client.post(reverse('admin:admin_views_paper_change', args=(p.pk,)), post_data, follow=True)
    self.assertEqual(response.status_code, 200)
    self.assertEqual(Paper.objects.count(), 1)
    # Message should contain non-ugly model verbose name. The ugly(!)
    # instance representation is set by six.text_type() and exposes the
    # deferred proxy class name generated by only().
    self.assertContains(
        response,
        '<li class="success">The paper "<a href="%s">'
        'Paper_Deferred_author object</a>" was changed successfully.</li>' %
        reverse('admin:admin_views_paper_change', args=(p.pk,)), html=True
    )
def test_history_view_custom_qs(self):
    """
    The admin history view takes the ModelAdmin's custom queryset into
    account.
    Refs #21013.
    """
    self.client.post(reverse('admin:login'), self.super_login)
    for pk in (1, 2):
        FilteredManager.objects.create(pk=pk)
    changelist = self.client.get(reverse('admin:admin_views_filteredmanager_changelist'))
    for pk in (1, 2):
        # Both objects are visible on the changelist...
        self.assertContains(changelist, "PK=%d" % pk)
        # ...and each one's history page is reachable.
        history = self.client.get(
            reverse('admin:admin_views_filteredmanager_history', args=(pk,))
        )
        self.assertEqual(history.status_code, 200)
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminInlineFileUploadTest(TestCase):
    """Inline formsets containing FileFields keep prior data on errors."""

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')

    def setUp(self):
        self.client.force_login(self.superuser)
        # Set up test Picture and Gallery.
        # These must be set up here instead of in fixtures in order to allow Picture
        # to use a NamedTemporaryFile.
        file1 = tempfile.NamedTemporaryFile(suffix=".file1")
        file1.write(b'a' * (2 ** 21))
        filename = file1.name
        file1.close()
        # NOTE(review): the temp file is removed when closed; only the
        # path string appears to be needed by the FileField below.
        self.gallery = Gallery(name="Test Gallery")
        self.gallery.save()
        self.picture = Picture(name="Test Picture", image=filename, gallery=self.gallery)
        self.picture.save()

    def test_inline_file_upload_edit_validation_error_post(self):
        """
        Test that inline file uploads correctly display prior data (#10002).
        """
        # Submit an invalid formset (second form has a name but no image,
        # triggering validation) so the page is re-rendered with errors.
        post_data = {
            "name": "Test Gallery",
            "pictures-TOTAL_FORMS": "2",
            "pictures-INITIAL_FORMS": "1",
            "pictures-MAX_NUM_FORMS": "0",
            "pictures-0-id": six.text_type(self.picture.id),
            "pictures-0-gallery": six.text_type(self.gallery.id),
            "pictures-0-name": "Test Picture",
            "pictures-0-image": "",
            "pictures-1-id": "",
            "pictures-1-gallery": str(self.gallery.id),
            "pictures-1-name": "Test Picture 2",
            "pictures-1-image": "",
        }
        response = self.client.post(
            reverse('admin:admin_views_gallery_change', args=(self.gallery.id,)), post_data
        )
        # The existing file widget must still show its "Currently" link.
        self.assertContains(response, b"Currently")
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminInlineTests(TestCase):
    """
    Saving inline formsets through the admin works for a variety of
    primary-key styles (auto, explicit auto, char, integer, inherited)
    and for inlines with editable ordering fields.
    """

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')

    def setUp(self):
        # Baseline POST payload: three empty extra forms for each of the
        # Collector's inline formsets. Individual tests fill in the
        # fields of form 0 of the inline they exercise.
        self.post_data = {
            "name": "Test Name",
            "widget_set-TOTAL_FORMS": "3",
            "widget_set-INITIAL_FORMS": "0",
            "widget_set-MAX_NUM_FORMS": "0",
            "widget_set-0-id": "",
            "widget_set-0-owner": "1",
            "widget_set-0-name": "",
            "widget_set-1-id": "",
            "widget_set-1-owner": "1",
            "widget_set-1-name": "",
            "widget_set-2-id": "",
            "widget_set-2-owner": "1",
            "widget_set-2-name": "",
            "doohickey_set-TOTAL_FORMS": "3",
            "doohickey_set-INITIAL_FORMS": "0",
            "doohickey_set-MAX_NUM_FORMS": "0",
            "doohickey_set-0-owner": "1",
            "doohickey_set-0-code": "",
            "doohickey_set-0-name": "",
            "doohickey_set-1-owner": "1",
            "doohickey_set-1-code": "",
            "doohickey_set-1-name": "",
            "doohickey_set-2-owner": "1",
            "doohickey_set-2-code": "",
            "doohickey_set-2-name": "",
            "grommet_set-TOTAL_FORMS": "3",
            "grommet_set-INITIAL_FORMS": "0",
            "grommet_set-MAX_NUM_FORMS": "0",
            "grommet_set-0-code": "",
            "grommet_set-0-owner": "1",
            "grommet_set-0-name": "",
            "grommet_set-1-code": "",
            "grommet_set-1-owner": "1",
            "grommet_set-1-name": "",
            "grommet_set-2-code": "",
            "grommet_set-2-owner": "1",
            "grommet_set-2-name": "",
            "whatsit_set-TOTAL_FORMS": "3",
            "whatsit_set-INITIAL_FORMS": "0",
            "whatsit_set-MAX_NUM_FORMS": "0",
            "whatsit_set-0-owner": "1",
            "whatsit_set-0-index": "",
            "whatsit_set-0-name": "",
            "whatsit_set-1-owner": "1",
            "whatsit_set-1-index": "",
            "whatsit_set-1-name": "",
            "whatsit_set-2-owner": "1",
            "whatsit_set-2-index": "",
            "whatsit_set-2-name": "",
            "fancydoodad_set-TOTAL_FORMS": "3",
            "fancydoodad_set-INITIAL_FORMS": "0",
            "fancydoodad_set-MAX_NUM_FORMS": "0",
            "fancydoodad_set-0-doodad_ptr": "",
            "fancydoodad_set-0-owner": "1",
            "fancydoodad_set-0-name": "",
            "fancydoodad_set-0-expensive": "on",
            "fancydoodad_set-1-doodad_ptr": "",
            "fancydoodad_set-1-owner": "1",
            "fancydoodad_set-1-name": "",
            "fancydoodad_set-1-expensive": "on",
            "fancydoodad_set-2-doodad_ptr": "",
            "fancydoodad_set-2-owner": "1",
            "fancydoodad_set-2-name": "",
            "fancydoodad_set-2-expensive": "on",
            "category_set-TOTAL_FORMS": "3",
            "category_set-INITIAL_FORMS": "0",
            "category_set-MAX_NUM_FORMS": "0",
            "category_set-0-order": "",
            "category_set-0-id": "",
            "category_set-0-collector": "1",
            "category_set-1-order": "",
            "category_set-1-id": "",
            "category_set-1-collector": "1",
            "category_set-2-order": "",
            "category_set-2-id": "",
            "category_set-2-collector": "1",
        }
        self.client.force_login(self.superuser)
        # Parent object whose change page hosts all the inlines (pk=1
        # matches the hard-coded "1" owner/collector values above).
        self.collector = Collector(pk=1, name='John Fowles')
        self.collector.save()

    def test_simple_inline(self):
        "A simple model can be saved as inlines"
        # First add a new inline
        self.post_data['widget_set-0-name'] = "Widget 1"
        collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,))
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Widget.objects.count(), 1)
        self.assertEqual(Widget.objects.all()[0].name, "Widget 1")
        widget_id = Widget.objects.all()[0].id
        # Check that the PK link exists on the rendered form
        response = self.client.get(collector_url)
        self.assertContains(response, 'name="widget_set-0-id"')
        # Now resave that inline
        self.post_data['widget_set-INITIAL_FORMS'] = "1"
        self.post_data['widget_set-0-id'] = str(widget_id)
        self.post_data['widget_set-0-name'] = "Widget 1"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Widget.objects.count(), 1)
        self.assertEqual(Widget.objects.all()[0].name, "Widget 1")
        # Now modify that inline
        self.post_data['widget_set-INITIAL_FORMS'] = "1"
        self.post_data['widget_set-0-id'] = str(widget_id)
        self.post_data['widget_set-0-name'] = "Widget 1 Updated"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Widget.objects.count(), 1)
        self.assertEqual(Widget.objects.all()[0].name, "Widget 1 Updated")

    def test_explicit_autofield_inline(self):
        "A model with an explicit autofield primary key can be saved as inlines. Regression for #8093"
        # First add a new inline
        self.post_data['grommet_set-0-name'] = "Grommet 1"
        collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,))
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Grommet.objects.count(), 1)
        self.assertEqual(Grommet.objects.all()[0].name, "Grommet 1")
        # Check that the PK link exists on the rendered form
        response = self.client.get(collector_url)
        self.assertContains(response, 'name="grommet_set-0-code"')
        # Now resave that inline
        self.post_data['grommet_set-INITIAL_FORMS'] = "1"
        self.post_data['grommet_set-0-code'] = str(Grommet.objects.all()[0].code)
        self.post_data['grommet_set-0-name'] = "Grommet 1"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Grommet.objects.count(), 1)
        self.assertEqual(Grommet.objects.all()[0].name, "Grommet 1")
        # Now modify that inline
        self.post_data['grommet_set-INITIAL_FORMS'] = "1"
        self.post_data['grommet_set-0-code'] = str(Grommet.objects.all()[0].code)
        self.post_data['grommet_set-0-name'] = "Grommet 1 Updated"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Grommet.objects.count(), 1)
        self.assertEqual(Grommet.objects.all()[0].name, "Grommet 1 Updated")

    def test_char_pk_inline(self):
        "A model with a character PK can be saved as inlines. Regression for #10992"
        # First add a new inline
        self.post_data['doohickey_set-0-code'] = "DH1"
        self.post_data['doohickey_set-0-name'] = "Doohickey 1"
        collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,))
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(DooHickey.objects.count(), 1)
        self.assertEqual(DooHickey.objects.all()[0].name, "Doohickey 1")
        # Check that the PK link exists on the rendered form
        response = self.client.get(collector_url)
        self.assertContains(response, 'name="doohickey_set-0-code"')
        # Now resave that inline
        self.post_data['doohickey_set-INITIAL_FORMS'] = "1"
        self.post_data['doohickey_set-0-code'] = "DH1"
        self.post_data['doohickey_set-0-name'] = "Doohickey 1"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(DooHickey.objects.count(), 1)
        self.assertEqual(DooHickey.objects.all()[0].name, "Doohickey 1")
        # Now modify that inline
        self.post_data['doohickey_set-INITIAL_FORMS'] = "1"
        self.post_data['doohickey_set-0-code'] = "DH1"
        self.post_data['doohickey_set-0-name'] = "Doohickey 1 Updated"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(DooHickey.objects.count(), 1)
        self.assertEqual(DooHickey.objects.all()[0].name, "Doohickey 1 Updated")

    def test_integer_pk_inline(self):
        "A model with an integer PK can be saved as inlines. Regression for #10992"
        # First add a new inline
        self.post_data['whatsit_set-0-index'] = "42"
        self.post_data['whatsit_set-0-name'] = "Whatsit 1"
        collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,))
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Whatsit.objects.count(), 1)
        self.assertEqual(Whatsit.objects.all()[0].name, "Whatsit 1")
        # Check that the PK link exists on the rendered form
        response = self.client.get(collector_url)
        self.assertContains(response, 'name="whatsit_set-0-index"')
        # Now resave that inline
        self.post_data['whatsit_set-INITIAL_FORMS'] = "1"
        self.post_data['whatsit_set-0-index'] = "42"
        self.post_data['whatsit_set-0-name'] = "Whatsit 1"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Whatsit.objects.count(), 1)
        self.assertEqual(Whatsit.objects.all()[0].name, "Whatsit 1")
        # Now modify that inline
        self.post_data['whatsit_set-INITIAL_FORMS'] = "1"
        self.post_data['whatsit_set-0-index'] = "42"
        self.post_data['whatsit_set-0-name'] = "Whatsit 1 Updated"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Whatsit.objects.count(), 1)
        self.assertEqual(Whatsit.objects.all()[0].name, "Whatsit 1 Updated")

    def test_inherited_inline(self):
        "An inherited model can be saved as inlines. Regression for #11042"
        # First add a new inline
        self.post_data['fancydoodad_set-0-name'] = "Fancy Doodad 1"
        collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,))
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(FancyDoodad.objects.count(), 1)
        self.assertEqual(FancyDoodad.objects.all()[0].name, "Fancy Doodad 1")
        doodad_pk = FancyDoodad.objects.all()[0].pk
        # Check that the PK link (the parent-link pointer for MTI) exists
        # on the rendered form
        response = self.client.get(collector_url)
        self.assertContains(response, 'name="fancydoodad_set-0-doodad_ptr"')
        # Now resave that inline
        self.post_data['fancydoodad_set-INITIAL_FORMS'] = "1"
        self.post_data['fancydoodad_set-0-doodad_ptr'] = str(doodad_pk)
        self.post_data['fancydoodad_set-0-name'] = "Fancy Doodad 1"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(FancyDoodad.objects.count(), 1)
        self.assertEqual(FancyDoodad.objects.all()[0].name, "Fancy Doodad 1")
        # Now modify that inline
        self.post_data['fancydoodad_set-INITIAL_FORMS'] = "1"
        self.post_data['fancydoodad_set-0-doodad_ptr'] = str(doodad_pk)
        self.post_data['fancydoodad_set-0-name'] = "Fancy Doodad 1 Updated"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(FancyDoodad.objects.count(), 1)
        self.assertEqual(FancyDoodad.objects.all()[0].name, "Fancy Doodad 1 Updated")

    def test_ordered_inline(self):
        """Check that an inline with an editable ordering fields is
        updated correctly. Regression for #10922"""
        # Create some objects with an initial ordering
        Category.objects.create(id=1, order=1, collector=self.collector)
        Category.objects.create(id=2, order=2, collector=self.collector)
        Category.objects.create(id=3, order=0, collector=self.collector)
        Category.objects.create(id=4, order=0, collector=self.collector)
        # NB: The order values must be changed so that the items are reordered.
        self.post_data.update({
            "name": "Frederick Clegg",
            "category_set-TOTAL_FORMS": "7",
            "category_set-INITIAL_FORMS": "4",
            "category_set-MAX_NUM_FORMS": "0",
            "category_set-0-order": "14",
            "category_set-0-id": "1",
            "category_set-0-collector": "1",
            "category_set-1-order": "13",
            "category_set-1-id": "2",
            "category_set-1-collector": "1",
            "category_set-2-order": "1",
            "category_set-2-id": "3",
            "category_set-2-collector": "1",
            "category_set-3-order": "0",
            "category_set-3-id": "4",
            "category_set-3-collector": "1",
            "category_set-4-order": "",
            "category_set-4-id": "",
            "category_set-4-collector": "1",
            "category_set-5-order": "",
            "category_set-5-id": "",
            "category_set-5-collector": "1",
            "category_set-6-order": "",
            "category_set-6-id": "",
            "category_set-6-collector": "1",
        })
        collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,))
        response = self.client.post(collector_url, self.post_data)
        # Successful post will redirect
        self.assertEqual(response.status_code, 302)
        # Check that the order values have been applied to the right objects
        self.assertEqual(self.collector.category_set.count(), 4)
        self.assertEqual(Category.objects.get(id=1).order, 14)
        self.assertEqual(Category.objects.get(id=2).order, 13)
        self.assertEqual(Category.objects.get(id=3).order, 1)
        self.assertEqual(Category.objects.get(id=4).order, 0)
@override_settings(ROOT_URLCONF='admin_views.urls')
class NeverCacheTests(TestCase):
    """
    Admin views are marked never-cache (max-age 0), while the auth and
    i18n views the admin delegates to carry no max-age header at all.
    """

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')
        cls.s1 = Section.objects.create(name='Test section')

    def setUp(self):
        self.client.force_login(self.superuser)

    def test_admin_index(self):
        "Check the never-cache status of the main index"
        resp = self.client.get(reverse('admin:index'))
        self.assertEqual(get_max_age(resp), 0)

    def test_app_index(self):
        "Check the never-cache status of an application index"
        resp = self.client.get(reverse('admin:app_list', args=('admin_views',)))
        self.assertEqual(get_max_age(resp), 0)

    def test_model_index(self):
        "Check the never-cache status of a model index"
        resp = self.client.get(reverse('admin:admin_views_fabric_changelist'))
        self.assertEqual(get_max_age(resp), 0)

    def test_model_add(self):
        "Check the never-cache status of a model add page"
        resp = self.client.get(reverse('admin:admin_views_fabric_add'))
        self.assertEqual(get_max_age(resp), 0)

    def test_model_view(self):
        "Check the never-cache status of a model edit page"
        change_url = reverse('admin:admin_views_section_change', args=(self.s1.pk,))
        self.assertEqual(get_max_age(self.client.get(change_url)), 0)

    def test_model_history(self):
        "Check the never-cache status of a model history page"
        history_url = reverse('admin:admin_views_section_history', args=(self.s1.pk,))
        self.assertEqual(get_max_age(self.client.get(history_url)), 0)

    def test_model_delete(self):
        "Check the never-cache status of a model delete page"
        delete_url = reverse('admin:admin_views_section_delete', args=(self.s1.pk,))
        self.assertEqual(get_max_age(self.client.get(delete_url)), 0)

    def test_login(self):
        "Check the never-cache status of login views"
        # Hitting the index anonymously renders the login view.
        self.client.logout()
        resp = self.client.get(reverse('admin:index'))
        self.assertEqual(get_max_age(resp), 0)

    def test_logout(self):
        "Check the never-cache status of logout view"
        resp = self.client.get(reverse('admin:logout'))
        self.assertEqual(get_max_age(resp), 0)

    def test_password_change(self):
        "Check the never-cache status of the password change view"
        self.client.logout()
        resp = self.client.get(reverse('admin:password_change'))
        self.assertEqual(get_max_age(resp), None)

    def test_password_change_done(self):
        "Check the never-cache status of the password change done view"
        resp = self.client.get(reverse('admin:password_change_done'))
        self.assertEqual(get_max_age(resp), None)

    def test_JS_i18n(self):
        "Check the never-cache status of the JavaScript i18n view"
        resp = self.client.get(reverse('admin:jsi18n'))
        self.assertEqual(get_max_age(resp), None)
@override_settings(ROOT_URLCONF='admin_views.urls')
class PrePopulatedTest(TestCase):
    """
    The prepopulated-fields JavaScript configuration is emitted (or
    suppressed) correctly on the admin add/change pages.

    The response embeds the configuration as JSON inside HTML, so the
    quotes are HTML-escaped; assertions therefore match on the literal
    ``&quot;`` entities present in the response body.
    """

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')
        cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')

    def setUp(self):
        self.client.force_login(self.superuser)

    def test_prepopulated_on(self):
        # The add page wires up slug prepopulation for the main form and
        # the inline. (The original assertions had the HTML entities
        # decoded, producing invalid nested quotes; restored here.)
        response = self.client.get(reverse('admin:admin_views_prepopulatedpost_add'))
        self.assertContains(response, "&quot;id&quot;: &quot;#id_slug&quot;")
        self.assertContains(response, "&quot;dependency_ids&quot;: [&quot;#id_title&quot;]")
        self.assertContains(response, "&quot;id&quot;: &quot;#id_prepopulatedsubpost_set-0-subslug&quot;")

    def test_prepopulated_off(self):
        # On the change page of an already-published post, prepopulation
        # must be disabled for both the main form and the inline.
        response = self.client.get(reverse('admin:admin_views_prepopulatedpost_change', args=(self.p1.pk,)))
        self.assertContains(response, "A Long Title")
        self.assertNotContains(response, "&quot;id&quot;: &quot;#id_slug&quot;")
        self.assertNotContains(response, "&quot;dependency_ids&quot;: [&quot;#id_title&quot;]")
        self.assertNotContains(
            response,
            "&quot;id&quot;: &quot;#id_prepopulatedsubpost_set-0-subslug&quot;"
        )

    @override_settings(USE_THOUSAND_SEPARATOR=True, USE_L10N=True)
    def test_prepopulated_maxlength_localized(self):
        """
        Regression test for #15938: if USE_THOUSAND_SEPARATOR is set, make sure
        that maxLength (in the JavaScript) is rendered without separators.
        """
        response = self.client.get(reverse('admin:admin_views_prepopulatedpostlargeslug_add'))
        self.assertContains(response, "&quot;maxLength&quot;: 1000")  # instead of 1,000
@override_settings(ROOT_URLCONF='admin_views.urls')
class SeleniumTests(AdminSeleniumTestCase):
available_apps = ['admin_views'] + AdminSeleniumTestCase.available_apps
def setUp(self):
    # Per-test fixtures: a superuser to log in with and one existing
    # PrePopulatedPost used by the prepopulation tests.
    self.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')
    self.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
def test_prepopulated_fields(self):
    """
    Ensure that the JavaScript-automated prepopulated fields work with the
    main form and with stacked and tabular inlines.
    Refs #13068, #9264, #9983, #9784.
    """
    self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
    self.selenium.get(self.live_server_url + reverse('admin:admin_views_mainprepopulated_add'))
    # Main form ----------------------------------------------------------
    self.selenium.find_element_by_id('id_pubdate').send_keys('2012-02-18')
    self.get_select_option('#id_status', 'option two').click()
    # Non-ASCII name exercises slugify's transliteration/stripping.
    self.selenium.find_element_by_id('id_name').send_keys(' this is the mAin nÀMë and it\'s awεšomeııı')
    slug1 = self.selenium.find_element_by_id('id_slug1').get_attribute('value')
    slug2 = self.selenium.find_element_by_id('id_slug2').get_attribute('value')
    slug3 = self.selenium.find_element_by_id('id_slug3').get_attribute('value')
    self.assertEqual(slug1, 'main-name-and-its-awesomeiii-2012-02-18')
    self.assertEqual(slug2, 'option-two-main-name-and-its-awesomeiii')
    # slug3 allows unicode, so accented characters survive.
    self.assertEqual(slug3, 'main-n\xe0m\xeb-and-its-aw\u03b5\u0161ome\u0131\u0131\u0131')
    # Stacked inlines ----------------------------------------------------
    # Initial inline
    self.selenium.find_element_by_id('id_relatedprepopulated_set-0-pubdate').send_keys('2011-12-17')
    self.get_select_option('#id_relatedprepopulated_set-0-status', 'option one').click()
    self.selenium.find_element_by_id('id_relatedprepopulated_set-0-name').send_keys(
        ' here is a sŤāÇkeð inline ! '
    )
    slug1 = self.selenium.find_element_by_id('id_relatedprepopulated_set-0-slug1').get_attribute('value')
    slug2 = self.selenium.find_element_by_id('id_relatedprepopulated_set-0-slug2').get_attribute('value')
    self.assertEqual(slug1, 'here-stacked-inline-2011-12-17')
    self.assertEqual(slug2, 'option-one-here-stacked-inline')
    # Add an inline (dynamically added form must also be wired up)
    self.selenium.find_elements_by_link_text('Add another Related prepopulated')[0].click()
    self.selenium.find_element_by_id('id_relatedprepopulated_set-1-pubdate').send_keys('1999-01-25')
    self.get_select_option('#id_relatedprepopulated_set-1-status', 'option two').click()
    self.selenium.find_element_by_id('id_relatedprepopulated_set-1-name').send_keys(
        ' now you haVe anöther sŤāÇkeð inline with a very ... '
        'loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooog text... '
    )
    slug1 = self.selenium.find_element_by_id('id_relatedprepopulated_set-1-slug1').get_attribute('value')
    slug2 = self.selenium.find_element_by_id('id_relatedprepopulated_set-1-slug2').get_attribute('value')
    # 50 characters maximum for slug1 field
    self.assertEqual(slug1, 'now-you-have-another-stacked-inline-very-loooooooo')
    # 60 characters maximum for slug2 field
    self.assertEqual(slug2, 'option-two-now-you-have-another-stacked-inline-very-looooooo')
    # Tabular inlines ----------------------------------------------------
    # Initial inline
    self.selenium.find_element_by_id('id_relatedprepopulated_set-2-0-pubdate').send_keys('1234-12-07')
    self.get_select_option('#id_relatedprepopulated_set-2-0-status', 'option two').click()
    self.selenium.find_element_by_id('id_relatedprepopulated_set-2-0-name').send_keys(
        'And now, with a tÃbűlaŘ inline !!!'
    )
    slug1 = self.selenium.find_element_by_id('id_relatedprepopulated_set-2-0-slug1').get_attribute('value')
    slug2 = self.selenium.find_element_by_id('id_relatedprepopulated_set-2-0-slug2').get_attribute('value')
    self.assertEqual(slug1, 'and-now-tabular-inline-1234-12-07')
    self.assertEqual(slug2, 'option-two-and-now-tabular-inline')
    # Add an inline
    self.selenium.find_elements_by_link_text('Add another Related prepopulated')[1].click()
    self.selenium.find_element_by_id('id_relatedprepopulated_set-2-1-pubdate').send_keys('1981-08-22')
    self.get_select_option('#id_relatedprepopulated_set-2-1-status', 'option one').click()
    self.selenium.find_element_by_id('id_relatedprepopulated_set-2-1-name').send_keys(
        'a tÃbűlaŘ inline with ignored ;"&*^\%$#@-/`~ characters'
    )
    slug1 = self.selenium.find_element_by_id('id_relatedprepopulated_set-2-1-slug1').get_attribute('value')
    slug2 = self.selenium.find_element_by_id('id_relatedprepopulated_set-2-1-slug2').get_attribute('value')
    self.assertEqual(slug1, 'tabular-inline-ignored-characters-1981-08-22')
    self.assertEqual(slug2, 'option-one-tabular-inline-ignored-characters')
    # Save and check that everything is properly stored in the database
    self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
    self.wait_page_loaded()
    self.assertEqual(MainPrepopulated.objects.all().count(), 1)
    # Each get() below doubles as an assertion: it raises DoesNotExist
    # if the computed slugs were not persisted as expected.
    MainPrepopulated.objects.get(
        name=' this is the mAin nÀMë and it\'s awεšomeııı',
        pubdate='2012-02-18',
        status='option two',
        slug1='main-name-and-its-awesomeiii-2012-02-18',
        slug2='option-two-main-name-and-its-awesomeiii',
    )
    self.assertEqual(RelatedPrepopulated.objects.all().count(), 4)
    RelatedPrepopulated.objects.get(
        name=' here is a sŤāÇkeð inline ! ',
        pubdate='2011-12-17',
        status='option one',
        slug1='here-stacked-inline-2011-12-17',
        slug2='option-one-here-stacked-inline',
    )
    RelatedPrepopulated.objects.get(
        # 75 characters in name field
        name=' now you haVe anöther sŤāÇkeð inline with a very ... loooooooooooooooooo',
        pubdate='1999-01-25',
        status='option two',
        slug1='now-you-have-another-stacked-inline-very-loooooooo',
        slug2='option-two-now-you-have-another-stacked-inline-very-looooooo',
    )
    RelatedPrepopulated.objects.get(
        name='And now, with a tÃbűlaŘ inline !!!',
        pubdate='1234-12-07',
        status='option two',
        slug1='and-now-tabular-inline-1234-12-07',
        slug2='option-two-and-now-tabular-inline',
    )
    RelatedPrepopulated.objects.get(
        name='a tÃbűlaŘ inline with ignored ;"&*^\%$#@-/`~ characters',
        pubdate='1981-08-22',
        status='option one',
        slug1='tabular-inline-ignored-characters-1981-08-22',
        slug2='option-one-tabular-inline-ignored-characters',
    )
def test_populate_existing_object(self):
    """
    Ensure that the prepopulation works for existing objects too, as long
    as the original field is empty.
    Refs #19082.
    """
    # Slugs are empty to start with.
    item = MainPrepopulated.objects.create(
        name=' this is the mAin nÀMë',
        pubdate='2012-02-18',
        status='option two',
        slug1='',
        slug2='',
    )
    self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
    object_url = self.live_server_url + reverse('admin:admin_views_mainprepopulated_change', args=(item.id,))
    self.selenium.get(object_url)
    self.selenium.find_element_by_id('id_name').send_keys(' the best')
    # The slugs got prepopulated since they were originally empty
    slug1 = self.selenium.find_element_by_id('id_slug1').get_attribute('value')
    slug2 = self.selenium.find_element_by_id('id_slug2').get_attribute('value')
    self.assertEqual(slug1, 'main-name-best-2012-02-18')
    self.assertEqual(slug2, 'option-two-main-name-best')
    # Save the object
    self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
    self.wait_page_loaded()
    # Reload the change page and edit the name again.
    self.selenium.get(object_url)
    self.selenium.find_element_by_id('id_name').send_keys(' hello')
    # The slugs did NOT change this time, since they were no longer
    # empty when the page loaded.
    slug1 = self.selenium.find_element_by_id('id_slug1').get_attribute('value')
    slug2 = self.selenium.find_element_by_id('id_slug2').get_attribute('value')
    self.assertEqual(slug1, 'main-name-best-2012-02-18')
    self.assertEqual(slug2, 'option-two-main-name-best')
def test_collapsible_fieldset(self):
    """
    Fieldsets defined with the 'collapse' class start hidden and can be
    shown/hidden via the collapser toggle link.
    """
    self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
    add_url = self.live_server_url + reverse('admin:admin_views_article_add')
    self.selenium.get(add_url)
    # The collapsed fieldset's field starts out hidden.
    self.assertFalse(self.selenium.find_element_by_id('id_title').is_displayed())
    # Clicking 'Show' reveals the field...
    self.selenium.find_elements_by_link_text('Show')[0].click()
    self.assertTrue(self.selenium.find_element_by_id('id_title').is_displayed())
    # ...and flips the toggle link's label to 'Hide'.
    toggle = self.selenium.find_element_by_id('fieldsetcollapser0')
    self.assertEqual(toggle.text, "Hide")
def test_first_field_focus(self):
    """JavaScript-assisted auto-focus on first usable form field."""
    self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
    cases = [
        # First form field has a single widget.
        ('admin:admin_views_picture_add', 'id_name'),
        # First form field has a MultiWidget; its first sub-widget
        # should receive focus.
        ('admin:admin_views_reservation_add', 'id_start_date_0'),
    ]
    for url_name, element_id in cases:
        self.selenium.get(self.live_server_url + reverse(url_name))
        self.assertEqual(
            self.selenium.switch_to.active_element,
            self.selenium.find_element_by_id(element_id)
        )
def test_cancel_delete_confirmation(self):
    "Cancelling the deletion of an object takes the user back one page."
    pizza = Pizza.objects.create(name="Double Cheese")
    change_url = self.live_server_url + reverse(
        'admin:admin_views_pizza_change', args=(pizza.id,)
    )
    self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
    self.selenium.get(change_url)
    # Navigate to the delete-confirmation page, then back out of it.
    self.selenium.find_element_by_class_name('deletelink').click()
    self.selenium.find_element_by_class_name('cancel-link').click()
    # Wait until we're back on the change page.
    self.wait_for_text('#content h1', 'Change pizza')
    self.assertEqual(self.selenium.current_url, change_url)
    # Nothing was deleted.
    self.assertEqual(Pizza.objects.count(), 1)
def test_cancel_delete_related_confirmation(self):
    """
    Cancelling the deletion of an object with relations takes the user back
    one page.
    """
    pizza = Pizza.objects.create(name="Double Cheese")
    # Give the pizza related objects so the confirmation page lists a
    # cascade of deletions.
    toppings = [
        Topping.objects.create(name="Cheddar"),
        Topping.objects.create(name="Mozzarella"),
    ]
    pizza.toppings.add(*toppings)
    change_url = self.live_server_url + reverse(
        'admin:admin_views_pizza_change', args=(pizza.id,)
    )
    self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
    self.selenium.get(change_url)
    # Navigate to the delete-confirmation page, then back out of it.
    self.selenium.find_element_by_class_name('deletelink').click()
    self.selenium.find_element_by_class_name('cancel-link').click()
    # Wait until we're back on the change page.
    self.wait_for_text('#content h1', 'Change pizza')
    self.assertEqual(self.selenium.current_url, change_url)
    # Neither the pizza nor its related toppings were deleted.
    self.assertEqual(Pizza.objects.count(), 1)
    self.assertEqual(Topping.objects.count(), 2)
def test_list_editable_popups(self):
    """
    list_editable foreign keys have add/change popups.
    """
    from selenium.webdriver.support.ui import Select
    s1 = Section.objects.create(name='Test section')
    Article.objects.create(
        title='foo',
        content='<p>Middle content</p>',
        date=datetime.datetime(2008, 3, 18, 11, 54, 58),
        section=s1,
    )
    self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
    self.selenium.get(self.live_server_url + reverse('admin:admin_views_article_changelist'))
    # Change popup: edit the related Section in a popup window and check
    # the changelist's select reflects the edit after the popup closes.
    self.selenium.find_element_by_id('change_id_form-0-section').click()
    self.wait_for_popup()
    # window_handles[-1] is the newly opened popup window.
    self.selenium.switch_to.window(self.selenium.window_handles[-1])
    self.wait_for_text('#content h1', 'Change section')
    name_input = self.selenium.find_element_by_id('id_name')
    name_input.clear()
    name_input.send_keys('edited section')
    self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
    # Saving dismisses the popup; switch back to the main window.
    self.selenium.switch_to.window(self.selenium.window_handles[0])
    select = Select(self.selenium.find_element_by_id('id_form-0-section'))
    self.assertEqual(select.first_selected_option.text, 'edited section')
    # Add popup: create a new Section in a popup and check it becomes
    # the selected option in the changelist.
    self.selenium.find_element_by_id('add_id_form-0-section').click()
    self.wait_for_popup()
    self.selenium.switch_to.window(self.selenium.window_handles[-1])
    self.wait_for_text('#content h1', 'Add section')
    self.selenium.find_element_by_id('id_name').send_keys('new section')
    self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
    self.selenium.switch_to.window(self.selenium.window_handles[0])
    select = Select(self.selenium.find_element_by_id('id_form-0-section'))
    self.assertEqual(select.first_selected_option.text, 'new section')
def test_inline_uuid_pk_edit_with_popup(self):
    """
    Editing a related object with a UUID primary key through the change
    popup keeps the UUID selected in the parent form's select widget.
    """
    from selenium.webdriver.support.ui import Select
    parent = ParentWithUUIDPK.objects.create(title='test')
    related_with_parent = RelatedWithUUIDPKModel.objects.create(parent=parent)
    self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
    change_url = reverse('admin:admin_views_relatedwithuuidpkmodel_change', args=(related_with_parent.id,))
    self.selenium.get(self.live_server_url + change_url)
    self.selenium.find_element_by_id('change_id_parent').click()
    self.wait_for_popup()
    # Save the popup without changes and return to the main window.
    self.selenium.switch_to.window(self.selenium.window_handles[-1])
    self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
    self.selenium.switch_to.window(self.selenium.window_handles[0])
    select = Select(self.selenium.find_element_by_id('id_parent'))
    # Both the visible text and the option value must be the UUID string.
    self.assertEqual(select.first_selected_option.text, str(parent.id))
    self.assertEqual(select.first_selected_option.get_attribute('value'), str(parent.id))
def test_inline_uuid_pk_add_with_popup(self):
    """
    Adding a related object with a UUID primary key through the add popup
    selects the new UUID in the parent form's select widget.
    """
    from selenium.webdriver.support.ui import Select
    self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
    self.selenium.get(self.live_server_url + reverse('admin:admin_views_relatedwithuuidpkmodel_add'))
    self.selenium.find_element_by_id('add_id_parent').click()
    self.wait_for_popup()
    self.selenium.switch_to.window(self.selenium.window_handles[-1])
    self.selenium.find_element_by_id('id_title').send_keys('test')
    self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
    self.selenium.switch_to.window(self.selenium.window_handles[0])
    select = Select(self.selenium.find_element_by_id('id_parent'))
    # The popup created the only ParentWithUUIDPK row; its UUID should be
    # both the displayed text and the underlying option value.
    uuid_id = str(ParentWithUUIDPK.objects.first().id)
    self.assertEqual(select.first_selected_option.text, uuid_id)
    self.assertEqual(select.first_selected_option.get_attribute('value'), uuid_id)
def test_inline_uuid_pk_delete_with_popup(self):
    """
    Deleting a related object with a UUID primary key through the delete
    popup resets the parent form's select widget to the empty choice.
    """
    from selenium.webdriver.support.ui import Select
    parent = ParentWithUUIDPK.objects.create(title='test')
    related_with_parent = RelatedWithUUIDPKModel.objects.create(parent=parent)
    self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
    change_url = reverse('admin:admin_views_relatedwithuuidpkmodel_change', args=(related_with_parent.id,))
    self.selenium.get(self.live_server_url + change_url)
    self.selenium.find_element_by_id('delete_id_parent').click()
    self.wait_for_popup()
    # Confirm the deletion on the popup's confirmation page.
    self.selenium.switch_to.window(self.selenium.window_handles[-1])
    self.selenium.find_element_by_xpath('//input[@value="Yes, I\'m sure"]').click()
    self.selenium.switch_to.window(self.selenium.window_handles[0])
    select = Select(self.selenium.find_element_by_id('id_parent'))
    self.assertEqual(ParentWithUUIDPK.objects.count(), 0)
    # The select falls back to the empty label with a blank value.
    self.assertEqual(select.first_selected_option.text, '---------')
    self.assertEqual(select.first_selected_option.get_attribute('value'), '')
def test_list_editable_raw_id_fields(self):
    """
    Picking an object in a raw_id_fields lookup popup writes its primary
    key (here a UUID) into the list_editable raw id input.
    """
    parent = ParentWithUUIDPK.objects.create(title='test')
    parent2 = ParentWithUUIDPK.objects.create(title='test2')
    RelatedWithUUIDPKModel.objects.create(parent=parent)
    self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
    # site2 registers this model with raw_id_fields on the changelist.
    change_url = reverse('admin:admin_views_relatedwithuuidpkmodel_changelist', current_app=site2.name)
    self.selenium.get(self.live_server_url + change_url)
    self.selenium.find_element_by_id('lookup_id_form-0-parent').click()
    self.wait_for_popup()
    self.selenium.switch_to.window(self.selenium.window_handles[-1])
    # Select "parent2" in the popup.
    self.selenium.find_element_by_link_text(str(parent2.pk)).click()
    self.selenium.switch_to.window(self.selenium.window_handles[0])
    # The newly selected pk should appear in the raw id input.
    value = self.selenium.find_element_by_id('id_form-0-parent').get_attribute('value')
    self.assertEqual(value, str(parent2.pk))
@override_settings(ROOT_URLCONF='admin_views.urls')
class ReadonlyTest(AdminFieldExtractionMixin, TestCase):
    """
    Tests for rendering and processing of ModelAdmin.readonly_fields.
    """

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')

    def setUp(self):
        self.client.force_login(self.superuser)

    @ignore_warnings(category=RemovedInDjango20Warning)  # for allow_tags deprecation
    def test_readonly_get(self):
        """Readonly fields render as text, not as form inputs."""
        response = self.client.get(reverse('admin:admin_views_post_add'))
        self.assertEqual(response.status_code, 200)
        self.assertNotContains(response, 'name="posted"')
        # 3 fields + 2 submit buttons + 5 inline management form fields, + 2
        # hidden fields for inlines + 1 field for the inline + 2 empty form
        self.assertContains(response, "<input", count=15)
        self.assertContains(response, formats.localize(datetime.date.today()))
        self.assertContains(response, "<label>Awesomeness level:</label>")
        self.assertContains(response, "Very awesome.")
        self.assertContains(response, "Unknown coolness.")
        self.assertContains(response, "foo")
        # Checks that multiline text in a readonly field gets <br /> tags
        self.assertContains(response, "Multiline<br />test<br />string")
        self.assertContains(response, "<p>Multiline<br />html<br />content</p>", html=True)
        self.assertContains(response, "InlineMultiline<br />test<br />string")
        # Remove only this last line when the deprecation completes.
        self.assertContains(response, "<p>Multiline<br />html<br />content<br />with allow tags</p>", html=True)
        self.assertContains(response, formats.localize(datetime.date.today() - datetime.timedelta(days=7)))
        self.assertContains(response, '<div class="form-row field-coolness">')
        self.assertContains(response, '<div class="form-row field-awesomeness_level">')
        self.assertContains(response, '<div class="form-row field-posted">')
        self.assertContains(response, '<div class="form-row field-value">')
        self.assertContains(response, '<div class="form-row">')
        self.assertContains(response, '<p class="help">', 3)
        self.assertContains(
            response,
            '<p class="help">Some help text for the title (with unicode ŠĐĆŽćžšđ)</p>',
            html=True
        )
        self.assertContains(
            response,
            '<p class="help">Some help text for the content (with unicode ŠĐĆŽćžšđ)</p>',
            html=True
        )
        self.assertContains(
            response,
            '<p class="help">Some help text for the date (with unicode ŠĐĆŽćžšđ)</p>',
            html=True
        )
        p = Post.objects.create(title="I worked on readonly_fields", content="Its good stuff")
        response = self.client.get(reverse('admin:admin_views_post_change', args=(p.pk,)))
        self.assertContains(response, "%d amount of cool" % p.pk)

    @ignore_warnings(category=RemovedInDjango20Warning)  # for allow_tags deprecation
    def test_readonly_text_field(self):
        """CRLF paragraph breaks in readonly text become <br /><br /> pairs."""
        p = Post.objects.create(
            title="Readonly test", content="test",
            readonly_content='test\r\n\r\ntest\r\n\r\ntest\r\n\r\ntest',
        )
        Link.objects.create(
            url="http://www.djangoproject.com", post=p,
            readonly_link_content="test\r\nlink",
        )
        response = self.client.get(reverse('admin:admin_views_post_change', args=(p.pk,)))
        # Checking readonly field.
        self.assertContains(response, 'test<br /><br />test<br /><br />test<br /><br />test')
        # Checking readonly field in inline.
        self.assertContains(response, 'test<br />link')

    def test_readonly_post(self):
        """POSTed data for a readonly field is ignored, not saved."""
        data = {
            "title": "Django Got Readonly Fields",
            "content": "This is an incredible development.",
            "link_set-TOTAL_FORMS": "1",
            "link_set-INITIAL_FORMS": "0",
            "link_set-MAX_NUM_FORMS": "0",
        }
        response = self.client.post(reverse('admin:admin_views_post_add'), data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Post.objects.count(), 1)
        p = Post.objects.get()
        self.assertEqual(p.posted, datetime.date.today())
        data["posted"] = "10-8-1990"  # some date that's not today
        response = self.client.post(reverse('admin:admin_views_post_add'), data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Post.objects.count(), 2)
        p = Post.objects.order_by('-id')[0]
        # The submitted "posted" value was ignored because the field is readonly.
        self.assertEqual(p.posted, datetime.date.today())

    def test_readonly_manytomany(self):
        "Regression test for #13004"
        response = self.client.get(reverse('admin:admin_views_pizza_add'))
        self.assertEqual(response.status_code, 200)

    def test_user_password_change_limited_queryset(self):
        """The password change view 404s for users outside the limited queryset."""
        su = User.objects.filter(is_superuser=True)[0]
        response = self.client.get(reverse('admin2:auth_user_password_change', args=(su.pk,)))
        self.assertEqual(response.status_code, 404)

    def test_change_form_renders_correct_null_choice_value(self):
        """
        Regression test for #17911.
        """
        choice = Choice.objects.create(choice=None)
        response = self.client.get(reverse('admin:admin_views_choice_change', args=(choice.pk,)))
        self.assertContains(response, '<p>No opinion</p>', html=True)
        self.assertNotContains(response, '<p>(None)</p>')

    def test_readonly_manytomany_backwards_ref(self):
        """
        Regression test for #16433 - backwards references for related objects
        broke if the related field is read-only due to the help_text attribute
        """
        topping = Topping.objects.create(name='Salami')
        pizza = Pizza.objects.create(name='Americano')
        pizza.toppings.add(topping)
        response = self.client.get(reverse('admin:admin_views_topping_add'))
        self.assertEqual(response.status_code, 200)

    def test_readonly_onetoone_backwards_ref(self):
        """
        Can reference a reverse OneToOneField in ModelAdmin.readonly_fields.
        """
        v1 = Villain.objects.create(name='Adam')
        pl = Plot.objects.create(name='Test Plot', team_leader=v1, contact=v1)
        pd = PlotDetails.objects.create(details='Brand New Plot', plot=pl)
        response = self.client.get(reverse('admin:admin_views_plotproxy_change', args=(pl.pk,)))
        field = self.get_admin_readonly_field(response, 'plotdetails')
        self.assertEqual(field.contents(), 'Brand New Plot')
        # The reverse relation also works if the OneToOneField is null.
        pd.plot = None
        pd.save()
        response = self.client.get(reverse('admin:admin_views_plotproxy_change', args=(pl.pk,)))
        field = self.get_admin_readonly_field(response, 'plotdetails')
        self.assertEqual(field.contents(), '-')  # default empty value

    @ignore_warnings(category=RemovedInDjango20Warning)  # for allow_tags deprecation
    def test_readonly_field_overrides(self):
        """
        Regression test for #22087 - ModelForm Meta overrides are ignored by
        AdminReadonlyField
        """
        p = FieldOverridePost.objects.create(title="Test Post", content="Test Content")
        response = self.client.get(reverse('admin:admin_views_fieldoverridepost_change', args=(p.pk,)))
        self.assertContains(response, '<p class="help">Overridden help text for the date</p>')
        self.assertContains(response, '<label for="id_public">Overridden public label:</label>', html=True)
        self.assertNotContains(response, "Some help text for the date (with unicode ŠĐĆŽćžšđ)")

    def test_correct_autoescaping(self):
        """
        Make sure that non-field readonly elements are properly autoescaped (#24461)
        """
        section = Section.objects.create(name='<a>evil</a>')
        response = self.client.get(reverse('admin:admin_views_section_change', args=(section.pk,)))
        # The raw markup must never appear; only its HTML-escaped form may.
        # (Asserting both Contains and NotContains on the same literal string
        # was contradictory and could never pass.)
        self.assertNotContains(response, "<a>evil</a>", status_code=200)
        self.assertContains(response, "&lt;a&gt;evil&lt;/a&gt;", status_code=200)
@override_settings(ROOT_URLCONF='admin_views.urls')
class LimitChoicesToInAdminTest(TestCase):
    """Tests for limit_choices_to behavior in admin forms."""

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')

    def setUp(self):
        self.client.force_login(self.superuser)

    def test_limit_choices_to_as_callable(self):
        """Test for ticket 2445 changes to admin."""
        one_day = datetime.timedelta(days=1)
        # One character acted in the future (allowed), one in the past (excluded).
        threepwood = Character.objects.create(
            username='threepwood',
            last_action=datetime.datetime.today() + one_day,
        )
        marley = Character.objects.create(
            username='marley',
            last_action=datetime.datetime.today() - one_day,
        )
        response = self.client.get(reverse('admin:admin_views_stumpjoke_add'))
        # The allowed option should appear twice; the limited option should not appear.
        self.assertContains(response, threepwood.username, count=2)
        self.assertNotContains(response, marley.username)
@override_settings(ROOT_URLCONF='admin_views.urls')
class RawIdFieldsTest(TestCase):
    """
    Tests for limit_choices_to applied to raw_id_fields lookup popups.
    """

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')

    def setUp(self):
        self.client.force_login(self.superuser)

    def test_limit_choices_to(self):
        """Regression test for 14880"""
        actor = Actor.objects.create(name="Palin", age=27)
        Inquisition.objects.create(expected=True,
                                   leader=actor,
                                   country="England")
        Inquisition.objects.create(expected=False,
                                   leader=actor,
                                   country="Spain")
        response = self.client.get(reverse('admin:admin_views_sketch_add'))
        # Find the link
        m = re.search(br'<a href="([^"]*)"[^>]* id="lookup_id_inquisition"', response.content)
        self.assertTrue(m)  # Got a match
        # Unescape HTML-entity ampersands in the captured href (the previous
        # replace("&", "&") was a no-op and left "&amp;" in the query string).
        popup_url = m.groups()[0].decode().replace("&amp;", "&")
        # Handle relative links
        popup_url = urljoin(response.request['PATH_INFO'], popup_url)
        # Get the popup and verify the correct objects show up in the resulting
        # page. This step also tests integers, strings and booleans in the
        # lookup query string; in model we define inquisition field to have a
        # limit_choices_to option that includes a filter on a string field
        # (inquisition__actor__name), a filter on an integer field
        # (inquisition__actor__age), and a filter on a boolean field
        # (inquisition__expected).
        response2 = self.client.get(popup_url)
        self.assertContains(response2, "Spain")
        self.assertNotContains(response2, "England")

    def test_limit_choices_to_isnull_false(self):
        """Regression test for 20182"""
        Actor.objects.create(name="Palin", age=27)
        Actor.objects.create(name="Kilbraken", age=50, title="Judge")
        response = self.client.get(reverse('admin:admin_views_sketch_add'))
        # Find the link
        m = re.search(br'<a href="([^"]*)"[^>]* id="lookup_id_defendant0"', response.content)
        self.assertTrue(m)  # Got a match
        # Unescape "&amp;" -> "&" (was a no-op replace before).
        popup_url = m.groups()[0].decode().replace("&amp;", "&")
        # Handle relative links
        popup_url = urljoin(response.request['PATH_INFO'], popup_url)
        # Get the popup and verify the correct objects show up in the resulting
        # page. This step tests field__isnull=0 gets parsed correctly from the
        # lookup query string; in model we define defendant0 field to have a
        # limit_choices_to option that includes "actor__title__isnull=False".
        response2 = self.client.get(popup_url)
        self.assertContains(response2, "Kilbraken")
        self.assertNotContains(response2, "Palin")

    def test_limit_choices_to_isnull_true(self):
        """Regression test for 20182"""
        Actor.objects.create(name="Palin", age=27)
        Actor.objects.create(name="Kilbraken", age=50, title="Judge")
        response = self.client.get(reverse('admin:admin_views_sketch_add'))
        # Find the link
        m = re.search(br'<a href="([^"]*)"[^>]* id="lookup_id_defendant1"', response.content)
        self.assertTrue(m)  # Got a match
        # Unescape "&amp;" -> "&" (was a no-op replace before).
        popup_url = m.groups()[0].decode().replace("&amp;", "&")
        # Handle relative links
        popup_url = urljoin(response.request['PATH_INFO'], popup_url)
        # Get the popup and verify the correct objects show up in the resulting
        # page. This step tests field__isnull=1 gets parsed correctly from the
        # lookup query string; in model we define defendant1 field to have a
        # limit_choices_to option that includes "actor__title__isnull=True".
        response2 = self.client.get(popup_url)
        self.assertNotContains(response2, "Kilbraken")
        self.assertContains(response2, "Palin")

    def test_list_display_method_same_name_as_reverse_accessor(self):
        """
        Should be able to use a ModelAdmin method in list_display that has the
        same name as a reverse model field ("sketch" in this case).
        """
        actor = Actor.objects.create(name="Palin", age=27)
        Inquisition.objects.create(expected=True, leader=actor, country="England")
        response = self.client.get(reverse('admin:admin_views_inquisition_changelist'))
        self.assertContains(response, 'list-display-sketch')
@override_settings(ROOT_URLCONF='admin_views.urls')
class UserAdminTest(TestCase):
    """
    Tests user CRUD functionality.
    """

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')
        cls.adduser = User.objects.create_user(username='adduser', password='secret', is_staff=True)
        cls.changeuser = User.objects.create_user(username='changeuser', password='secret', is_staff=True)
        cls.s1 = Section.objects.create(name='Test section')
        cls.a1 = Article.objects.create(
            content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a2 = Article.objects.create(
            content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a3 = Article.objects.create(
            content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
        cls.per1 = Person.objects.create(name='John Mauchly', gender=1, alive=True)
        cls.per2 = Person.objects.create(name='Grace Hopper', gender=1, alive=False)
        cls.per3 = Person.objects.create(name='Guido van Rossum', gender=1, alive=True)

    def setUp(self):
        self.client.force_login(self.superuser)

    def test_save_button(self):
        """Plain save redirects to the new user's change page."""
        user_count = User.objects.count()
        response = self.client.post(reverse('admin:auth_user_add'), {
            'username': 'newuser',
            'password1': 'newpassword',
            'password2': 'newpassword',
        })
        new_user = User.objects.get(username='newuser')
        self.assertRedirects(response, reverse('admin:auth_user_change', args=(new_user.pk,)))
        self.assertEqual(User.objects.count(), user_count + 1)
        self.assertTrue(new_user.has_usable_password())

    def test_save_continue_editing_button(self):
        """'Save and continue editing' also lands on the change page."""
        user_count = User.objects.count()
        response = self.client.post(reverse('admin:auth_user_add'), {
            'username': 'newuser',
            'password1': 'newpassword',
            'password2': 'newpassword',
            '_continue': '1',
        })
        new_user = User.objects.get(username='newuser')
        self.assertRedirects(response, reverse('admin:auth_user_change', args=(new_user.pk,)))
        self.assertEqual(User.objects.count(), user_count + 1)
        self.assertTrue(new_user.has_usable_password())

    def test_password_mismatch(self):
        """Mismatched passwords re-render the form with an error on password2."""
        response = self.client.post(reverse('admin:auth_user_add'), {
            'username': 'newuser',
            'password1': 'newpassword',
            'password2': 'mismatch',
        })
        self.assertEqual(response.status_code, 200)
        adminform = response.context['adminform']
        # The error is attached to password2, not a generic 'password' key.
        self.assertNotIn('password', adminform.form.errors)
        self.assertEqual(adminform.form.errors['password2'], ["The two password fields didn't match."])

    def test_user_fk_add_popup(self):
        """User addition through a FK popup should return the appropriate JavaScript response."""
        response = self.client.get(reverse('admin:admin_views_album_add'))
        self.assertContains(response, reverse('admin:auth_user_add'))
        self.assertContains(response, 'class="related-widget-wrapper-link add-related" id="add_id_owner"')
        response = self.client.get(reverse('admin:auth_user_add') + '?_popup=1')
        # Popup forms hide the extra submit buttons.
        self.assertNotContains(response, 'name="_continue"')
        self.assertNotContains(response, 'name="_addanother"')
        data = {
            'username': 'newuser',
            'password1': 'newpassword',
            'password2': 'newpassword',
            '_popup': '1',
            '_save': '1',
        }
        response = self.client.post(reverse('admin:auth_user_add') + '?_popup=1', data, follow=True)
        self.assertContains(response, '"obj": "newuser"')

    def test_user_fk_change_popup(self):
        """User change through a FK popup should return the appropriate JavaScript response."""
        response = self.client.get(reverse('admin:admin_views_album_add'))
        self.assertContains(response, reverse('admin:auth_user_change', args=('__fk__',)))
        self.assertContains(response, 'class="related-widget-wrapper-link change-related" id="change_id_owner"')
        user = User.objects.get(username='changeuser')
        url = reverse('admin:auth_user_change', args=(user.pk,)) + '?_popup=1'
        response = self.client.get(url)
        self.assertNotContains(response, 'name="_continue"')
        self.assertNotContains(response, 'name="_addanother"')
        data = {
            'username': 'newuser',
            'password1': 'newpassword',
            'password2': 'newpassword',
            'last_login_0': '2007-05-30',
            'last_login_1': '13:20:10',
            'date_joined_0': '2007-05-30',
            'date_joined_1': '13:20:10',
            '_popup': '1',
            '_save': '1',
        }
        response = self.client.post(url, data, follow=True)
        self.assertContains(response, '"obj": "newuser"')
        self.assertContains(response, '"action": "change"')

    def test_user_fk_delete_popup(self):
        """User deletion through a FK popup should return the appropriate JavaScript response."""
        response = self.client.get(reverse('admin:admin_views_album_add'))
        self.assertContains(response, reverse('admin:auth_user_delete', args=('__fk__',)))
        self.assertContains(response, 'class="related-widget-wrapper-link change-related" id="change_id_owner"')
        user = User.objects.get(username='changeuser')
        url = reverse('admin:auth_user_delete', args=(user.pk,)) + '?_popup=1'
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        data = {
            'post': 'yes',
            '_popup': '1',
        }
        response = self.client.post(url, data, follow=True)
        self.assertContains(response, '"action": "delete"')

    def test_save_add_another_button(self):
        """'Save and add another' redirects back to the add form."""
        user_count = User.objects.count()
        response = self.client.post(reverse('admin:auth_user_add'), {
            'username': 'newuser',
            'password1': 'newpassword',
            'password2': 'newpassword',
            '_addanother': '1',
        })
        new_user = User.objects.order_by('-id')[0]
        self.assertRedirects(response, reverse('admin:auth_user_add'))
        self.assertEqual(User.objects.count(), user_count + 1)
        self.assertTrue(new_user.has_usable_password())

    def test_user_permission_performance(self):
        u = User.objects.all()[0]
        # Don't depend on a warm cache, see #17377.
        ContentType.objects.clear_cache()
        with self.assertNumQueries(10):
            response = self.client.get(reverse('admin:auth_user_change', args=(u.pk,)))
            self.assertEqual(response.status_code, 200)

    def test_form_url_present_in_context(self):
        """admin3 customizes form_url for the password change view."""
        u = User.objects.all()[0]
        response = self.client.get(reverse('admin3:auth_user_password_change', args=(u.pk,)))
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['form_url'], 'pony')
@override_settings(ROOT_URLCONF='admin_views.urls')
class GroupAdminTest(TestCase):
    """
    Tests group CRUD functionality.
    """

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')

    def setUp(self):
        self.client.force_login(self.superuser)

    def test_save_button(self):
        """Saving the add form creates the group and redirects to the changelist."""
        group_count = Group.objects.count()
        response = self.client.post(reverse('admin:auth_group_add'), {
            'name': 'newgroup',
        })
        # Bind the newly created group (the result was previously discarded
        # as a dead expression statement) and verify it was saved correctly.
        new_group = Group.objects.order_by('-id')[0]
        self.assertRedirects(response, reverse('admin:auth_group_changelist'))
        self.assertEqual(new_group.name, 'newgroup')
        self.assertEqual(Group.objects.count(), group_count + 1)

    def test_group_permission_performance(self):
        g = Group.objects.create(name="test_group")
        # Ensure no queries are skipped due to cached content type for Group.
        ContentType.objects.clear_cache()
        with self.assertNumQueries(8):
            response = self.client.get(reverse('admin:auth_group_change', args=(g.pk,)))
            self.assertEqual(response.status_code, 200)
@override_settings(ROOT_URLCONF='admin_views.urls')
class CSSTest(TestCase):
    """
    Tests for the CSS class names the admin templates emit (field-, app-,
    and model-specific hooks used by custom stylesheets).
    """

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')
        cls.s1 = Section.objects.create(name='Test section')
        cls.a1 = Article.objects.create(
            content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a2 = Article.objects.create(
            content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a3 = Article.objects.create(
            content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')

    def setUp(self):
        self.client.force_login(self.superuser)

    @ignore_warnings(category=RemovedInDjango20Warning)  # for allow_tags deprecation
    def test_field_prefix_css_classes(self):
        """
        Ensure that fields have a CSS class name with a 'field-' prefix.
        Refs #16371.
        """
        response = self.client.get(reverse('admin:admin_views_post_add'))
        # The main form
        self.assertContains(response, 'class="form-row field-title"')
        self.assertContains(response, 'class="form-row field-content"')
        self.assertContains(response, 'class="form-row field-public"')
        self.assertContains(response, 'class="form-row field-awesomeness_level"')
        self.assertContains(response, 'class="form-row field-coolness"')
        self.assertContains(response, 'class="form-row field-value"')
        self.assertContains(response, 'class="form-row"')  # The lambda function
        # The tabular inline
        self.assertContains(response, '<td class="field-url">')
        self.assertContains(response, '<td class="field-posted">')

    def test_index_css_classes(self):
        """
        Ensure that CSS class names are used for each app and model on the
        admin index pages.
        Refs #17050.
        """
        # General index page
        response = self.client.get(reverse('admin:index'))
        self.assertContains(response, '<div class="app-admin_views module">')
        self.assertContains(response, '<tr class="model-actor">')
        self.assertContains(response, '<tr class="model-album">')
        # App index page
        response = self.client.get(reverse('admin:app_list', args=('admin_views',)))
        self.assertContains(response, '<div class="app-admin_views module">')
        self.assertContains(response, '<tr class="model-actor">')
        self.assertContains(response, '<tr class="model-album">')

    def test_app_model_in_form_body_class(self):
        """
        Ensure app and model tag are correctly read by change_form template
        """
        response = self.client.get(reverse('admin:admin_views_section_add'))
        self.assertContains(response, '<body class=" app-admin_views model-section ')

    def test_app_model_in_list_body_class(self):
        """
        Ensure app and model tag are correctly read by change_list template
        """
        response = self.client.get(reverse('admin:admin_views_section_changelist'))
        self.assertContains(response, '<body class=" app-admin_views model-section ')

    def test_app_model_in_delete_confirmation_body_class(self):
        """
        Ensure app and model tag are correctly read by delete_confirmation
        template
        """
        response = self.client.get(reverse('admin:admin_views_section_delete', args=(self.s1.pk,)))
        self.assertContains(response, '<body class=" app-admin_views model-section ')

    def test_app_model_in_app_index_body_class(self):
        """
        Ensure app and model tag are correctly read by app_index template
        """
        response = self.client.get(reverse('admin:app_list', args=('admin_views',)))
        self.assertContains(response, '<body class=" dashboard app-admin_views')

    def test_app_model_in_delete_selected_confirmation_body_class(self):
        """
        Ensure app and model tag are correctly read by
        delete_selected_confirmation template
        """
        action_data = {
            ACTION_CHECKBOX_NAME: [1],
            'action': 'delete_selected',
            'index': 0,
        }
        response = self.client.post(reverse('admin:admin_views_section_changelist'), action_data)
        self.assertContains(response, '<body class=" app-admin_views model-section ')

    def test_changelist_field_classes(self):
        """
        Cells of the change list table should contain the field name in their class attribute
        Refs #11195.
        """
        Podcast.objects.create(name="Django Dose", release_date=datetime.date.today())
        response = self.client.get(reverse('admin:admin_views_podcast_changelist'))
        self.assertContains(response, '<th class="field-name">')
        self.assertContains(response, '<td class="field-release_date nowrap">')
        self.assertContains(response, '<td class="action-checkbox">')
# docutils is an optional dependency of django.contrib.admindocs; fall back
# to None so the skipUnless decorator below can skip the tests cleanly.
try:
    import docutils
except ImportError:
    docutils = None
@unittest.skipUnless(docutils, "no docutils installed.")
@override_settings(ROOT_URLCONF='admin_views.urls')
@modify_settings(INSTALLED_APPS={'append': ['django.contrib.admindocs', 'django.contrib.flatpages']})
class AdminDocsTest(TestCase):
    """
    Tests for the admindocs tag and filter reference pages (requires docutils).
    """

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')

    def setUp(self):
        self.client.force_login(self.superuser)

    def test_tags(self):
        response = self.client.get(reverse('django-admindocs-tags'))
        # The builtin tag group exists
        self.assertContains(response, "<h2>Built-in tags</h2>", count=2, html=True)
        # A builtin tag exists in both the index and detail
        self.assertContains(response, '<h3 id="built_in-autoescape">autoescape</h3>', html=True)
        self.assertContains(response, '<li><a href="#built_in-autoescape">autoescape</a></li>', html=True)
        # An app tag exists in both the index and detail
        self.assertContains(response, '<h3 id="flatpages-get_flatpages">get_flatpages</h3>', html=True)
        self.assertContains(response, '<li><a href="#flatpages-get_flatpages">get_flatpages</a></li>', html=True)
        # The admin list tag group exists
        self.assertContains(response, "<h2>admin_list</h2>", count=2, html=True)
        # An admin list tag exists in both the index and detail
        self.assertContains(response, '<h3 id="admin_list-admin_actions">admin_actions</h3>', html=True)
        self.assertContains(response, '<li><a href="#admin_list-admin_actions">admin_actions</a></li>', html=True)

    def test_filters(self):
        response = self.client.get(reverse('django-admindocs-filters'))
        # The builtin filter group exists
        self.assertContains(response, "<h2>Built-in filters</h2>", count=2, html=True)
        # A builtin filter exists in both the index and detail
        self.assertContains(response, '<h3 id="built_in-add">add</h3>', html=True)
        self.assertContains(response, '<li><a href="#built_in-add">add</a></li>', html=True)
@override_settings(
    ROOT_URLCONF='admin_views.urls',
    TEMPLATES=[{
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    }],
    USE_I18N=False,
)
class ValidXHTMLTests(TestCase):
    """Admin pages emit valid XHTML when internationalization is disabled."""

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')

    def setUp(self):
        self.client.force_login(self.superuser)

    def test_lang_name_present(self):
        """With USE_I18N off, no empty lang/xml:lang attributes are rendered."""
        app_index_url = reverse('admin:app_list', args=('admin_views',))
        response = self.client.get(app_index_url)
        for empty_attr in (' lang=""', ' xml:lang=""'):
            self.assertNotContains(response, empty_attr)
@override_settings(ROOT_URLCONF='admin_views.urls', USE_THOUSAND_SEPARATOR=True, USE_L10N=True)
class DateHierarchyTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')
def setUp(self):
self.client.force_login(self.superuser)
def tearDown(self):
formats.reset_format_cache()
def assert_non_localized_year(self, response, year):
"""Ensure that the year is not localized with
USE_THOUSAND_SEPARATOR. Refs #15234.
"""
self.assertNotContains(response, formats.number_format(year))
def assert_contains_year_link(self, response, date):
self.assertContains(response, '?release_date__year=%d"' % (date.year,))
def assert_contains_month_link(self, response, date):
self.assertContains(
response, '?release_date__month=%d&release_date__year=%d"' % (
date.month, date.year))
def assert_contains_day_link(self, response, date):
self.assertContains(
response, '?release_date__day=%d&'
'release_date__month=%d&release_date__year=%d"' % (
date.day, date.month, date.year))
def test_empty(self):
"""
Ensure that no date hierarchy links display with empty changelist.
"""
response = self.client.get(
reverse('admin:admin_views_podcast_changelist'))
self.assertNotContains(response, 'release_date__year=')
self.assertNotContains(response, 'release_date__month=')
self.assertNotContains(response, 'release_date__day=')
def test_single(self):
"""
Ensure that single day-level date hierarchy appears for single object.
"""
DATE = datetime.date(2000, 6, 30)
Podcast.objects.create(release_date=DATE)
url = reverse('admin:admin_views_podcast_changelist')
response = self.client.get(url)
self.assert_contains_day_link(response, DATE)
self.assert_non_localized_year(response, 2000)
def test_within_month(self):
"""
Ensure that day-level links appear for changelist within single month.
"""
DATES = (datetime.date(2000, 6, 30),
datetime.date(2000, 6, 15),
datetime.date(2000, 6, 3))
for date in DATES:
Podcast.objects.create(release_date=date)
url = reverse('admin:admin_views_podcast_changelist')
response = self.client.get(url)
for date in DATES:
self.assert_contains_day_link(response, date)
self.assert_non_localized_year(response, 2000)
def test_within_year(self):
    """
    Ensure that month-level links appear for changelist within single year.
    """
    release_dates = [datetime.date(2000, month, day)
                     for month, day in ((1, 30), (3, 15), (5, 3))]
    for release_date in release_dates:
        Podcast.objects.create(release_date=release_date)
    response = self.client.get(
        reverse('admin:admin_views_podcast_changelist'))
    # Objects span several months of 2000: month links only, no day links.
    self.assertNotContains(response, 'release_date__day=')
    for release_date in release_dates:
        self.assert_contains_month_link(response, release_date)
    self.assert_non_localized_year(response, 2000)
def test_multiple_years(self):
    """
    Ensure that year-level links appear for year-spanning changelist.
    """
    DATES = (datetime.date(2001, 1, 30),
             datetime.date(2003, 3, 15),
             datetime.date(2005, 5, 3))
    for date in DATES:
        Podcast.objects.create(release_date=date)
    response = self.client.get(
        reverse('admin:admin_views_podcast_changelist'))
    # no day/month-level links
    self.assertNotContains(response, 'release_date__day=')
    self.assertNotContains(response, 'release_date__month=')
    for date in DATES:
        self.assert_contains_year_link(response, date)
    # and make sure GET parameters still behave correctly
    for date in DATES:
        # Drilling into a year should expose month-level links...
        url = '%s?release_date__year=%d' % (
            reverse('admin:admin_views_podcast_changelist'),
            date.year)
        response = self.client.get(url)
        self.assert_contains_month_link(response, date)
        # NOTE(review): 2000 does not occur in DATES (they are 2001, 2003,
        # 2005) -- this looks like a typo for 2001; verify intent.
        self.assert_non_localized_year(response, 2000)
        self.assert_non_localized_year(response, 2003)
        self.assert_non_localized_year(response, 2005)
        # ...and drilling into a month should expose day-level links.
        url = '%s?release_date__year=%d&release_date__month=%d' % (
            reverse('admin:admin_views_podcast_changelist'),
            date.year, date.month)
        response = self.client.get(url)
        self.assert_contains_day_link(response, date)
        self.assert_non_localized_year(response, 2000)
        self.assert_non_localized_year(response, 2003)
        self.assert_non_localized_year(response, 2005)
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminCustomSaveRelatedTests(TestCase):
    """
    Ensure that one can easily customize the way related objects are saved.
    Refs #16115.
    """
    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')

    def setUp(self):
        self.client.force_login(self.superuser)

    def test_should_be_able_to_edit_related_objects_on_add_view(self):
        # The children are posted as 'Paul'/'Catherine' but asserted below
        # as 'Paul Stone'/'Catherine Stone' -- presumably the ModelAdmin's
        # customized save hook appends the parent's surname; confirm
        # against the admin definition for Parent.
        post = {
            'child_set-TOTAL_FORMS': '3',
            'child_set-INITIAL_FORMS': '0',
            'name': 'Josh Stone',
            'child_set-0-name': 'Paul',
            'child_set-1-name': 'Catherine',
        }
        self.client.post(reverse('admin:admin_views_parent_add'), post)
        self.assertEqual(1, Parent.objects.count())
        self.assertEqual(2, Child.objects.count())
        children_names = list(Child.objects.order_by('name').values_list('name', flat=True))
        self.assertEqual('Josh Stone', Parent.objects.latest('id').name)
        self.assertEqual(['Catherine Stone', 'Paul Stone'], children_names)

    def test_should_be_able_to_edit_related_objects_on_change_view(self):
        # Same customization, exercised through the change view on
        # pre-existing inline objects.
        parent = Parent.objects.create(name='Josh Stone')
        paul = Child.objects.create(parent=parent, name='Paul')
        catherine = Child.objects.create(parent=parent, name='Catherine')
        post = {
            'child_set-TOTAL_FORMS': '5',
            'child_set-INITIAL_FORMS': '2',
            'name': 'Josh Stone',
            'child_set-0-name': 'Paul',
            'child_set-0-id': paul.id,
            'child_set-1-name': 'Catherine',
            'child_set-1-id': catherine.id,
        }
        self.client.post(reverse('admin:admin_views_parent_change', args=(parent.id,)), post)
        children_names = list(Child.objects.order_by('name').values_list('name', flat=True))
        self.assertEqual('Josh Stone', Parent.objects.latest('id').name)
        self.assertEqual(['Catherine Stone', 'Paul Stone'], children_names)

    def test_should_be_able_to_edit_related_objects_on_changelist_view(self):
        # Editing the parent via the editable changelist must also run the
        # customized related-object saving (children get renamed too).
        parent = Parent.objects.create(name='Josh Rock')
        Child.objects.create(parent=parent, name='Paul')
        Child.objects.create(parent=parent, name='Catherine')
        post = {
            'form-TOTAL_FORMS': '1',
            'form-INITIAL_FORMS': '1',
            'form-MAX_NUM_FORMS': '0',
            'form-0-id': parent.id,
            'form-0-name': 'Josh Stone',
            '_save': 'Save'
        }
        self.client.post(reverse('admin:admin_views_parent_changelist'), post)
        children_names = list(Child.objects.order_by('name').values_list('name', flat=True))
        self.assertEqual('Josh Stone', Parent.objects.latest('id').name)
        self.assertEqual(['Catherine Stone', 'Paul Stone'], children_names)
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminViewLogoutTests(TestCase):
    """Admin logout view behavior for authenticated and anonymous clients."""

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')

    def test_logout(self):
        # A logged-in user hitting logout gets the logged-out page with no
        # admin chrome (no permissions, no user-tools header).
        self.client.force_login(self.superuser)
        response = self.client.get(reverse('admin:logout'))
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'registration/logged_out.html')
        self.assertEqual(response.request['PATH_INFO'], reverse('admin:logout'))
        self.assertFalse(response.context['has_permission'])
        self.assertNotContains(response, 'user-tools')  # user-tools div shouldn't be visible.

    def test_client_logout_url_can_be_used_to_login(self):
        # An anonymous client hitting logout is bounced to the login page,
        # with 'next' pointing back at the admin index.
        response = self.client.get(reverse('admin:logout'))
        self.assertEqual(response.status_code, 302)  # we should be redirected to the login page.
        # follow the redirect and test results.
        response = self.client.get(reverse('admin:logout'), follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'admin/login.html')
        self.assertEqual(response.request['PATH_INFO'], reverse('admin:login'))
        self.assertContains(response, '<input type="hidden" name="next" value="%s" />' % reverse('admin:index'))
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminUserMessageTest(TestCase):
    """Messages emitted by admin actions render with the proper CSS level."""

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')

    def setUp(self):
        self.client.force_login(self.superuser)

    def send_message(self, level):
        """
        Helper that sends a post to the dummy test methods and asserts that a
        message with the level has appeared in the response.
        """
        action_data = {
            ACTION_CHECKBOX_NAME: [1],
            'action': 'message_%s' % level,
            'index': 0,
        }
        response = self.client.post(reverse('admin:admin_views_usermessenger_changelist'),
                                    action_data, follow=True)
        # The message level doubles as the <li> CSS class.
        self.assertContains(response,
                            '<li class="%s">Test %s</li>' % (level, level),
                            html=True)

    @override_settings(MESSAGE_LEVEL=10)  # Set to DEBUG for this request
    def test_message_debug(self):
        self.send_message('debug')

    def test_message_info(self):
        self.send_message('info')

    def test_message_success(self):
        self.send_message('success')

    def test_message_warning(self):
        self.send_message('warning')

    def test_message_error(self):
        self.send_message('error')

    def test_message_extra_tags(self):
        # Extra tags are rendered as additional CSS classes before the level.
        action_data = {
            ACTION_CHECKBOX_NAME: [1],
            'action': 'message_extra_tags',
            'index': 0,
        }
        response = self.client.post(reverse('admin:admin_views_usermessenger_changelist'),
                                    action_data, follow=True)
        self.assertContains(response,
                            '<li class="extra_tag info">Test tags</li>',
                            html=True)
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminKeepChangeListFiltersTests(TestCase):
    """
    Changelist filters must survive navigation between the changelist,
    add, change, history and delete views via the _changelist_filters
    GET parameter.
    """
    admin_site = site

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')
        cls.joepublicuser = User.objects.create_user(username='joepublic', password='secret')

    def setUp(self):
        self.client.force_login(self.superuser)

    def assertURLEqual(self, url1, url2):
        """
        Assert that two URLs are equal despite the ordering
        of their querystring. Refs #22360.
        """
        # Only path + parsed querystring are compared, so scheme/host
        # differences are ignored (exercised by test_assert_url_equal).
        parsed_url1 = urlparse(url1)
        path1 = parsed_url1.path
        parsed_qs1 = dict(parse_qsl(parsed_url1.query))
        parsed_url2 = urlparse(url2)
        path2 = parsed_url2.path
        parsed_qs2 = dict(parse_qsl(parsed_url2.query))
        # _changelist_filters is itself an urlencoded querystring; parse
        # it into a dict too so its internal ordering doesn't matter.
        for parsed_qs in [parsed_qs1, parsed_qs2]:
            if '_changelist_filters' in parsed_qs:
                changelist_filters = parsed_qs['_changelist_filters']
                parsed_filters = dict(parse_qsl(changelist_filters))
                parsed_qs['_changelist_filters'] = parsed_filters
        self.assertEqual(path1, path2)
        self.assertEqual(parsed_qs1, parsed_qs2)

    def test_assert_url_equal(self):
        """Self-test for the assertURLEqual helper defined above."""
        # Test equality.
        change_user_url = reverse('admin:auth_user_change', args=(self.joepublicuser.pk,))
        self.assertURLEqual(
            'http://testserver{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format(
                change_user_url
            ),
            'http://testserver{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format(
                change_user_url
            )
        )
        # Test inequality.
        with self.assertRaises(AssertionError):
            self.assertURLEqual(
                'http://testserver{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format(
                    change_user_url
                ),
                'http://testserver{}?_changelist_filters=is_staff__exact%3D1%26is_superuser__exact%3D1'.format(
                    change_user_url
                )
            )
        # Ignore scheme and host.
        self.assertURLEqual(
            'http://testserver{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format(
                change_user_url
            ),
            '{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format(change_user_url)
        )
        # Ignore ordering of querystring.
        self.assertURLEqual(
            '{}?is_staff__exact=0&is_superuser__exact=0'.format(reverse('admin:auth_user_changelist')),
            '{}?is_superuser__exact=0&is_staff__exact=0'.format(reverse('admin:auth_user_changelist'))
        )
        # Ignore ordering of _changelist_filters.
        self.assertURLEqual(
            '{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format(change_user_url),
            '{}?_changelist_filters=is_superuser__exact%3D0%26is_staff__exact%3D0'.format(change_user_url)
        )

    # --- URL construction helpers for the scenarios below -----------------

    def get_changelist_filters(self):
        # The filter set applied on the changelist in every scenario.
        return {
            'is_superuser__exact': 0,
            'is_staff__exact': 0,
        }

    def get_changelist_filters_querystring(self):
        return urlencode(self.get_changelist_filters())

    def get_preserved_filters_querystring(self):
        # The form the filters take once wrapped in _changelist_filters.
        return urlencode({
            '_changelist_filters': self.get_changelist_filters_querystring()
        })

    def get_sample_user_id(self):
        return self.joepublicuser.pk

    def get_changelist_url(self):
        return '%s?%s' % (
            reverse('admin:auth_user_changelist',
                    current_app=self.admin_site.name),
            self.get_changelist_filters_querystring(),
        )

    def get_add_url(self):
        return '%s?%s' % (
            reverse('admin:auth_user_add',
                    current_app=self.admin_site.name),
            self.get_preserved_filters_querystring(),
        )

    def get_change_url(self, user_id=None):
        if user_id is None:
            user_id = self.get_sample_user_id()
        return "%s?%s" % (
            reverse('admin:auth_user_change', args=(user_id,),
                    current_app=self.admin_site.name),
            self.get_preserved_filters_querystring(),
        )

    def get_history_url(self, user_id=None):
        if user_id is None:
            user_id = self.get_sample_user_id()
        return "%s?%s" % (
            reverse('admin:auth_user_history', args=(user_id,),
                    current_app=self.admin_site.name),
            self.get_preserved_filters_querystring(),
        )

    def get_delete_url(self, user_id=None):
        if user_id is None:
            user_id = self.get_sample_user_id()
        return "%s?%s" % (
            reverse('admin:auth_user_delete', args=(user_id,),
                    current_app=self.admin_site.name),
            self.get_preserved_filters_querystring(),
        )

    def test_changelist_view(self):
        response = self.client.get(self.get_changelist_url())
        self.assertEqual(response.status_code, 200)
        # Check the `change_view` link has the correct querystring.
        detail_link = re.search(
            '<a href="(.*?)">{}</a>'.format(self.joepublicuser.username),
            force_text(response.content)
        )
        self.assertURLEqual(detail_link.group(1), self.get_change_url())

    def test_change_view(self):
        # Get the `change_view`.
        response = self.client.get(self.get_change_url())
        self.assertEqual(response.status_code, 200)
        # Check the form action.
        form_action = re.search(
            '<form enctype="multipart/form-data" action="(.*?)" method="post" id="user_form".*?>',
            force_text(response.content)
        )
        self.assertURLEqual(form_action.group(1), '?%s' % self.get_preserved_filters_querystring())
        # Check the history link.
        history_link = re.search(
            '<a href="(.*?)" class="historylink">History</a>',
            force_text(response.content)
        )
        self.assertURLEqual(history_link.group(1), self.get_history_url())
        # Check the delete link.
        delete_link = re.search(
            '<a href="(.*?)" class="deletelink">Delete</a>',
            force_text(response.content)
        )
        self.assertURLEqual(delete_link.group(1), self.get_delete_url())
        # Test redirect on "Save".
        post_data = {
            'username': 'joepublic',
            'last_login_0': '2007-05-30',
            'last_login_1': '13:20:10',
            'date_joined_0': '2007-05-30',
            'date_joined_1': '13:20:10',
        }
        post_data['_save'] = 1
        response = self.client.post(self.get_change_url(), data=post_data)
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(
            response.url,
            self.get_changelist_url()
        )
        post_data.pop('_save')
        # Test redirect on "Save and continue".
        post_data['_continue'] = 1
        response = self.client.post(self.get_change_url(), data=post_data)
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(
            response.url,
            self.get_change_url()
        )
        post_data.pop('_continue')
        # Test redirect on "Save and add new".
        post_data['_addanother'] = 1
        response = self.client.post(self.get_change_url(), data=post_data)
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(
            response.url,
            self.get_add_url()
        )
        post_data.pop('_addanother')

    def test_add_view(self):
        # Get the `add_view`.
        response = self.client.get(self.get_add_url())
        self.assertEqual(response.status_code, 200)
        # Check the form action.
        form_action = re.search(
            '<form enctype="multipart/form-data" action="(.*?)" method="post" id="user_form".*?>',
            force_text(response.content)
        )
        self.assertURLEqual(form_action.group(1), '?%s' % self.get_preserved_filters_querystring())
        post_data = {
            'username': 'dummy',
            'password1': 'test',
            'password2': 'test',
        }
        # Test redirect on "Save".
        post_data['_save'] = 1
        response = self.client.post(self.get_add_url(), data=post_data)
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(
            response.url,
            self.get_change_url(User.objects.get(username='dummy').pk)
        )
        post_data.pop('_save')
        # Test redirect on "Save and continue".
        post_data['username'] = 'dummy2'
        post_data['_continue'] = 1
        response = self.client.post(self.get_add_url(), data=post_data)
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(
            response.url,
            self.get_change_url(User.objects.get(username='dummy2').pk)
        )
        post_data.pop('_continue')
        # Test redirect on "Save and add new".
        post_data['username'] = 'dummy3'
        post_data['_addanother'] = 1
        response = self.client.post(self.get_add_url(), data=post_data)
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(
            response.url,
            self.get_add_url()
        )
        post_data.pop('_addanother')

    def test_delete_view(self):
        # Test redirect on "Delete".
        response = self.client.post(self.get_delete_url(), {'post': 'yes'})
        self.assertEqual(response.status_code, 302)
        self.assertURLEqual(
            response.url,
            self.get_changelist_url()
        )

    def test_url_prefix(self):
        # add_preserved_filters must produce the same URL with and without
        # a script prefix in force.
        context = {
            'preserved_filters': self.get_preserved_filters_querystring(),
            'opts': User._meta,
        }
        url = reverse('admin:auth_user_changelist', current_app=self.admin_site.name)
        self.assertURLEqual(
            self.get_changelist_url(),
            add_preserved_filters(context, url),
        )
        with override_script_prefix('/prefix/'):
            url = reverse('admin:auth_user_changelist', current_app=self.admin_site.name)
            self.assertURLEqual(
                self.get_changelist_url(),
                add_preserved_filters(context, url),
            )
class NamespacedAdminKeepChangeListFiltersTests(AdminKeepChangeListFiltersTests):
    # Re-run the whole filter-preservation suite against the second admin
    # site so URL reversing with a non-default current_app is covered too.
    admin_site = site2
@override_settings(ROOT_URLCONF='admin_views.urls')
class TestLabelVisibility(TestCase):
    """Labels of hidden fields in the admin must be hidden too (#11277)."""

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')

    def setUp(self):
        self.client.force_login(self.superuser)

    def test_all_fields_visible(self):
        add_url = reverse('admin:admin_views_emptymodelvisible_add')
        response = self.client.get(add_url)
        self.assert_fieldline_visible(response)
        for field_name in ('first', 'second'):
            self.assert_field_visible(response, field_name)

    def test_all_fields_hidden(self):
        add_url = reverse('admin:admin_views_emptymodelhidden_add')
        response = self.client.get(add_url)
        self.assert_fieldline_hidden(response)
        for field_name in ('first', 'second'):
            self.assert_field_hidden(response, field_name)

    def test_mixin(self):
        # One visible and one hidden field: the enclosing line stays visible.
        add_url = reverse('admin:admin_views_emptymodelmixin_add')
        response = self.client.get(add_url)
        self.assert_fieldline_visible(response)
        self.assert_field_hidden(response, 'first')
        self.assert_field_visible(response, 'second')

    def assert_field_visible(self, response, field_name):
        self.assertContains(response, '<div class="field-box field-%s">' % field_name)

    def assert_field_hidden(self, response, field_name):
        self.assertContains(response, '<div class="field-box field-%s hidden">' % field_name)

    def assert_fieldline_visible(self, response):
        self.assertContains(response, '<div class="form-row field-first field-second">')

    def assert_fieldline_hidden(self, response):
        self.assertContains(response, '<div class="form-row hidden')
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminViewOnSiteTests(TestCase):
    """ModelAdmin.view_on_site configuration and the 'View on site' button."""

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')
        # A State -> City -> Restaurant -> Worker object graph used by the
        # link assertions below.
        cls.s1 = State.objects.create(name='New York')
        cls.s2 = State.objects.create(name='Illinois')
        cls.s3 = State.objects.create(name='California')
        cls.c1 = City.objects.create(state=cls.s1, name='New York')
        cls.c2 = City.objects.create(state=cls.s2, name='Chicago')
        cls.c3 = City.objects.create(state=cls.s3, name='San Francisco')
        cls.r1 = Restaurant.objects.create(city=cls.c1, name='Italian Pizza')
        cls.r2 = Restaurant.objects.create(city=cls.c1, name='Boulevard')
        cls.r3 = Restaurant.objects.create(city=cls.c2, name='Chinese Dinner')
        cls.r4 = Restaurant.objects.create(city=cls.c2, name='Angels')
        cls.r5 = Restaurant.objects.create(city=cls.c2, name='Take Away')
        cls.r6 = Restaurant.objects.create(city=cls.c3, name='The Unknown Restaurant')
        cls.w1 = Worker.objects.create(work_at=cls.r1, name='Mario', surname='Rossi')
        cls.w2 = Worker.objects.create(work_at=cls.r1, name='Antonio', surname='Bianchi')
        cls.w3 = Worker.objects.create(work_at=cls.r1, name='John', surname='Doe')

    def setUp(self):
        self.client.force_login(self.superuser)

    def test_add_view_form_and_formsets_run_validation(self):
        """
        Issue #20522
        Verifying that if the parent form fails validation, the inlines also
        run validation even if validation is contingent on parent form data
        """
        # The form validation should fail because 'some_required_info' is
        # not included on the parent form, and the family_name of the parent
        # does not match that of the child
        post_data = {"family_name": "Test1",
                     "dependentchild_set-TOTAL_FORMS": "1",
                     "dependentchild_set-INITIAL_FORMS": "0",
                     "dependentchild_set-MAX_NUM_FORMS": "1",
                     "dependentchild_set-0-id": "",
                     "dependentchild_set-0-parent": "",
                     "dependentchild_set-0-family_name": "Test2"}
        response = self.client.post(reverse('admin:admin_views_parentwithdependentchildren_add'),
                                    post_data)
        # just verifying the parent form failed validation, as expected --
        # this isn't the regression test
        self.assertIn('some_required_info', response.context['adminform'].form.errors)
        # actual regression test
        for error_set in response.context['inline_admin_formset'].formset.errors:
            self.assertEqual(['Children must share a family name with their parents in this contrived test case'],
                             error_set.get('__all__'))

    def test_change_view_form_and_formsets_run_validation(self):
        """
        Issue #20522
        Verifying that if the parent form fails validation, the inlines also
        run validation even if validation is contingent on parent form data
        """
        pwdc = ParentWithDependentChildren.objects.create(some_required_info=6,
                                                          family_name="Test1")
        # The form validation should fail because 'some_required_info' is
        # not included on the parent form, and the family_name of the parent
        # does not match that of the child
        post_data = {"family_name": "Test2",
                     "dependentchild_set-TOTAL_FORMS": "1",
                     "dependentchild_set-INITIAL_FORMS": "0",
                     "dependentchild_set-MAX_NUM_FORMS": "1",
                     "dependentchild_set-0-id": "",
                     "dependentchild_set-0-parent": str(pwdc.id),
                     "dependentchild_set-0-family_name": "Test1"}
        response = self.client.post(
            reverse('admin:admin_views_parentwithdependentchildren_change', args=(pwdc.id,)), post_data
        )
        # just verifying the parent form failed validation, as expected --
        # this isn't the regression test
        self.assertIn('some_required_info', response.context['adminform'].form.errors)
        # actual regression test
        for error_set in response.context['inline_admin_formset'].formset.errors:
            self.assertEqual(['Children must share a family name with their parents in this contrived test case'],
                             error_set.get('__all__'))

    def test_check(self):
        "Ensure that the view_on_site value is either a boolean or a callable"
        try:
            # Mutates the CityAdmin *class* attribute, hence the
            # try/finally restore below.
            admin = CityAdmin(City, AdminSite())
            CityAdmin.view_on_site = True
            self.assertEqual(admin.check(), [])
            CityAdmin.view_on_site = False
            self.assertEqual(admin.check(), [])
            CityAdmin.view_on_site = lambda obj: obj.get_absolute_url()
            self.assertEqual(admin.check(), [])
            CityAdmin.view_on_site = []
            self.assertEqual(admin.check(), [
                Error(
                    "The value of 'view_on_site' must be a callable or a boolean value.",
                    obj=CityAdmin,
                    id='admin.E025',
                ),
            ])
        finally:
            # Restore the original values for the benefit of other tests.
            CityAdmin.view_on_site = True

    def test_false(self):
        "Ensure that the 'View on site' button is not displayed if view_on_site is False"
        response = self.client.get(reverse('admin:admin_views_restaurant_change', args=(self.r1.pk,)))
        content_type_pk = ContentType.objects.get_for_model(Restaurant).pk
        self.assertNotContains(response, reverse('admin:view_on_site', args=(content_type_pk, 1)))

    def test_true(self):
        "Ensure that the default behavior is followed if view_on_site is True"
        response = self.client.get(reverse('admin:admin_views_city_change', args=(self.c1.pk,)))
        content_type_pk = ContentType.objects.get_for_model(City).pk
        self.assertContains(response, reverse('admin:view_on_site', args=(content_type_pk, self.c1.pk)))

    def test_callable(self):
        "Ensure that the right link is displayed if view_on_site is a callable"
        response = self.client.get(reverse('admin:admin_views_worker_change', args=(self.w1.pk,)))
        self.assertContains(response, '"/worker/%s/%s/"' % (self.w1.surname, self.w1.name))

    def test_missing_get_absolute_url(self):
        "Ensure None is returned if model doesn't have get_absolute_url"
        model_admin = ModelAdmin(Worker, None)
        self.assertIsNone(model_admin.get_view_on_site_url(Worker()))
@override_settings(ROOT_URLCONF='admin_views.urls')
class InlineAdminViewOnSiteTest(TestCase):
    """view_on_site behavior for *inline* model admins on the parent page."""

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')
        # Same State -> City -> Restaurant -> Worker graph as
        # AdminViewOnSiteTests, here viewed through inlines.
        cls.s1 = State.objects.create(name='New York')
        cls.s2 = State.objects.create(name='Illinois')
        cls.s3 = State.objects.create(name='California')
        cls.c1 = City.objects.create(state=cls.s1, name='New York')
        cls.c2 = City.objects.create(state=cls.s2, name='Chicago')
        cls.c3 = City.objects.create(state=cls.s3, name='San Francisco')
        cls.r1 = Restaurant.objects.create(city=cls.c1, name='Italian Pizza')
        cls.r2 = Restaurant.objects.create(city=cls.c1, name='Boulevard')
        cls.r3 = Restaurant.objects.create(city=cls.c2, name='Chinese Dinner')
        cls.r4 = Restaurant.objects.create(city=cls.c2, name='Angels')
        cls.r5 = Restaurant.objects.create(city=cls.c2, name='Take Away')
        cls.r6 = Restaurant.objects.create(city=cls.c3, name='The Unknown Restaurant')
        cls.w1 = Worker.objects.create(work_at=cls.r1, name='Mario', surname='Rossi')
        cls.w2 = Worker.objects.create(work_at=cls.r1, name='Antonio', surname='Bianchi')
        cls.w3 = Worker.objects.create(work_at=cls.r1, name='John', surname='Doe')

    def setUp(self):
        self.client.force_login(self.superuser)

    def test_false(self):
        "Ensure that the 'View on site' button is not displayed if view_on_site is False"
        response = self.client.get(reverse('admin:admin_views_state_change', args=(self.s1.pk,)))
        content_type_pk = ContentType.objects.get_for_model(City).pk
        self.assertNotContains(response, reverse('admin:view_on_site', args=(content_type_pk, self.c1.pk)))

    def test_true(self):
        "Ensure that the 'View on site' button is displayed if view_on_site is True"
        response = self.client.get(reverse('admin:admin_views_city_change', args=(self.c1.pk,)))
        content_type_pk = ContentType.objects.get_for_model(Restaurant).pk
        self.assertContains(response, reverse('admin:view_on_site', args=(content_type_pk, self.r1.pk)))

    def test_callable(self):
        "Ensure that the right link is displayed if view_on_site is a callable"
        response = self.client.get(reverse('admin:admin_views_restaurant_change', args=(self.r1.pk,)))
        self.assertContains(response, '"/worker_inline/%s/%s/"' % (self.w1.surname, self.w1.name))
@override_settings(ROOT_URLCONF='admin_views.urls')
class TestEtagWithAdminView(SimpleTestCase):
    # See https://code.djangoproject.com/ticket/16003
    def test_admin(self):
        # The anonymous redirect to the login page must carry an ETag
        # header exactly when USE_ETAGS is enabled.
        for use_etags in (False, True):
            with self.settings(USE_ETAGS=use_etags):
                response = self.client.get(reverse('admin:index'))
                self.assertEqual(response.status_code, 302)
                self.assertEqual(response.has_header('ETag'), use_etags)
@override_settings(ROOT_URLCONF='admin_views.urls')
class GetFormsetsWithInlinesArgumentTest(TestCase):
    """
    #23934 - When adding a new model instance in the admin, the 'obj' argument
    of get_formsets_with_inlines() should be None. When changing, it should be
    equal to the existing model instance.
    The GetFormsetsArgumentCheckingAdmin ModelAdmin throws an exception
    if obj is not None during add_view or obj is None during change_view.
    """
    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')

    def setUp(self):
        self.client.force_login(self.superuser)

    def _post_and_expect_redirect(self, url, name):
        # A 302 means the admin accepted the POST; the checking ModelAdmin
        # would have raised (non-302) if 'obj' had the wrong value.
        response = self.client.post(url, {'name': name})
        self.assertEqual(response.status_code, 302)

    def test_explicitly_provided_pk(self):
        self._post_and_expect_redirect(
            reverse('admin:admin_views_explicitlyprovidedpk_add'), '1')
        self._post_and_expect_redirect(
            reverse('admin:admin_views_explicitlyprovidedpk_change', args=(1,)), '2')

    def test_implicitly_generated_pk(self):
        self._post_and_expect_redirect(
            reverse('admin:admin_views_implicitlygeneratedpk_add'), '1')
        self._post_and_expect_redirect(
            reverse('admin:admin_views_implicitlygeneratedpk_change', args=(1,)), '2')
| {
"content_hash": "b4f67b9effbb740bc8e434eac841d7a7",
"timestamp": "",
"source": "github",
"line_count": 6012,
"max_line_length": 119,
"avg_line_length": 46.26314038589488,
"alnum_prop": 0.6298618651441391,
"repo_name": "krisys/django",
"id": "48ce1770fbb40c3f338fc6d9d74039edb347efc8",
"size": "278269",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "tests/admin_views/tests.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "52372"
},
{
"name": "HTML",
"bytes": "172129"
},
{
"name": "JavaScript",
"bytes": "255773"
},
{
"name": "Makefile",
"bytes": "125"
},
{
"name": "Python",
"bytes": "11591744"
},
{
"name": "Shell",
"bytes": "809"
},
{
"name": "Smarty",
"bytes": "130"
}
],
"symlink_target": ""
} |
from __future__ import print_function
from zope.interface import implementer_only
from twisted.trial import unittest
from foolscap import schema, remoteinterface
from foolscap.api import RemoteInterface
from foolscap.remoteinterface import getRemoteInterface, RemoteMethodSchema
from foolscap.remoteinterface import RemoteInterfaceRegistry
from foolscap.tokens import Violation
from foolscap.referenceable import RemoteReference
from foolscap.test.common import TargetMixin
from foolscap.test.common import getRemoteInterfaceName, Target, RIMyTarget, \
RIMyTarget2, TargetWithoutInterfaces, IFoo, Foo, TypesTarget, RIDummy, \
DummyTarget
@implementer_only(IFoo, RIMyTarget2)
class Target2(Target):
    # Same implementation as Target, but implementer_only replaces the
    # inherited interface declarations, so this class advertises *only*
    # IFoo and RIMyTarget2 (looked up as "RIMyTargetInterface2" below).
    pass
class TestInterface(TargetMixin, unittest.TestCase):
    """
    RemoteInterface declaration and lookup: metaclass type, global
    registration, duplicate-name rejection, interface extraction from
    local objects, and the per-method argument/response schemas.
    """

    def testTypes(self):
        # Both declarations should be RemoteInterfaceClass instances,
        # not plain zope.interface Interfaces.
        self.assertTrue(isinstance(RIMyTarget,
                                   remoteinterface.RemoteInterfaceClass))
        self.assertTrue(isinstance(RIMyTarget2,
                                   remoteinterface.RemoteInterfaceClass))

    def testRegister(self):
        # Interfaces self-register by name in the global registry;
        # RIMyTarget2 was registered under an overridden name.
        reg = RemoteInterfaceRegistry
        self.assertEqual(reg["RIMyTarget"], RIMyTarget)
        self.assertEqual(reg["RIMyTargetInterface2"], RIMyTarget2)

    def testDuplicateRegistry(self):
        # Re-declaring an interface under an already-registered name must
        # raise at class-definition time.
        try:
            class RIMyTarget(RemoteInterface):
                def foo(bar=int): return int
        except remoteinterface.DuplicateRemoteInterfaceError:
            pass
        else:
            self.fail("duplicate registration not caught")

    def testInterface1(self):
        # verify that we extract the right interfaces from a local object.
        # also check that the registry stuff works.
        self.setupBrokers()
        rr, target = self.setupTarget(Target())
        iface = getRemoteInterface(target)
        self.assertEqual(iface, RIMyTarget)
        iname = getRemoteInterfaceName(target)
        self.assertEqual(iname, "RIMyTarget")
        # assertIs replaces the deprecated failUnlessIdentical alias,
        # matching the assert* spelling used throughout this class.
        self.assertIs(RemoteInterfaceRegistry["RIMyTarget"], RIMyTarget)
        rr, target = self.setupTarget(Target2())
        iname = getRemoteInterfaceName(target)
        self.assertEqual(iname, "RIMyTargetInterface2")
        self.assertIs(RemoteInterfaceRegistry["RIMyTargetInterface2"],
                      RIMyTarget2)

    def testInterface2(self):
        # verify that RemoteInterfaces have the right attributes
        t = Target()
        iface = getRemoteInterface(t)
        self.assertEqual(iface, RIMyTarget)
        # 'add' is defined with 'def'
        s1 = RIMyTarget['add']
        self.assertTrue(isinstance(s1, RemoteMethodSchema))
        ok, s2 = s1.getKeywordArgConstraint("a")
        self.assertTrue(ok)
        self.assertTrue(isinstance(s2, schema.IntegerConstraint))
        # assertIsNone instead of assertTrue(... == None): a conforming
        # object yields None from checkObject (no Violation raised).
        self.assertIsNone(s2.checkObject(12, False))
        self.assertRaises(schema.Violation,
                          s2.checkObject, "string", False)
        s3 = s1.getResponseConstraint()
        self.assertTrue(isinstance(s3, schema.IntegerConstraint))
        # 'add1' is defined as a class attribute
        s1 = RIMyTarget['add1']
        self.assertTrue(isinstance(s1, RemoteMethodSchema))
        ok, s2 = s1.getKeywordArgConstraint("a")
        self.assertTrue(ok)
        self.assertTrue(isinstance(s2, schema.IntegerConstraint))
        self.assertIsNone(s2.checkObject(12, False))
        self.assertRaises(schema.Violation,
                          s2.checkObject, "string", False)
        s3 = s1.getResponseConstraint()
        self.assertTrue(isinstance(s3, schema.IntegerConstraint))
        s1 = RIMyTarget['join']
        self.assertTrue(isinstance(s1.getKeywordArgConstraint("a")[1],
                                   schema.StringConstraint))
        self.assertTrue(isinstance(s1.getKeywordArgConstraint("c")[1],
                                   schema.IntegerConstraint))
        s3 = RIMyTarget['join'].getResponseConstraint()
        self.assertTrue(isinstance(s3, schema.StringConstraint))
        s1 = RIMyTarget['disputed']
        self.assertTrue(isinstance(s1.getKeywordArgConstraint("a")[1],
                                   schema.IntegerConstraint))
        s3 = s1.getResponseConstraint()
        self.assertTrue(isinstance(s3, schema.IntegerConstraint))

    def testInterface3(self):
        # Objects declaring no RemoteInterface yield a falsey result.
        t = TargetWithoutInterfaces()
        iface = getRemoteInterface(t)
        self.assertFalse(iface)

    def testStack(self):
        # when you violate your outbound schema, the Failure you get should
        # have a stack trace that includes the actual callRemote invocation.
        # Sometimes the stack trace doesn't include source code (either we
        # have .pyc files but not .py files, or because the code is coming
        # from an .egg). So this test merely asserts that test_interfaces.py
        # is present in the trace, followed by either a source code line that
        # mentions callRemote, or the filename/linenumber/functionname line
        # that mentions callRemote.
        self.setupBrokers()
        rr, target = self.setupTarget(Target(), True)
        d = rr.callRemote('add', "not a number", "oops")
        def _check_failure(f):
            s = f.getTraceback().split("\n")
            for i in range(len(s)):
                line = s[i]
                if ("test_interfaces.py" in line
                    and i+2 < len(s)
                    and ("rr.callRemote" in s[i+1]
                         or "in callRemote" in s[i+2])):
                    return # all good
            print("failure looked like this:")
            print(f)
            self.fail("didn't see invocation of callRemote in stacktrace")
        d.addCallbacks(lambda res: self.fail("hey, this was supposed to fail"),
                       _check_failure)
        return d
class Types(TargetMixin, unittest.TestCase):
def setUp(self):
    # TargetMixin.setUp prepares the loopback transport state; the tests
    # below additionally need live brokers for callRemote round-trips.
    TargetMixin.setUp(self)
    self.setupBrokers()
def deferredShouldFail(self, d, ftype=None, checker=None):
if not ftype and not checker:
d.addCallbacks(lambda res:
self.fail("hey, this was supposed to fail"),
lambda f: None)
elif ftype and not checker:
d.addCallbacks(lambda res:
self.fail("hey, this was supposed to fail"),
lambda f: f.trap(ftype) or None)
else:
d.addCallbacks(lambda res:
self.fail("hey, this was supposed to fail"),
checker)
def testCall(self):
rr, target = self.setupTarget(Target(), True)
d = rr.callRemote('add', 3, 4) # enforces schemas
d.addCallback(lambda res: self.assertEqual(res, 7))
return d
def testFail(self):
# make sure exceptions (and thus CopiedFailures) pass a schema check
rr, target = self.setupTarget(Target(), True)
d = rr.callRemote('fail')
self.deferredShouldFail(d, ftype=ValueError)
return d
def testNoneGood(self):
rr, target = self.setupTarget(TypesTarget(), True)
d = rr.callRemote('returns_none', True)
d.addCallback(lambda res: self.assertEqual(res, None))
return d
def testNoneBad(self):
rr, target = self.setupTarget(TypesTarget(), True)
d = rr.callRemote('returns_none', False)
def _check_failure(f):
f.trap(Violation)
self.failUnlessIn("(in return value of <foolscap.test.common.TypesTarget object", str(f))
self.failUnlessIn(">.returns_none", str(f))
self.failUnlessIn("'not None' is not None", str(f))
self.deferredShouldFail(d, checker=_check_failure)
return d
def testTakesRemoteInterfaceGood(self):
rr, target = self.setupTarget(TypesTarget(), True)
d = rr.callRemote('takes_remoteinterface', DummyTarget())
d.addCallback(lambda res: self.assertEqual(res, "good"))
return d
def testTakesRemoteInterfaceBad(self):
rr, target = self.setupTarget(TypesTarget(), True)
# takes_remoteinterface is specified to accept an RIDummy
d = rr.callRemote('takes_remoteinterface', 12)
def _check_failure(f):
f.trap(Violation)
self.failUnlessIn("RITypes.takes_remoteinterface(a=))", str(f))
self.failUnlessIn("'12' is not a Referenceable", str(f))
self.deferredShouldFail(d, checker=_check_failure)
return d
def testTakesRemoteInterfaceBad2(self):
rr, target = self.setupTarget(TypesTarget(), True)
# takes_remoteinterface is specified to accept an RIDummy
d = rr.callRemote('takes_remoteinterface', TypesTarget())
def _check_failure(f):
f.trap(Violation)
self.failUnlessIn("RITypes.takes_remoteinterface(a=))", str(f))
self.failUnlessIn(" does not provide RemoteInterface ", str(f))
self.failUnlessIn("foolscap.test.common.RIDummy", str(f))
self.deferredShouldFail(d, checker=_check_failure)
return d
def failUnlessRemoteProvides(self, obj, riface):
# TODO: really, I want to just be able to say:
# self.failUnless(RIDummy.providedBy(res))
iface = obj.tracker.interface
# TODO: this test probably doesn't handle subclasses of
# RemoteInterface, which might be useful (if it even works)
if not iface or iface != riface:
self.fail("%s does not provide RemoteInterface %s" % (obj, riface))
def testReturnsRemoteInterfaceGood(self):
rr, target = self.setupTarget(TypesTarget(), True)
d = rr.callRemote('returns_remoteinterface', 1)
def _check(res):
self.assertTrue(isinstance(res, RemoteReference))
#self.failUnless(RIDummy.providedBy(res))
self.failUnlessRemoteProvides(res, RIDummy)
d.addCallback(_check)
return d
def testReturnsRemoteInterfaceBad(self):
rr, target = self.setupTarget(TypesTarget(), True)
# returns_remoteinterface is specified to return an RIDummy
d = rr.callRemote('returns_remoteinterface', 0)
def _check_failure(f):
f.trap(Violation)
self.failUnlessIn("(in return value of <foolscap.test.common.TypesTarget object at ", str(f))
self.failUnlessIn(">.returns_remoteinterface)", str(f))
self.failUnlessIn("'15' is not a Referenceable", str(f))
self.deferredShouldFail(d, checker=_check_failure)
return d
def testReturnsRemoteInterfaceBad2(self):
rr, target = self.setupTarget(TypesTarget(), True)
# returns_remoteinterface is specified to return an RIDummy
d = rr.callRemote('returns_remoteinterface', -1)
def _check_failure(f):
f.trap(Violation)
self.failUnlessIn("(in return value of <foolscap.test.common.TypesTarget object at ", str(f))
self.failUnlessIn(">.returns_remoteinterface)", str(f))
self.failUnlessIn("<foolscap.test.common.TypesTarget object ",
str(f))
self.failUnlessIn(" does not provide RemoteInterface ", str(f))
self.failUnlessIn("foolscap.test.common.RIDummy", str(f))
self.deferredShouldFail(d, checker=_check_failure)
return d
class LocalTypes(TargetMixin, unittest.TestCase):
    """Constraint checks for local (non-Remote) interfaces.

    NOTE(review): this TestCase is removed with ``del LocalTypes`` right
    after its definition, so none of these tests currently run.  It also
    calls ``self.deferredShouldFail``, which is defined on ``Types`` and
    not inherited here — that would need fixing if these tests were ever
    re-enabled.
    """

    def setUp(self):
        TargetMixin.setUp(self)
        self.setupBrokers()

    def testTakesInterfaceGood(self):
        rr, target = self.setupTarget(TypesTarget(), True)
        d = rr.callRemote('takes_interface', DummyTarget())
        d.addCallback(lambda res: self.assertEqual(res, "good"))
        return d

    def testTakesInterfaceBad(self):
        rr, target = self.setupTarget(TypesTarget(), True)
        d = rr.callRemote('takes_interface', Foo())
        def _check_failure(f):
            f.trap(Violation)
            print(f)
        self.deferredShouldFail(d, checker=_check_failure)
        return d

    def testReturnsInterfaceGood(self):
        rr, target = self.setupTarget(TypesTarget(), True)
        d = rr.callRemote('returns_interface', True)
        def _check(res):
            #self.failUnless(isinstance(res, RemoteReference))
            self.assertTrue(IFoo.providedBy(res))
        d.addCallback(_check)
        return d

    def testReturnsInterfaceBad(self):
        rr, target = self.setupTarget(TypesTarget(), True)
        d = rr.callRemote('returns_interface', False)
        def _check_failure(f):
            f.trap(Violation)
            print(f)
        self.deferredShouldFail(d, checker=_check_failure)
        return d
# Remove the TestCase from the module namespace so the test runner never
# collects it (these tests depend on local-interface Guards that don't exist).
del LocalTypes # TODO: how could these tests possibly work? we need Guards.
| {
"content_hash": "74f590181d77ed7b0efd84309db604f4",
"timestamp": "",
"source": "github",
"line_count": 303,
"max_line_length": 105,
"avg_line_length": 42.27062706270627,
"alnum_prop": 0.6299188007495315,
"repo_name": "warner/foolscap",
"id": "01ef049d922d2a6d6d7750c4562679c0eeeb7fc5",
"size": "12865",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/foolscap/test/test_interfaces.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "1254"
},
{
"name": "Python",
"bytes": "1233676"
},
{
"name": "Roff",
"bytes": "214037"
}
],
"symlink_target": ""
} |
"""Tests for single_return module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.autograph.converters import single_return
from tensorflow.contrib.autograph.core import converter_testing
from tensorflow.python.framework.ops import name_scope
from tensorflow.python.platform import test
class SingleReturnTest(converter_testing.TestCase):
  """Tests for the single_return converter.

  Each test defines a small function, pushes it through
  single_return.transform, and checks that the compiled result behaves
  like the original function (or that unsupported return placements
  raise ValueError).
  """

  def compiled_fn(self, test_fn, *args):
    """Parses, transforms and compiles test_fn; returns the module."""
    node = self.parse_and_analyze(test_fn, {})
    node = single_return.transform(node, self.ctx)
    module = self.compiled(node, *args)
    return module

  def test_noop(self):
    # Noop: a single trailing return needs no rewriting.
    def test_fn(x):
      return x

    with self.compiled_fn(test_fn) as result:
      self.assertEqual(test_fn(2.0), result.test_fn(2.0))

  def test_return_expression(self):
    # ANF: returned expressions are handled, not just bare names.
    def test_fn(x):
      return x * x

    with self.compiled_fn(test_fn) as result:
      x = 2
      self.assertEqual(test_fn(x), result.test_fn(x))

  def test_merge(self):
    # Simple merge of two returning branches.
    def test_fn(x):
      if x > 0:
        return x
      else:
        return x * x

    with self.compiled_fn(test_fn) as result:
      for x in [-2, 2]:
        self.assertEqual(test_fn(x), result.test_fn(x))

  def test_orphan_branch(self):
    # A conditional return with no fall-through value is rejected.
    def test_fn(x):
      if x > 0:
        return x

    with self.assertRaises(ValueError):
      self.compiled_fn(test_fn)

  def test_lift_body_into_false_branch(self):
    def test_fn(x):
      if x > 0:
        return x
      return x * x

    with self.compiled_fn(test_fn) as result:
      for x in [-2, 2]:
        self.assertEqual(test_fn(x), result.test_fn(x))

  def test_lift_body_into_true_branch(self):
    def test_fn(x):
      if x < 0:
        x *= x
      else:
        # TODO(alexbw): linter bug here that requires us suppress this warning.
        return x  # pylint: disable=undefined-loop-variable
      return x

    with self.compiled_fn(test_fn) as result:
      for x in [-2, 2]:
        self.assertEqual(test_fn(x), result.test_fn(x))

  def test_nested_if(self):
    def test_fn(x):
      if x > 0:
        if x < 5:
          return x
        else:
          return x * x
      else:
        return x * x * x

    with self.compiled_fn(test_fn) as result:
      for x in [-2, 2, 5]:
        self.assertEqual(test_fn(x), result.test_fn(x))

  def test_context_manager(self):
    def test_fn(x):
      with name_scope(''):
        return x * x

    with self.compiled_fn(test_fn) as result:
      result.name_scope = name_scope
      for x in [-2, 2]:
        self.assertEqual(test_fn(x), result.test_fn(x))

  def test_context_manager_in_conditional(self):
    def test_fn(x):
      if x > 0:
        with name_scope(''):
          return x * x
      else:
        return x

    with self.compiled_fn(test_fn, name_scope) as result:
      result.name_scope = name_scope
      for x in [-2, 2]:
        self.assertEqual(test_fn(x), result.test_fn(x))

  def test_conditional_in_context_manager(self):
    # FIX: this method was named "text_conditional_in_context_manager";
    # the "text_" typo kept unittest's test discovery from ever running it.
    def test_fn(x):
      with name_scope(''):
        if x > 0:
          return x * x
        else:
          return x

    with self.compiled_fn(test_fn) as result:
      result.name_scope = name_scope
      for x in [-2, 2]:
        self.assertEqual(test_fn(x), result.test_fn(x))

  def test_no_return(self):
    # A function with no return at all compiles and returns None.
    def test_fn(x):
      x *= x

    with self.compiled_fn(test_fn) as result:
      self.assertEqual(test_fn(2), result.test_fn(2))

  def test_nested_functiondefs(self):
    # Inner function definitions are transformed independently.
    def test_fn(x):
      def inner_fn(y):
        if y > 0:
          return y * y
        else:
          return y

      return inner_fn(x)

    with self.compiled_fn(test_fn) as result:
      for x in [-2, 2]:
        self.assertEqual(test_fn(x), result.test_fn(x))

  def test_loop(self):
    # Returning from inside a loop is not supported.
    def test_fn(x):
      for _ in range(10):
        return x
      return x

    with self.assertRaises(ValueError):
      self.compiled_fn(test_fn)
if __name__ == '__main__':
  # Run the test suite when this module is executed directly.
  test.main()
| {
"content_hash": "7071dc229de4f49fd6a5a948d7d985ae",
"timestamp": "",
"source": "github",
"line_count": 175,
"max_line_length": 79,
"avg_line_length": 22.914285714285715,
"alnum_prop": 0.5880299251870325,
"repo_name": "caisq/tensorflow",
"id": "1f0de4310e370235a4a7bfeaa61bd519a81aff47",
"size": "4699",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "tensorflow/contrib/autograph/converters/single_return_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "9258"
},
{
"name": "C",
"bytes": "309340"
},
{
"name": "C++",
"bytes": "44750055"
},
{
"name": "CMake",
"bytes": "206817"
},
{
"name": "Go",
"bytes": "1163781"
},
{
"name": "HTML",
"bytes": "4680032"
},
{
"name": "Java",
"bytes": "795866"
},
{
"name": "Jupyter Notebook",
"bytes": "2266715"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "Makefile",
"bytes": "52050"
},
{
"name": "Objective-C",
"bytes": "15650"
},
{
"name": "Objective-C++",
"bytes": "99265"
},
{
"name": "PHP",
"bytes": "2140"
},
{
"name": "Perl",
"bytes": "7536"
},
{
"name": "PureBasic",
"bytes": "25356"
},
{
"name": "Python",
"bytes": "38665761"
},
{
"name": "Ruby",
"bytes": "533"
},
{
"name": "Shell",
"bytes": "447966"
},
{
"name": "Smarty",
"bytes": "6870"
}
],
"symlink_target": ""
} |
"""``tornado.gen`` is a generator-based interface to make it easier to
work in an asynchronous environment. Code using the ``gen`` module
is technically asynchronous, but it is written as a single generator
instead of a collection of separate functions.
For example, the following asynchronous handler::
class AsyncHandler(RequestHandler):
@asynchronous
def get(self):
http_client = AsyncHTTPClient()
http_client.fetch("http://example.com",
callback=self.on_fetch)
def on_fetch(self, response):
do_something_with_response(response)
self.render("template.html")
could be written with ``gen`` as::
class GenAsyncHandler(RequestHandler):
@asynchronous
@gen.coroutine
def get(self):
http_client = AsyncHTTPClient()
response = yield http_client.fetch("http://example.com")
do_something_with_response(response)
self.render("template.html")
Most asynchronous functions in Tornado return a `.Future`;
yielding this object returns its `~.Future.result`.
For functions that do not return ``Futures``, `Task` works with any
function that takes a ``callback`` keyword argument (most Tornado functions
can be used in either style, although the ``Future`` style is preferred
since it is both shorter and provides better exception handling)::
@gen.coroutine
def get(self):
yield gen.Task(AsyncHTTPClient().fetch, "http://example.com")
You can also yield a list of ``Futures`` and/or ``Tasks``, which will be
started at the same time and run in parallel; a list of results will
be returned when they are all finished::
@gen.coroutine
def get(self):
http_client = AsyncHTTPClient()
response1, response2 = yield [http_client.fetch(url1),
http_client.fetch(url2)]
For more complicated interfaces, `Task` can be split into two parts:
`Callback` and `Wait`::
class GenAsyncHandler2(RequestHandler):
@asynchronous
@gen.coroutine
def get(self):
http_client = AsyncHTTPClient()
http_client.fetch("http://example.com",
callback=(yield gen.Callback("key")))
response = yield gen.Wait("key")
do_something_with_response(response)
self.render("template.html")
The ``key`` argument to `Callback` and `Wait` allows for multiple
asynchronous operations to be started at different times and proceed
in parallel: yield several callbacks with different keys, then wait
for them once all the async operations have started.
The result of a `Wait` or `Task` yield expression depends on how the callback
was run. If it was called with no arguments, the result is ``None``. If
it was called with one argument, the result is that argument. If it was
called with more than one argument or any keyword arguments, the result
is an `Arguments` object, which is a named tuple ``(args, kwargs)``.
"""
from __future__ import absolute_import, division, print_function, with_statement
import collections
import functools
import itertools
import sys
import types
from tornado.concurrent import Future, TracebackFuture
from tornado.ioloop import IOLoop
from tornado.stack_context import ExceptionStackContext, wrap
class KeyReuseError(Exception):
    """A `Callback` key was registered again while still pending."""
    pass


class UnknownKeyError(Exception):
    """A result was requested for a key that is not pending."""
    pass


class LeakedCallbackError(Exception):
    """The generator finished while callbacks were still outstanding."""
    pass


class BadYieldError(Exception):
    """The generator yielded an object the runner does not understand."""
    pass


class ReturnValueIgnoredError(Exception):
    """An ``@gen.engine`` function returned a non-None value."""
    pass
def engine(func):
    """Callback-oriented decorator for asynchronous generators.

    This is an older interface; for new code that does not need to be
    compatible with versions of Tornado older than 3.0 the
    `coroutine` decorator is recommended instead.

    This decorator is similar to `coroutine`, except it does not
    return a `.Future` and the ``callback`` argument is not treated
    specially.

    In most cases, functions decorated with `engine` should take
    a ``callback`` argument and invoke it with their result when
    they are finished.  One notable exception is the
    `~tornado.web.RequestHandler` ``get``/``post``/etc methods,
    which use ``self.finish()`` in place of a callback argument.
    """
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        runner = None

        def handle_exception(typ, value, tb):
            # if the function throws an exception before its first "yield"
            # (or is not a generator at all), the Runner won't exist yet.
            # However, in that case we haven't reached anything asynchronous
            # yet, so we can just let the exception propagate.
            if runner is not None:
                return runner.handle_exception(typ, value, tb)
            return False

        with ExceptionStackContext(handle_exception) as deactivate:
            try:
                result = func(*args, **kwargs)
            except (Return, StopIteration) as e:
                # The function "returned" by raising; capture its value.
                result = getattr(e, 'value', None)
            else:
                if isinstance(result, types.GeneratorType):
                    # Generator case: drive it with a Runner until exhausted.
                    def final_callback(value):
                        # @gen.engine has no way to deliver a return value,
                        # so any non-None result is a programming error.
                        if value is not None:
                            raise ReturnValueIgnoredError(
                                "@gen.engine functions cannot return values: "
                                "%r" % result)
                        assert value is None
                        deactivate()
                    runner = Runner(result, final_callback)
                    runner.run()
                    return
            if result is not None:
                raise ReturnValueIgnoredError(
                    "@gen.engine functions cannot return values: %r" % result)
            # Plain (non-generator) function: nothing asynchronous happened.
            deactivate()
            # no yield, so we're done
    return wrapper
def coroutine(func):
    """Decorator for asynchronous generators.

    Any generator that yields objects from this module must be wrapped
    in either this decorator or `engine`.  These decorators only work
    on functions that are already asynchronous.  For
    `~tornado.web.RequestHandler` ``get``/``post``/etc methods, this
    means that both the `tornado.web.asynchronous` and
    `tornado.gen.coroutine` decorators must be used (for proper
    exception handling, ``asynchronous`` should come before
    ``gen.coroutine``).

    Coroutines may "return" by raising the special exception
    `Return(value) <Return>`.  In Python 3.3+, it is also possible for
    the function to simply use the ``return value`` statement (prior to
    Python 3.3 generators were not allowed to also return values).
    In all versions of Python a coroutine that simply wishes to exit
    early may use the ``return`` statement without a value.

    Functions with this decorator return a `.Future`.  Additionally,
    they may be called with a ``callback`` keyword argument, which
    will be invoked with the future's result when it resolves.  If the
    coroutine fails, the callback will not be run and an exception
    will be raised into the surrounding `.StackContext`.  The
    ``callback`` argument is not visible inside the decorated
    function; it is handled by the decorator itself.

    From the caller's perspective, ``@gen.coroutine`` is similar to
    the combination of ``@return_future`` and ``@gen.engine``.
    """
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        runner = None
        future = TracebackFuture()
        if 'callback' in kwargs:
            # The callback is consumed here (not passed to func) and fires
            # with the future's result once it resolves.
            callback = kwargs.pop('callback')
            IOLoop.current().add_future(
                future, lambda future: callback(future.result()))

        def handle_exception(typ, value, tb):
            # First give the Runner a chance to absorb the exception into
            # the generator; failing that, report it through the future.
            try:
                if runner is not None and runner.handle_exception(typ, value, tb):
                    return True
            except Exception:
                typ, value, tb = sys.exc_info()
            future.set_exc_info((typ, value, tb))
            return True

        with ExceptionStackContext(handle_exception) as deactivate:
            try:
                result = func(*args, **kwargs)
            except (Return, StopIteration) as e:
                # The function "returned" by raising; capture its value.
                result = getattr(e, 'value', None)
            except Exception:
                deactivate()
                future.set_exc_info(sys.exc_info())
                return future
            else:
                if isinstance(result, types.GeneratorType):
                    # Generator case: the future resolves when the Runner
                    # drives the generator to completion.
                    def final_callback(value):
                        deactivate()
                        future.set_result(value)
                    runner = Runner(result, final_callback)
                    runner.run()
                    return future
            deactivate()
            # Non-generator function: resolve immediately with its result.
            future.set_result(result)
        return future
    return wrapper
class Return(Exception):
    """Exception used by coroutines to deliver a result.

    Raising ``Return(value)`` inside a function decorated with
    `coroutine` terminates the generator and makes ``value`` the result
    of the coroutine::

        @gen.coroutine
        def fetch_json(url):
            response = yield AsyncHTTPClient().fetch(url)
            raise gen.Return(json_decode(response.body))

    On Python 3.3 and later this class is unnecessary: a plain
    ``return value`` statement works inside a generator (earlier
    versions forbade combining ``yield`` and a valued ``return``).

    The ``value`` argument is optional, by analogy with the return
    statement; a bare ``return`` can always be used instead of
    ``raise gen.Return()``.
    """
    def __init__(self, value=None):
        super(Return, self).__init__()
        self.value = value
class YieldPoint(object):
    """Abstract base for objects that a ``gen`` generator may yield.

    Application code rarely needs this directly; subclass it to add new
    yielding behavior.  The runner drives an instance through three
    hooks: `start` once after the yield, `is_ready` (possibly polled
    repeatedly), and `get_result` exactly once afterwards.
    """

    def start(self, runner):
        """Hook invoked by the runner right after the generator yields.

        Guaranteed to be the first method called on this object.
        """
        raise NotImplementedError()

    def is_ready(self):
        """Report whether the generator may resume; may be called repeatedly."""
        raise NotImplementedError()

    def get_result(self):
        """Produce the value of the yield expression.

        Called only once, and only after `is_ready` has returned true.
        """
        raise NotImplementedError()
class Callback(YieldPoint):
    """Returns a callable object that will allow a matching `Wait` to proceed.

    The key may be any value suitable for use as a dictionary key, and is
    used to match ``Callbacks`` to their corresponding ``Waits``.  The key
    must be unique among outstanding callbacks within a single run of the
    generator function, but may be reused across different runs of the same
    function (so constants generally work fine).

    The callback may be called with zero or one arguments; if an argument
    is given it will be returned by `Wait`.
    """
    def __init__(self, key):
        self.key = key

    def start(self, runner):
        self.runner = runner
        # Reserve the key; reuse while still pending raises KeyReuseError.
        runner.register_callback(self.key)

    def is_ready(self):
        # Yielding a Callback never blocks; the matching Wait does.
        return True

    def get_result(self):
        # The yield expression evaluates to the callable that the caller
        # hands to the asynchronous operation.
        return self.runner.result_callback(self.key)
class Wait(YieldPoint):
    """Returns the argument passed to the result of a previous `Callback`."""
    def __init__(self, key):
        self.key = key

    def start(self, runner):
        self.runner = runner

    def is_ready(self):
        # Ready once the paired Callback's callable has been invoked.
        return self.runner.is_ready(self.key)

    def get_result(self):
        # Consumes the stored value and unregisters the key.
        return self.runner.pop_result(self.key)
class WaitAll(YieldPoint):
    """Returns the results of multiple previous `Callbacks <Callback>`.

    The argument is a sequence of `Callback` keys, and the result is
    a list of results in the same order.

    `WaitAll` is equivalent to yielding a list of `Wait` objects.
    """
    def __init__(self, keys):
        self.keys = keys

    def start(self, runner):
        self.runner = runner

    def is_ready(self):
        # Ready only once every key's callback has fired.
        return all(self.runner.is_ready(key) for key in self.keys)

    def get_result(self):
        return [self.runner.pop_result(key) for key in self.keys]
class Task(YieldPoint):
    """Runs a single asynchronous operation.

    Takes a function (and optional additional arguments) and runs it with
    those arguments plus a ``callback`` keyword argument.  The argument
    passed to the callback is returned as the result of the yield
    expression.

    A `Task` is equivalent to a `Callback`/`Wait` pair (with a unique
    key generated automatically)::

        result = yield gen.Task(func, args)

        func(args, callback=(yield gen.Callback(key)))
        result = yield gen.Wait(key)
    """
    def __init__(self, func, *args, **kwargs):
        # The runner supplies the callback; the caller must not.
        assert "callback" not in kwargs
        self.args = args
        self.kwargs = kwargs
        self.func = func

    def start(self, runner):
        self.runner = runner
        # A fresh object() is a guaranteed-unique key.
        self.key = object()
        runner.register_callback(self.key)
        self.kwargs["callback"] = runner.result_callback(self.key)
        self.func(*self.args, **self.kwargs)

    def is_ready(self):
        return self.runner.is_ready(self.key)

    def get_result(self):
        return self.runner.pop_result(self.key)
class YieldFuture(YieldPoint):
    """Adapts a `.Future` to the `YieldPoint` interface.

    Lets generators yield Futures directly; the runner wraps them in
    this class automatically (see `Runner.run` and `Multi`).
    """
    def __init__(self, future, io_loop=None):
        self.future = future
        self.io_loop = io_loop or IOLoop.current()

    def start(self, runner):
        self.runner = runner
        self.key = object()
        runner.register_callback(self.key)
        # When the future resolves, hand it (still wrapped) to the runner.
        self.io_loop.add_future(self.future, runner.result_callback(self.key))

    def is_ready(self):
        return self.runner.is_ready(self.key)

    def get_result(self):
        # pop_result returns the future itself; .result() unwraps it and
        # re-raises the future's exception, if any.
        return self.runner.pop_result(self.key).result()
class Multi(YieldPoint):
    """Runs multiple asynchronous operations in parallel.

    Takes a list of ``Tasks`` or other ``YieldPoints`` and returns a list of
    their responses.  It is not necessary to call `Multi` explicitly,
    since the engine will do so automatically when the generator yields
    a list of ``YieldPoints``.
    """
    def __init__(self, children):
        self.children = []
        for i in children:
            # Bare Futures are adapted so every child speaks YieldPoint.
            if isinstance(i, Future):
                i = YieldFuture(i)
            self.children.append(i)
        assert all(isinstance(i, YieldPoint) for i in self.children)
        self.unfinished_children = set(self.children)

    def start(self, runner):
        for i in self.children:
            i.start(runner)

    def is_ready(self):
        # NOTE(review): takewhile stops at the first not-ready child, and
        # self.unfinished_children is a set (arbitrary iteration order), so
        # each call only retires a ready "prefix"; remaining ready children
        # are retired on subsequent calls.
        finished = list(itertools.takewhile(
            lambda i: i.is_ready(), self.unfinished_children))
        self.unfinished_children.difference_update(finished)
        return not self.unfinished_children

    def get_result(self):
        # Results come back in the original order, not completion order.
        return [i.get_result() for i in self.children]
class _NullYieldPoint(YieldPoint):
    """Always-ready yield point with a None result.

    Used as the Runner's initial state so that the first resume performs
    ``gen.send(None)`` without special-casing.
    """
    def start(self, runner):
        pass

    def is_ready(self):
        return True

    def get_result(self):
        return None
class Runner(object):
    """Internal implementation of `tornado.gen.engine`.

    Maintains information about pending callbacks and their results.

    ``final_callback`` is run after the generator exits.
    """
    def __init__(self, gen, final_callback):
        self.gen = gen
        self.final_callback = final_callback
        # Trivially-ready placeholder so the first run() does gen.send(None),
        # which is how a generator must be started.
        self.yield_point = _NullYieldPoint()
        self.pending_callbacks = set()  # keys registered but not yet consumed
        self.results = {}  # key -> value delivered via result_callback
        self.running = False  # currently inside run() (reentrancy guard)
        self.finished = False  # generator has exited
        self.exc_info = None  # pending exception to throw into the generator
        self.had_exception = False  # an exception was thrown at some point

    def register_callback(self, key):
        """Adds ``key`` to the list of callbacks."""
        if key in self.pending_callbacks:
            raise KeyReuseError("key %r is already pending" % key)
        self.pending_callbacks.add(key)

    def is_ready(self, key):
        """Returns true if a result is available for ``key``."""
        if key not in self.pending_callbacks:
            raise UnknownKeyError("key %r is not pending" % key)
        return key in self.results

    def set_result(self, key, result):
        """Sets the result for ``key`` and attempts to resume the generator."""
        self.results[key] = result
        self.run()

    def pop_result(self, key):
        """Returns the result for ``key`` and unregisters it."""
        self.pending_callbacks.remove(key)
        return self.results.pop(key)

    def run(self):
        """Starts or resumes the generator, running until it reaches a
        yield point that is not ready.
        """
        if self.running or self.finished:
            return
        try:
            self.running = True
            while True:
                if self.exc_info is None:
                    try:
                        # If the current yield point isn't ready, simply
                        # return; a later set_result() calls run() again.
                        if not self.yield_point.is_ready():
                            return
                        # NOTE(review): `next` shadows the builtin of the
                        # same name within this method.
                        next = self.yield_point.get_result()
                    except Exception:
                        self.exc_info = sys.exc_info()
                try:
                    if self.exc_info is not None:
                        # Deliver a pending exception into the generator.
                        self.had_exception = True
                        exc_info = self.exc_info
                        self.exc_info = None
                        yielded = self.gen.throw(*exc_info)
                    else:
                        yielded = self.gen.send(next)
                except (StopIteration, Return) as e:
                    self.finished = True
                    if self.pending_callbacks and not self.had_exception:
                        # If we ran cleanly without waiting on all callbacks
                        # raise an error (really more of a warning). If we
                        # had an exception then some callbacks may have been
                        # orphaned, so skip the check in that case.
                        raise LeakedCallbackError(
                            "finished without waiting for callbacks %r" %
                            self.pending_callbacks)
                    self.final_callback(getattr(e, 'value', None))
                    self.final_callback = None
                    return
                except Exception:
                    self.finished = True
                    raise
                # Normalize what the generator yielded into a YieldPoint.
                if isinstance(yielded, list):
                    yielded = Multi(yielded)
                elif isinstance(yielded, Future):
                    yielded = YieldFuture(yielded)
                if isinstance(yielded, YieldPoint):
                    self.yield_point = yielded
                    try:
                        self.yield_point.start(self)
                    except Exception:
                        self.exc_info = sys.exc_info()
                else:
                    # NOTE(review): stored as a 1-tuple; gen.throw(*exc_info)
                    # is then called with a single exception instance, which
                    # is a valid form of throw().
                    self.exc_info = (BadYieldError("yielded unknown object %r" % yielded),)
        finally:
            self.running = False

    def result_callback(self, key):
        def inner(*args, **kwargs):
            # Normalize the callback invocation into one result value:
            # no args -> None, one arg -> that arg, otherwise an
            # Arguments named tuple of (args, kwargs).
            if kwargs or len(args) > 1:
                result = Arguments(args, kwargs)
            elif args:
                result = args[0]
            else:
                result = None
            self.set_result(key, result)
        # wrap() ties the callback to the current stack context.
        return wrap(inner)

    def handle_exception(self, typ, value, tb):
        # Accept the exception (to be thrown into the generator) only while
        # suspended at a yield point; otherwise let it propagate.
        if not self.running and not self.finished:
            self.exc_info = (typ, value, tb)
            self.run()
            return True
        else:
            return False
# Result wrapper used when a callback is invoked with multiple positional
# and/or keyword arguments: a named two-tuple of (args, kwargs).
Arguments = collections.namedtuple('Arguments', 'args kwargs')
| {
"content_hash": "5d5f0aecbf56eb6db125626fe44b1f7d",
"timestamp": "",
"source": "github",
"line_count": 560,
"max_line_length": 91,
"avg_line_length": 35.45,
"alnum_prop": 0.6081503123111022,
"repo_name": "hiphopsmurf/bitcoin-secured",
"id": "23b3c81c35fe59310492f133ad7971d55677fef8",
"size": "19852",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "online/build/tornado/build/lib.linux-x86_64-2.7/tornado/gen.py",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
} |
from setuptools import setup, find_packages
setup(
    # Distribution metadata for the django-feedback package.
    name='django-feedback',
    # Single-source the version from the package's __version__ attribute.
    version=__import__('feedback').__version__,
    description='Basic Django Feedback',
    author='Luke Hutscal',
    author_email='luke@creaturecreative.com',
    url='http://github.com/girasquid/django-feedback/',
    packages=find_packages(),
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Framework :: Django',
    ],
    # Ship non-Python package data files as well.
    include_package_data=True,
    zip_safe=False,
) | {
"content_hash": "3eab7ac4065d6529f9541d76539e25cc",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 55,
"avg_line_length": 31.954545454545453,
"alnum_prop": 0.6301564722617354,
"repo_name": "girasquid/django-feedback",
"id": "13082a0e750a37fa70d2cfde5b054dbc89f6ae2b",
"size": "703",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "6871"
}
],
"symlink_target": ""
} |
"""Run regression test suite.
This module calls down into individual test cases via subprocess. It will
forward all unrecognized arguments onto the individual test scripts.
For a description of arguments recognized by test scripts, see
`test/functional/test_framework/test_framework.py:BitcoinTestFramework.main`.
"""
import argparse
from collections import deque
import configparser
import datetime
import os
import time
import shutil
import signal
import subprocess
import sys
import tempfile
import re
import logging
import unittest
# Formatting. Default colors to empty strings.
DEFAULT, BOLD, GREEN, RED = ("", ""), ("", ""), ("", ""), ("", "")
try:
    # Make sure python thinks it can write unicode to its stdout
    "\u2713".encode("utf_8").decode(sys.stdout.encoding)
    TICK = "✓ "
    CROSS = "✖ "
    CIRCLE = "○ "
except UnicodeDecodeError:
    # Fall back to plain ASCII markers on terminals that can't render them.
    TICK = "P "
    CROSS = "x "
    CIRCLE = "o "

# `or` short-circuits, so sys.getwindowsversion() is only reached on Windows.
# The (10, 0, 14393) build gate is presumably the first build supporting
# ANSI escapes — TODO confirm.
if os.name != 'nt' or sys.getwindowsversion() >= (10, 0, 14393): #type:ignore
    if os.name == 'nt':
        import ctypes
        kernel32 = ctypes.windll.kernel32  # type: ignore
        ENABLE_VIRTUAL_TERMINAL_PROCESSING = 4
        STD_OUTPUT_HANDLE = -11
        STD_ERROR_HANDLE = -12
        # Enable ascii color control to stdout
        stdout = kernel32.GetStdHandle(STD_OUTPUT_HANDLE)
        stdout_mode = ctypes.c_int32()
        kernel32.GetConsoleMode(stdout, ctypes.byref(stdout_mode))
        kernel32.SetConsoleMode(stdout, stdout_mode.value | ENABLE_VIRTUAL_TERMINAL_PROCESSING)
        # Enable ascii color control to stderr
        stderr = kernel32.GetStdHandle(STD_ERROR_HANDLE)
        stderr_mode = ctypes.c_int32()
        kernel32.GetConsoleMode(stderr, ctypes.byref(stderr_mode))
        kernel32.SetConsoleMode(stderr, stderr_mode.value | ENABLE_VIRTUAL_TERMINAL_PROCESSING)
    # primitive formatting on supported
    # terminal via ANSI escape sequences:
    DEFAULT = ('\033[0m', '\033[0m')
    BOLD = ('\033[0m', '\033[1m')
    GREEN = ('\033[0m', '\033[0;32m')
    RED = ('\033[0m', '\033[0;31m')
# Exit status a test script returns when all its checks passed.
TEST_EXIT_PASSED = 0
# Exit status a test script returns when it skipped itself; matched against
# proc.returncode in TestHandler.get_next() below.
TEST_EXIT_SKIPPED = 77

# test_framework submodules that ship their own unit tests; run_tests() loads
# and executes these with unittest before any functional scripts start.
TEST_FRAMEWORK_MODULES = [
    "address",
    "blocktools",
    "muhash",
    "key",
    "script",
    "segwit_addr",
    "util",
]
EXTENDED_SCRIPTS = [
# These tests are not run by default.
# Longest test should go first, to favor running tests in parallel
'feature_pruning.py',
'feature_dbcrash.py',
'feature_index_prune.py',
]
BASE_SCRIPTS = [
# Scripts that are run by default.
# Longest test should go first, to favor running tests in parallel
'wallet_hd.py --legacy-wallet',
'wallet_hd.py --descriptors',
'wallet_backup.py --legacy-wallet',
'wallet_backup.py --descriptors',
# vv Tests less than 5m vv
'mining_getblocktemplate_longpoll.py',
'feature_maxuploadtarget.py',
'feature_block.py',
'rpc_fundrawtransaction.py --legacy-wallet',
'rpc_fundrawtransaction.py --descriptors',
'p2p_compactblocks.py',
'p2p_compactblocks_blocksonly.py',
'feature_segwit.py --legacy-wallet',
'feature_segwit.py --descriptors',
# vv Tests less than 2m vv
'wallet_basic.py --legacy-wallet',
'wallet_basic.py --descriptors',
'wallet_labels.py --legacy-wallet',
'wallet_labels.py --descriptors',
'p2p_segwit.py',
'p2p_timeouts.py',
'p2p_tx_download.py',
'mempool_updatefromblock.py',
'wallet_dump.py --legacy-wallet',
'feature_taproot.py',
'rpc_signer.py',
'wallet_signer.py --descriptors',
# vv Tests less than 60s vv
'p2p_sendheaders.py',
'wallet_importmulti.py --legacy-wallet',
'mempool_limit.py',
'rpc_txoutproof.py',
'wallet_listreceivedby.py --legacy-wallet',
'wallet_listreceivedby.py --descriptors',
'wallet_abandonconflict.py --legacy-wallet',
'p2p_dns_seeds.py',
'wallet_abandonconflict.py --descriptors',
'feature_csv_activation.py',
'wallet_address_types.py --legacy-wallet',
'wallet_address_types.py --descriptors',
'feature_bip68_sequence.py',
'p2p_feefilter.py',
'rpc_packages.py',
'feature_reindex.py',
'feature_abortnode.py',
# vv Tests less than 30s vv
'wallet_keypool_topup.py --legacy-wallet',
'wallet_keypool_topup.py --descriptors',
'wallet_fast_rescan.py --descriptors',
'feature_fee_estimation.py',
'interface_zmq.py',
'rpc_invalid_address_message.py',
'interface_bitcoin_cli.py --legacy-wallet',
'interface_bitcoin_cli.py --descriptors',
'feature_bind_extra.py',
'mempool_resurrect.py',
'wallet_txn_doublespend.py --mineblock',
'tool_wallet.py --legacy-wallet',
'tool_wallet.py --descriptors',
'tool_signet_miner.py --legacy-wallet',
'tool_signet_miner.py --descriptors',
'wallet_txn_clone.py',
'wallet_txn_clone.py --segwit',
'rpc_getchaintips.py',
'rpc_misc.py',
'interface_rest.py',
'mempool_spend_coinbase.py',
'wallet_avoidreuse.py --legacy-wallet',
'wallet_avoidreuse.py --descriptors',
'wallet_avoid_mixing_output_types.py --descriptors',
'mempool_reorg.py',
'mempool_persist.py',
'p2p_block_sync.py',
'wallet_multiwallet.py --legacy-wallet',
'wallet_multiwallet.py --descriptors',
'wallet_multiwallet.py --usecli',
'wallet_createwallet.py --legacy-wallet',
'wallet_createwallet.py --usecli',
'wallet_createwallet.py --descriptors',
'wallet_listtransactions.py --legacy-wallet',
'wallet_listtransactions.py --descriptors',
'wallet_watchonly.py --legacy-wallet',
'wallet_watchonly.py --usecli --legacy-wallet',
'wallet_reorgsrestore.py',
'interface_http.py',
'interface_rpc.py',
'interface_usdt_coinselection.py',
'interface_usdt_net.py',
'interface_usdt_utxocache.py',
'interface_usdt_validation.py',
'rpc_psbt.py --legacy-wallet',
'rpc_psbt.py --descriptors',
'rpc_users.py',
'rpc_whitelist.py',
'feature_proxy.py',
'feature_syscall_sandbox.py',
'wallet_signrawtransactionwithwallet.py --legacy-wallet',
'wallet_signrawtransactionwithwallet.py --descriptors',
'rpc_signrawtransactionwithkey.py',
'p2p_headers_sync_with_minchainwork.py',
'rpc_rawtransaction.py --legacy-wallet',
'wallet_groups.py --legacy-wallet',
'wallet_transactiontime_rescan.py --descriptors',
'wallet_transactiontime_rescan.py --legacy-wallet',
'p2p_addrv2_relay.py',
'wallet_groups.py --descriptors',
'p2p_compactblocks_hb.py',
'p2p_disconnect_ban.py',
'rpc_decodescript.py',
'rpc_blockchain.py',
'rpc_deprecated.py',
'wallet_disable.py',
'p2p_addr_relay.py',
'p2p_getaddr_caching.py',
'p2p_getdata.py',
'p2p_addrfetch.py',
'rpc_net.py',
'wallet_keypool.py --legacy-wallet',
'wallet_keypool.py --descriptors',
'wallet_descriptor.py --descriptors',
'wallet_miniscript.py',
'feature_maxtipage.py',
'p2p_nobloomfilter_messages.py',
'p2p_filter.py',
'rpc_setban.py',
'p2p_blocksonly.py',
'mining_prioritisetransaction.py',
'p2p_invalid_locator.py',
'p2p_invalid_block.py',
'p2p_invalid_messages.py',
'p2p_invalid_tx.py',
'feature_assumevalid.py',
'example_test.py',
'wallet_txn_doublespend.py --legacy-wallet',
'wallet_multisig_descriptor_psbt.py',
'wallet_txn_doublespend.py --descriptors',
'feature_backwards_compatibility.py --legacy-wallet',
'feature_backwards_compatibility.py --descriptors',
'wallet_txn_clone.py --mineblock',
'feature_notifications.py',
'rpc_getblockfilter.py',
'rpc_getblockfrompeer.py',
'rpc_invalidateblock.py',
'feature_utxo_set_hash.py',
'feature_rbf.py',
'mempool_packages.py',
'mempool_package_onemore.py',
'rpc_createmultisig.py',
'mempool_package_limits.py',
'feature_versionbits_warning.py',
'rpc_preciousblock.py',
'wallet_importprunedfunds.py --legacy-wallet',
'wallet_importprunedfunds.py --descriptors',
'p2p_leak_tx.py',
'p2p_eviction.py',
'wallet_signmessagewithaddress.py',
'rpc_signmessagewithprivkey.py',
'rpc_generate.py',
'wallet_balance.py --legacy-wallet',
'wallet_balance.py --descriptors',
'p2p_initial_headers_sync.py',
'feature_nulldummy.py',
'mempool_accept.py',
'mempool_expiry.py',
'wallet_import_rescan.py --legacy-wallet',
'wallet_import_with_label.py --legacy-wallet',
'wallet_importdescriptors.py --descriptors',
'wallet_upgradewallet.py --legacy-wallet',
'rpc_bind.py --ipv4',
'rpc_bind.py --ipv6',
'rpc_bind.py --nonloopback',
'wallet_crosschain.py',
'mining_basic.py',
'feature_signet.py',
'wallet_bumpfee.py --legacy-wallet',
'wallet_bumpfee.py --descriptors',
'wallet_implicitsegwit.py --legacy-wallet',
'rpc_named_arguments.py',
'feature_startupnotify.py',
'wallet_simulaterawtx.py --legacy-wallet',
'wallet_simulaterawtx.py --descriptors',
'wallet_listsinceblock.py --legacy-wallet',
'wallet_listsinceblock.py --descriptors',
'wallet_listdescriptors.py --descriptors',
'p2p_leak.py',
'wallet_encryption.py --legacy-wallet',
'wallet_encryption.py --descriptors',
'feature_dersig.py',
'feature_cltv.py',
'rpc_uptime.py',
'feature_discover.py',
'wallet_resendwallettransactions.py --legacy-wallet',
'wallet_resendwallettransactions.py --descriptors',
'wallet_fallbackfee.py --legacy-wallet',
'wallet_fallbackfee.py --descriptors',
'rpc_dumptxoutset.py',
'feature_minchainwork.py',
'rpc_estimatefee.py',
'rpc_getblockstats.py',
'feature_bind_port_externalip.py',
'wallet_create_tx.py --legacy-wallet',
'wallet_send.py --legacy-wallet',
'wallet_send.py --descriptors',
'wallet_sendall.py --legacy-wallet',
'wallet_sendall.py --descriptors',
'wallet_create_tx.py --descriptors',
'wallet_taproot.py',
'wallet_inactive_hdchains.py',
'p2p_fingerprint.py',
'feature_uacomment.py',
'feature_init.py',
'wallet_coinbase_category.py --legacy-wallet',
'wallet_coinbase_category.py --descriptors',
'feature_filelock.py',
'feature_loadblock.py',
'p2p_dos_header_tree.py',
'p2p_add_connections.py',
'feature_bind_port_discover.py',
'p2p_unrequested_blocks.py',
'p2p_blockfilters.py',
'p2p_message_capture.py',
'feature_includeconf.py',
'feature_addrman.py',
'feature_asmap.py',
'mempool_unbroadcast.py',
'mempool_compatibility.py',
'mempool_accept_wtxid.py',
'rpc_deriveaddresses.py',
'rpc_deriveaddresses.py --usecli',
'p2p_ping.py',
'rpc_scanblocks.py',
'p2p_sendtxrcncl.py',
'rpc_scantxoutset.py',
'feature_txindex_compatibility.py',
'feature_unsupported_utxo_db.py',
'feature_logging.py',
'feature_anchors.py',
'mempool_datacarrier.py',
'feature_coinstatsindex.py',
'wallet_orphanedreward.py',
'wallet_timelock.py',
'p2p_node_network_limited.py',
'p2p_permissions.py',
'feature_blocksdir.py',
'wallet_startup.py',
'p2p_i2p_ports.py',
'p2p_i2p_sessions.py',
'feature_config_args.py',
'feature_presegwit_node_upgrade.py',
'feature_settings.py',
'rpc_getdescriptorinfo.py',
'rpc_mempool_info.py',
'rpc_help.py',
'feature_dirsymlinks.py',
'feature_help.py',
'feature_shutdown.py',
'wallet_migration.py',
'p2p_ibd_txrelay.py',
# Don't append tests at the end to avoid merge conflicts
# Put them in a random line within the section that fits their approximate run-time
]
PARTICL_SCRIPTS = [
'p2p_part_fork.py',
'feature_part_pos.py',
'feature_part_extkey.py',
'feature_part_stealth.py',
'feature_part_blind.py',
'feature_part_anon.py',
'feature_part_taproot.py',
'wallet_part_particl.py',
'rpc_part_mnemonic.py',
'feature_part_smsg.py',
'feature_part_smsgpaid.py',
'feature_part_smsgpaidfee.py',
'wallet_part_multisig.py',
'wallet_part_multiwallet.py',
'feature_part_coldstaking.py',
'rpc_part_filtertransactions.py',
'feature_part_vote.py',
'feature_part_zmq_test.py',
'rpc_part_wallet.py',
'feature_part_usbdevice.py',
'wallet_part_watchonly.py',
'rpc_part_atomicswap.py',
'rpc_part_signmessage.py',
'wallet_part_avoidreuse.py',
'wallet_part_segwit_scripts.py',
'p2p_part_disable_types.py',
]
PARTICL_SCRIPTS_EXT = [
'feature_part_smsg_multiwallet.py',
'feature_part_smsg_rollingcache.py',
'feature_part_treasury_fund.py',
'rpc_part_tracefrozenoutputs.py',
'feature_part_vote_extra.py',
'wallet_part_unloadspent.py',
'p2p_part_dos.py',
'feature_part_smsgpaidfee_ext.py',
]
INSIGHT_SCRIPTS = [
'feature_ins_addressindex.py',
'feature_ins_timestampindex.py',
'feature_ins_spentindex.py',
'feature_ins_txindex.py',
'feature_ins_csindex.py',
'feature_ins_balancesindex.py',
]
# Place EXTENDED_SCRIPTS first since it has the 3 longest running tests
ALL_SCRIPTS = EXTENDED_SCRIPTS + BASE_SCRIPTS + PARTICL_SCRIPTS + INSIGHT_SCRIPTS + PARTICL_SCRIPTS_EXT

NON_SCRIPTS = [
    # These are python files that live in the functional tests directory, but are not test scripts.
    "combine_logs.py",
    "create_cache.py",
    "test_runner.py",
]

# Parsed command-line options; assigned once in main() and read from helpers
# (e.g. run_tests() checks gArgs.withstdout).
gArgs = None
def main():
    """Entry point: parse options, assemble the test list, and run it.

    Unrecognised ``--flags`` are forwarded verbatim to every test script;
    remaining positional arguments select individual tests by name/wildcard.
    Exits via sys.exit() on --help, empty test list, or disabled daemon.
    """
    global gArgs
    # Parse arguments and pass through unrecognised args
    parser = argparse.ArgumentParser(add_help=False,
                                     usage='%(prog)s [test_runner.py options] [script options] [scripts]',
                                     description=__doc__,
                                     epilog='''
Help text and arguments for individual test script:''',
                                     formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument('--ansi', action='store_true', default=sys.stdout.isatty(), help="Use ANSI colors and dots in output (enabled by default when standard output is a TTY)")
    parser.add_argument('--combinedlogslen', '-c', type=int, default=0, metavar='n', help='On failure, print a log (of length n lines) to the console, combined from the test framework and all test nodes.')
    parser.add_argument('--coverage', action='store_true', help='generate a basic coverage report for the RPC interface')
    parser.add_argument('--ci', action='store_true', help='Run checks and code that are usually only enabled in a continuous integration environment')
    parser.add_argument('--exclude', '-x', help='specify a comma-separated-list of scripts to exclude.')
    parser.add_argument('--extended', action='store_true', help='run the extended test suite in addition to the basic tests')
    parser.add_argument('--bitcoin', action='store_true', help='run Bitcoin specific tests')
    parser.add_argument('--particl', action='store_true', help='run Particl specific tests')
    parser.add_argument('--particlext', action='store_true', help='run Particl extended tests')
    parser.add_argument('--insight', action='store_true', help='run Insight specific tests')
    parser.add_argument('--withstdout', action='store_true', help='print stdout when test passed also')
    parser.add_argument('--help', '-h', '-?', action='store_true', help='print help text and exit')
    parser.add_argument('--jobs', '-j', type=int, default=4, help='how many test scripts to run in parallel. Default=4.')
    parser.add_argument('--keepcache', '-k', action='store_true', help='the default behavior is to flush the cache directory on startup. --keepcache retains the cache from the previous testrun.')
    parser.add_argument('--quiet', '-q', action='store_true', help='only print dots, results summary and failure logs')
    parser.add_argument('--tmpdirprefix', '-t', default=tempfile.gettempdir(), help="Root directory for datadirs")
    parser.add_argument('--failfast', '-F', action='store_true', help='stop execution after the first test failure')
    parser.add_argument('--filter', help='filter scripts to run by regular expression')
    args, unknown_args = parser.parse_known_args()
    # Publish parsed options for helpers (run_tests reads gArgs.withstdout).
    gArgs = args
    if not args.ansi:
        # Disable all coloring/glyph escape sequences.
        global DEFAULT, BOLD, GREEN, RED
        DEFAULT = ("", "")
        BOLD = ("", "")
        GREEN = ("", "")
        RED = ("", "")

    # args to be passed on always start with two dashes; tests are the remaining unknown args
    tests = [arg for arg in unknown_args if arg[:2] != "--"]
    passon_args = [arg for arg in unknown_args if arg[:2] == "--"]

    # Read config generated by configure.
    config = configparser.ConfigParser()
    configfile = os.path.abspath(os.path.dirname(__file__)) + "/../config.ini"
    config.read_file(open(configfile, encoding="utf8"))
    passon_args.append("--configfile=%s" % configfile)

    # Set up logging
    logging_level = logging.INFO if args.quiet else logging.DEBUG
    logging.basicConfig(format='%(message)s', level=logging_level)

    # Create base test directory
    tmpdir = "%s/particl_test_runner_P_🏃_%s" % (args.tmpdirprefix, datetime.datetime.now().strftime("%Y%m%d_%H%M%S"))
    os.makedirs(tmpdir)
    logging.debug("Temporary test directory at %s" % tmpdir)

    enable_bitcoind = config["components"].getboolean("ENABLE_BITCOIND")
    if not enable_bitcoind:
        print("No functional tests to run.")
        print("Rerun ./configure with --with-daemon and then make")
        sys.exit(0)

    # Build list of tests
    test_list = []
    if tests:
        # Individual tests have been specified. Run specified tests that exist
        # in the ALL_SCRIPTS list. Accept names with or without a .py extension.
        # Specified tests can contain wildcards, but in that case the supplied
        # paths should be coherent, e.g. the same path as that provided to call
        # test_runner.py. Examples:
        #   `test/functional/test_runner.py test/functional/wallet*`
        #   `test/functional/test_runner.py ./test/functional/wallet*`
        #   `test_runner.py wallet*`
        # but not:
        #   `test/functional/test_runner.py wallet*`
        # Multiple wildcards can be passed:
        #   `test_runner.py tool* mempool*`
        for test in tests:
            script = test.split("/")[-1]
            script = script + ".py" if ".py" not in script else script
            matching_scripts = [s for s in ALL_SCRIPTS if s.startswith(script)]
            if matching_scripts:
                test_list.extend(matching_scripts)
            else:
                print("{}WARNING!{} Test '{}' not found in full test list.".format(BOLD[1], BOLD[0], test))
    # Particl divergence from upstream: --extended alone does not imply the
    # whole suite; the per-flag selection below is used instead.
    #elif args.extended:
    #    # Include extended tests
    #    test_list += ALL_SCRIPTS
    else:
        # No individual tests have been specified.
        # Run all base tests, and optionally run extended tests.
        test_list = []
        if args.extended:
            test_list += EXTENDED_SCRIPTS
        if args.particl:
            test_list += PARTICL_SCRIPTS
        if args.insight:
            test_list += INSIGHT_SCRIPTS
        if args.bitcoin:
            test_list += BASE_SCRIPTS
        if args.particlext:
            test_list += PARTICL_SCRIPTS_EXT

    # Remove the test cases that the user has explicitly asked to exclude.
    if args.exclude:
        exclude_tests = [test.split('.py')[0] for test in args.exclude.split(',')]
        for exclude_test in exclude_tests:
            # Remove <test_name>.py and <test_name>.py --arg from the test list
            exclude_list = [test for test in test_list if test.split('.py')[0] == exclude_test]
            for exclude_item in exclude_list:
                test_list.remove(exclude_item)
            if not exclude_list:
                print("{}WARNING!{} Test '{}' not found in current test list.".format(BOLD[1], BOLD[0], exclude_test))

    if args.filter:
        # Keep only names matched (anywhere) by the user-supplied regex.
        test_list = list(filter(re.compile(args.filter).search, test_list))

    if not test_list:
        print("No valid test scripts specified. Check that your test is in one "
              "of the test lists in test_runner.py, or run test_runner.py with no arguments to run all tests")
        sys.exit(0)

    if args.help:
        # Print help for test_runner.py, then print help of the first script (with args removed) and exit.
        parser.print_help()
        subprocess.check_call([sys.executable, os.path.join(config["environment"]["SRCDIR"], 'test', 'functional', test_list[0].split()[0]), '-h'])
        sys.exit(0)

    check_script_list(src_dir=config["environment"]["SRCDIR"], fail_on_warn=args.ci)
    check_script_prefixes()

    if not args.keepcache:
        shutil.rmtree("%s/test/cache" % config["environment"]["BUILDDIR"], ignore_errors=True)

    run_tests(
        test_list=test_list,
        src_dir=config["environment"]["SRCDIR"],
        build_dir=config["environment"]["BUILDDIR"],
        tmpdir=tmpdir,
        jobs=args.jobs,
        enable_coverage=args.coverage,
        args=passon_args,
        combined_logs_len=args.combinedlogslen,
        failfast=args.failfast,
        use_term_control=args.ansi,
        # The blockchain cache only helps the Bitcoin-style suites.
        create_cache=(True if args.bitcoin or (not args.particl and not args.insight) else False)
    )
def run_tests(*, test_list, src_dir, build_dir, tmpdir, jobs=1, enable_coverage=False, args=None, combined_logs_len=0, failfast=False, use_term_control, create_cache=True):
    """Run the selected test scripts in parallel and exit with the result.

    :param test_list: script names, optionally with per-script arguments
    :param src_dir: source tree root containing test/functional/
    :param build_dir: build tree root containing test/cache
    :param tmpdir: base temporary directory for per-test datadirs
    :param jobs: number of scripts to run concurrently
    :param enable_coverage: collect RPC coverage data via RPCCoverage
    :param args: extra flags forwarded to every test script
    :param combined_logs_len: on failure, print this many combined log lines
    :param failfast: stop scheduling new tests after the first failure
    :param use_term_control: emit progress dots / clear-line control chars
    :param create_cache: pre-populate the blockchain cache before running
    """
    args = args or []

    # Warn if bitcoind is already running
    try:
        # pgrep exits with code zero when one or more matching processes found
        if subprocess.run(["pgrep", "-x", "particld"], stdout=subprocess.DEVNULL).returncode == 0:
            print("%sWARNING!%s There is already a particld process running on this system. Tests may fail unexpectedly due to resource contention!" % (BOLD[1], BOLD[0]))
    except OSError:
        # pgrep not supported
        pass

    # Warn if there is a cache directory
    cache_dir = "%s/test/cache" % build_dir
    if os.path.isdir(cache_dir):
        print("%sWARNING!%s There is a cache directory here: %s. If tests fail unexpectedly, try deleting the cache directory." % (BOLD[1], BOLD[0], cache_dir))

    # Test Framework Tests: run the framework's own unit tests first and
    # abort early if they fail.
    print("Running Unit Tests for Test Framework Modules")
    test_framework_tests = unittest.TestSuite()
    for module in TEST_FRAMEWORK_MODULES:
        test_framework_tests.addTest(unittest.TestLoader().loadTestsFromName("test_framework.{}".format(module)))
    result = unittest.TextTestRunner(verbosity=1, failfast=True).run(test_framework_tests)
    if not result.wasSuccessful():
        logging.debug("Early exiting after failure in TestFramework unit tests")
        sys.exit(False)

    tests_dir = src_dir + '/test/functional/'

    flags = ['--cachedir={}'.format(cache_dir)] + args

    if enable_coverage:
        coverage = RPCCoverage()
        flags.append(coverage.flag)
        logging.debug("Initializing coverage directory at %s" % coverage.dir)
    else:
        coverage = None

    if len(test_list) > 1 and jobs > 1 and create_cache:
        # Populate cache
        try:
            subprocess.check_output([sys.executable, tests_dir + 'create_cache.py'] + flags + ["--tmpdir=%s/cache" % tmpdir])
        except subprocess.CalledProcessError as e:
            sys.stdout.buffer.write(e.output)
            raise

    # Run Tests
    job_queue = TestHandler(
        num_tests_parallel=jobs,
        tests_dir=tests_dir,
        tmpdir=tmpdir,
        test_list=test_list,
        flags=flags,
        use_term_control=use_term_control,
    )
    start_time = time.time()
    test_results = []

    max_len_name = len(max(test_list, key=len))
    test_count = len(test_list)
    all_passed = True
    i = 0
    while i < test_count:
        if failfast and not all_passed:
            break
        for test_result, testdir, stdout, stderr, skip_reason in job_queue.get_next():
            test_results.append(test_result)
            i += 1
            done_str = "{}/{} - {}{}{}".format(i, test_count, BOLD[1], test_result.name, BOLD[0])
            if test_result.status == "Passed":
                # --withstdout dumps output even for passing tests.
                if gArgs.withstdout:
                    print(BOLD[1] + 'stdout:\n' + BOLD[0] + stdout + '\n')
                    print(BOLD[1] + 'stderr:\n' + BOLD[0] + stderr + '\n')
                logging.debug("%s passed, Duration: %s s" % (done_str, test_result.time))
            elif test_result.status == "Skipped":
                logging.debug(f"{done_str} skipped ({skip_reason})")
            else:
                all_passed = False
                print("%s failed, Duration: %s s\n" % (done_str, test_result.time))
                print(BOLD[1] + 'stdout:\n' + BOLD[0] + stdout + '\n')
                print(BOLD[1] + 'stderr:\n' + BOLD[0] + stderr + '\n')
                if combined_logs_len and os.path.isdir(testdir):
                    # Print the final `combinedlogslen` lines of the combined logs
                    print('{}Combine the logs and print the last {} lines ...{}'.format(BOLD[1], combined_logs_len, BOLD[0]))
                    print('\n============')
                    print('{}Combined log for {}:{}'.format(BOLD[1], testdir, BOLD[0]))
                    print('============\n')
                    combined_logs_args = [sys.executable, os.path.join(tests_dir, 'combine_logs.py'), testdir]
                    if BOLD[0]:
                        combined_logs_args += ['--color']
                    combined_logs, _ = subprocess.Popen(combined_logs_args, universal_newlines=True, stdout=subprocess.PIPE).communicate()
                    print("\n".join(deque(combined_logs.splitlines(), combined_logs_len)))
                if failfast:
                    logging.debug("Early exiting after test failure")
                    break

    print_results(test_results, max_len_name, (int(time.time() - start_time)))

    if coverage:
        coverage_passed = coverage.report_rpc_coverage()
        logging.debug("Cleaning up coverage data")
        coverage.cleanup()
    else:
        coverage_passed = True

    # Clear up the temp directory if all subdirectories are gone
    if not os.listdir(tmpdir):
        os.rmdir(tmpdir)

    all_passed = all_passed and coverage_passed

    # Clean up dangling processes if any. This may only happen with --failfast option.
    # Killing the process group will also terminate the current process but that is
    # not an issue
    if not os.getenv("CI_FAILFAST_TEST_LEAVE_DANGLING") and len(job_queue.jobs):
        os.killpg(os.getpgid(0), signal.SIGKILL)

    sys.exit(not all_passed)
def print_results(test_results, max_len_name, runtime):
    """Render and print the final results table.

    Sorts results (passed, skipped, failed), prints one row per test plus an
    accumulated "ALL" summary row and the overall wall-clock runtime.
    """
    test_results.sort(key=TestResult.sort_key)

    header = "%s | %s | %s\n\n" % ("TEST".ljust(max_len_name), "STATUS ", "DURATION")
    pieces = ["\n", BOLD[1], header, BOLD[0]]

    passed_all = True
    total_time = 0
    for result in test_results:
        passed_all = passed_all and result.was_successful
        total_time += result.time
        result.padding = max_len_name
        pieces.append(str(result))

    status = TICK + "Passed" if passed_all else CROSS + "Failed"
    if not passed_all:
        pieces.append(RED[1])
    summary = "\n%s | %s | %s s (accumulated) \n" % ("ALL".ljust(max_len_name), status.ljust(9), total_time)
    pieces.append(BOLD[1] + summary + BOLD[0])
    if not passed_all:
        pieces.append(RED[0])
    pieces.append("Runtime: %s s\n" % runtime)

    print("".join(pieces))
class TestHandler:
    """
    Trigger the test scripts passed in via the list.

    Maintains up to ``num_tests_parallel`` concurrently running subprocesses,
    each with its own tmpdir and spooled stdout/stderr capture; get_next()
    harvests finished processes.
    """

    def __init__(self, *, num_tests_parallel, tests_dir, tmpdir, test_list, flags, use_term_control):
        assert num_tests_parallel >= 1

        self.num_jobs = num_tests_parallel
        self.tests_dir = tests_dir
        self.tmpdir = tmpdir
        self.test_list = test_list
        self.flags = flags
        self.num_running = 0
        # Running jobs: (name, start_time, Popen, testdir, log_out, log_err).
        self.jobs = []
        self.use_term_control = use_term_control

    def get_next(self):
        """Top up the running-job pool, then block until at least one job
        finishes; return a list of
        (TestResult, testdir, stdout, stderr, skip_reason) tuples."""
        while self.num_running < self.num_jobs and self.test_list:
            # Add tests
            self.num_running += 1
            test = self.test_list.pop(0)
            # portseed is taken from the remaining queue length, so it is
            # distinct for each started test; presumably the framework uses
            # it to derive non-conflicting ports -- confirm in test_framework.
            portseed = len(self.test_list)
            portseed_arg = ["--portseed={}".format(portseed)]
            log_stdout = tempfile.SpooledTemporaryFile(max_size=2**16)
            log_stderr = tempfile.SpooledTemporaryFile(max_size=2**16)
            test_argv = test.split()
            testdir = "{}/{}_{}".format(self.tmpdir, re.sub(".py$", "", test_argv[0]), portseed)
            tmpdir_arg = ["--tmpdir={}".format(testdir)]
            self.jobs.append((test,
                              time.time(),
                              subprocess.Popen([sys.executable, self.tests_dir + test_argv[0]] + test_argv[1:] + self.flags + portseed_arg + tmpdir_arg,
                                               universal_newlines=True,
                                               stdout=log_stdout,
                                               stderr=log_stderr),
                              testdir,
                              log_stdout,
                              log_stderr))
        if not self.jobs:
            raise IndexError('pop from empty list')

        # Print remaining running jobs when all jobs have been started.
        if not self.test_list:
            print("Remaining jobs: [{}]".format(", ".join(j[0] for j in self.jobs)))

        dot_count = 0
        while True:
            # Return all procs that have finished, if any. Otherwise sleep until there is one.
            time.sleep(.5)
            ret = []
            for job in self.jobs:
                (name, start_time, proc, testdir, log_out, log_err) = job
                if proc.poll() is not None:
                    log_out.seek(0), log_err.seek(0)
                    [stdout, stderr] = [log_file.read().decode('utf-8') for log_file in (log_out, log_err)]
                    log_out.close(), log_err.close()
                    skip_reason = None
                    # Any stderr output downgrades a "passed" exit code to Failed.
                    if proc.returncode == TEST_EXIT_PASSED and stderr == "":
                        status = "Passed"
                    elif proc.returncode == TEST_EXIT_SKIPPED:
                        status = "Skipped"
                        skip_reason = re.search(r"Test Skipped: (.*)", stdout).group(1)
                    else:
                        status = "Failed"
                    self.num_running -= 1
                    self.jobs.remove(job)
                    if self.use_term_control:
                        # Erase the progress dots printed while waiting.
                        clearline = '\r' + (' ' * dot_count) + '\r'
                        print(clearline, end='', flush=True)
                    dot_count = 0
                    ret.append((TestResult(name, status, int(time.time() - start_time)), testdir, stdout, stderr, skip_reason))
            if ret:
                return ret
            if self.use_term_control:
                print('.', end='', flush=True)
                dot_count += 1
class TestResult():
    """Outcome of a single test script.

    Stores the script name, a status string ("Passed", "Skipped" or
    "Failed"), the elapsed wall-clock seconds, and the column padding used
    when rendering the result as a table row.
    """
    def __init__(self, name, status, time):
        self.name = name
        self.status = status
        self.time = time
        self.padding = 0

    def sort_key(self):
        # Order groups as passed < skipped < failed; alphabetical
        # (case-insensitive) within each group.
        ranks = {"Passed": 0, "Skipped": 1, "Failed": 2}
        if self.status in ranks:
            return ranks[self.status], self.name.lower()

    def __repr__(self):
        if self.status == "Passed":
            color, glyph = GREEN, TICK
        elif self.status == "Failed":
            color, glyph = RED, CROSS
        elif self.status == "Skipped":
            color, glyph = DEFAULT, CIRCLE
        row = "%s | %s%s | %s s\n" % (self.name.ljust(self.padding), glyph, self.status.ljust(7), self.time)
        return color[1] + row + color[0]

    @property
    def was_successful(self):
        # Skipped still counts as success; only an explicit failure does not.
        return self.status != "Failed"
def check_script_prefixes():
    """Check that test scripts start with one of the allowed name prefixes."""
    allowed = re.compile("^(example|feature|interface|mempool|mining|p2p|rpc|wallet|tool)_")
    offenders = [name for name in ALL_SCRIPTS if not allowed.match(name)]
    if not offenders:
        return
    print("%sERROR:%s %d tests not meeting naming conventions:" % (BOLD[1], BOLD[0], len(offenders)))
    print(" %s" % ("\n ".join(sorted(offenders))))
    raise AssertionError("Some tests are not following naming convention!")
def check_script_list(*, src_dir, fail_on_warn):
    """Check scripts directory.

    Check that there are no scripts in the functional tests directory which
    are not listed in ALL_SCRIPTS or NON_SCRIPTS (and therefore never run by
    test_runner.py).

    :param src_dir: source tree root containing test/functional/
    :param fail_on_warn: exit with status 1 (CI) instead of only warning
    """
    script_dir = src_dir + '/test/functional/'
    # Set comprehension instead of set([...]) (flake8-comprehensions C403).
    python_files = {test_file for test_file in os.listdir(script_dir) if test_file.endswith(".py")}
    # Strip per-entry arguments ("wallet_hd.py --descriptors") before comparing.
    listed_scripts = {entry.split()[0] for entry in ALL_SCRIPTS + NON_SCRIPTS}
    # Sorted for deterministic warning output (set difference is unordered).
    missed_tests = sorted(python_files - listed_scripts)
    if missed_tests:
        print("%sWARNING!%s The following scripts are not being run: %s. Check the test lists in test_runner.py." % (BOLD[1], BOLD[0], str(missed_tests)))
        if fail_on_warn:
            # On CI this warning is an error to prevent merging incomplete commits into master
            sys.exit(1)
class RPCCoverage():
    """
    Coverage reporting utilities for test_runner.

    Coverage calculation works by having each test script subprocess write
    coverage files into a particular directory. These files contain the RPC
    commands invoked during testing, as well as a complete listing of RPC
    commands per `bitcoin-cli help` (`rpc_interface.txt`).

    After all tests complete, the commands run are combined and diff'd against
    the complete list to calculate uncovered RPC commands.

    See also: test/functional/test_framework/coverage.py
    """
    def __init__(self):
        # Fresh scratch directory; test scripts are pointed at it via `flag`.
        self.dir = tempfile.mkdtemp(prefix="coverage")
        self.flag = '--coveragedir=%s' % self.dir

    def report_rpc_coverage(self):
        """
        Print out RPC commands that were unexercised by tests.
        Returns True when everything is covered, False otherwise.
        """
        uncovered = self._get_uncovered_rpc_commands()
        if not uncovered:
            print("All RPC commands covered.")
            return True
        print("Uncovered RPC commands:")
        print("".join((" - %s\n" % command) for command in sorted(uncovered)))
        return False

    def cleanup(self):
        """Remove the coverage scratch directory and everything in it."""
        return shutil.rmtree(self.dir)

    def _get_uncovered_rpc_commands(self):
        """
        Return a set of currently untested RPC commands.
        """
        # This is shared from `test/functional/test_framework/coverage.py`
        reference_filename = 'rpc_interface.txt'
        coverage_file_prefix = 'coverage.'

        reference_path = os.path.join(self.dir, reference_filename)
        if not os.path.isfile(reference_path):
            raise RuntimeError("No coverage reference found")

        with open(reference_path, 'r', encoding="utf8") as reference_file:
            all_cmds = {line.strip() for line in reference_file.readlines()}

        # Consider RPC generate covered, because it is overloaded in
        # test_framework/test_node.py and not seen by the coverage check.
        covered_cmds = {'generate'}
        for root, _, files in os.walk(self.dir):
            for fname in files:
                if fname.startswith(coverage_file_prefix):
                    with open(os.path.join(root, fname), 'r', encoding="utf8") as coverage_file:
                        covered_cmds.update(line.strip() for line in coverage_file.readlines())

        return all_cmds - covered_cmds
# Script entry point; main() exits via sys.exit() with the overall result.
if __name__ == '__main__':
    main()
| {
"content_hash": "7ba4b506ae8316f405d0cf7d74bbaa40",
"timestamp": "",
"source": "github",
"line_count": 914,
"max_line_length": 205,
"avg_line_length": 39.16849015317287,
"alnum_prop": 0.6195251396648045,
"repo_name": "particl/particl-core",
"id": "3c8d17cc3c9e37f3c3718b532534dda7194c600b",
"size": "36023",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "test/functional/test_runner.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "28178"
},
{
"name": "Batchfile",
"bytes": "13"
},
{
"name": "C",
"bytes": "2889723"
},
{
"name": "C++",
"bytes": "13218778"
},
{
"name": "CMake",
"bytes": "29182"
},
{
"name": "Cap'n Proto",
"bytes": "1256"
},
{
"name": "Dockerfile",
"bytes": "1740"
},
{
"name": "HTML",
"bytes": "21833"
},
{
"name": "Java",
"bytes": "541"
},
{
"name": "M4",
"bytes": "229063"
},
{
"name": "Makefile",
"bytes": "159386"
},
{
"name": "Objective-C++",
"bytes": "5486"
},
{
"name": "Python",
"bytes": "3388224"
},
{
"name": "QMake",
"bytes": "1276"
},
{
"name": "Sage",
"bytes": "59728"
},
{
"name": "Scheme",
"bytes": "26427"
},
{
"name": "Shell",
"bytes": "190057"
}
],
"symlink_target": ""
} |
"""OAuth utility functions"""
import logging
from datetime import datetime
from django.conf import settings
from requests_oauthlib import OAuth2Session
from allauth.socialaccount.models import SocialAccount
DEFAULT_PRIVACY_LEVEL = getattr(settings, 'DEFAULT_PRIVACY_LEVEL', 'public')
log = logging.getLogger(__name__)
class Service(object):
    """Service mapping for local accounts

    :param user: User to use in token lookup and session creation
    :param account: :py:class:`SocialAccount` instance for user
    """

    # Set by subclasses: the allauth provider adapter class whose
    # `provider_id` is used for account lookups (see `for_user`).
    adapter = None
    # Set by subclasses; not referenced in the code visible here --
    # presumably a pattern for matching provider URLs, confirm in subclasses.
    url_pattern = None
    def __init__(self, user, account):
        # OAuth session is created lazily by get_session()/create_session().
        self.session = None
        self.user = user
        self.account = account
@classmethod
def for_user(cls, user):
"""Return a list of instances if user has an account for the provider"""
try:
accounts = SocialAccount.objects.filter(
user=user,
provider=cls.adapter.provider_id
)
return [cls(user=user, account=account) for account in accounts]
except SocialAccount.DoesNotExist:
return []
    def get_adapter(self):
        """Return the allauth adapter class configured on this service."""
        return self.adapter
    @property
    def provider_id(self):
        """Provider id string of the configured adapter."""
        return self.get_adapter().provider_id
def get_session(self):
if self.session is None:
self.create_session()
return self.session
def create_session(self):
"""Create OAuth session for user
This configures the OAuth session based on the :py:class:`SocialToken`
attributes. If there is an ``expires_at``, treat the session as an auto
renewing token. Some providers expire tokens after as little as 2
hours.
"""
token = self.account.socialtoken_set.first()
if token is None:
return None
token_config = {
'access_token': str(token.token),
'token_type': 'bearer',
}
if token.expires_at is not None:
token_expires = (token.expires_at - datetime.now()).total_seconds()
token_config.update({
'refresh_token': str(token.token_secret),
'expires_in': token_expires,
})
self.session = OAuth2Session(
client_id=token.app.client_id,
token=token_config,
auto_refresh_kwargs={
'client_id': token.app.client_id,
'client_secret': token.app.secret,
},
auto_refresh_url=self.get_adapter().access_token_url,
token_updater=self.token_updater(token)
)
return self.session or None
def token_updater(self, token):
"""Update token given data from OAuth response
Expect the following response into the closure::
{
u'token_type': u'bearer',
u'scopes': u'webhook repository team account',
u'refresh_token': u'...',
u'access_token': u'...',
u'expires_in': 3600,
u'expires_at': 1449218652.558185
}
"""
def _updater(data):
token.token = data['access_token']
token.expires_at = datetime.fromtimestamp(data['expires_at'])
token.save()
log.info('Updated token %s:', token)
return _updater
def sync(self):
raise NotImplementedError
def create_repository(self, fields, privacy=DEFAULT_PRIVACY_LEVEL,
organization=None):
raise NotImplementedError
def create_organization(self, fields):
raise NotImplementedError
def setup_webhook(self, project):
raise NotImplementedError
@classmethod
def is_project_service(cls, project):
"""Determine if this is the service the project is using
.. note::
This should be deprecated in favor of attaching the
:py:class:`RemoteRepository` to the project instance. This is a slight
improvement on the legacy check for webhooks
"""
# TODO Replace this check by keying project to remote repos
return (
cls.url_pattern is not None and
cls.url_pattern.search(project.repo) is not None
)
| {
"content_hash": "dd55466a408537025427602ae5622d6c",
"timestamp": "",
"source": "github",
"line_count": 141,
"max_line_length": 82,
"avg_line_length": 30.340425531914892,
"alnum_prop": 0.5897615708274895,
"repo_name": "tddv/readthedocs.org",
"id": "019a51faf9110d6dd291925786a4712a4192d993",
"size": "4278",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "readthedocs/oauth/services/base.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "4515"
},
{
"name": "CSS",
"bytes": "84305"
},
{
"name": "HTML",
"bytes": "236112"
},
{
"name": "JavaScript",
"bytes": "445655"
},
{
"name": "Makefile",
"bytes": "4594"
},
{
"name": "Python",
"bytes": "1146612"
}
],
"symlink_target": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.