code stringlengths 3 1.05M | repo_name stringlengths 5 104 | path stringlengths 4 251 | language stringclasses 1 value | license stringclasses 15 values | size int64 3 1.05M |
|---|---|---|---|---|---|
"""Mock UI tests."""
def test_something_ui():
    """Mock UI test; intentionally a no-op placeholder."""
| not-raspberry/pytest_reorder | tests/sample_test_suites/nested/app_1/tests/ui/test_some_ui.py | Python | mit | 57 |
from django.contrib.gis.gdal import OGRGeomType
from django.db.backends.sqlite3.introspection import (
DatabaseIntrospection, FlexibleFieldLookupDict,
)
from django.utils import six
class GeoFlexibleFieldLookupDict(FlexibleFieldLookupDict):
    """
    Subclass whose `base_data_types_reverse` dict also maps every
    SpatiaLite geometry column type to `GeometryField`.
    """
    base_data_types_reverse = dict(
        FlexibleFieldLookupDict.base_data_types_reverse,
        point='GeometryField',
        linestring='GeometryField',
        polygon='GeometryField',
        multipoint='GeometryField',
        multilinestring='GeometryField',
        multipolygon='GeometryField',
        geometrycollection='GeometryField',
    )
class SpatiaLiteIntrospection(DatabaseIntrospection):
    """Database introspection for SpatiaLite, aware of geometry columns."""
    # Geometry-aware lookup dict so geometry columns map to GeometryField.
    data_types_reverse = GeoFlexibleFieldLookupDict()
    def get_geometry_type(self, table_name, geo_col):
        """
        Return a (field_type, field_params) tuple for the geometry column
        `geo_col` of `table_name`, based on the SpatiaLite
        `geometry_columns` metadata table.

        Raises an Exception when no matching geometry column is found.
        """
        cursor = self.connection.cursor()
        try:
            # Querying the `geometry_columns` table to get additional metadata.
            cursor.execute('SELECT coord_dimension, srid, geometry_type '
                           'FROM geometry_columns '
                           'WHERE f_table_name=%s AND f_geometry_column=%s',
                           (table_name, geo_col))
            row = cursor.fetchone()
            if not row:
                raise Exception('Could not find a geometry column for "%s"."%s"' %
                                (table_name, geo_col))
            # OGRGeomType does not require GDAL and makes it easy to convert
            # from OGC geom type name to Django field.
            ogr_type = row[2]
            if isinstance(ogr_type, six.integer_types) and ogr_type > 1000:
                # Spatialite versions >= 4 use the new SFSQL 1.2 offsets
                # 1000 (Z), 2000 (M), and 3000 (ZM) to indicate the presence of
                # higher dimensional coordinates (M not yet supported by Django).
                ogr_type = ogr_type % 1000 + OGRGeomType.wkb25bit
            field_type = OGRGeomType(ogr_type).django
            # Getting any GeometryField keyword arguments that are not the default.
            dim = row[0]
            srid = row[1]
            field_params = {}
            if srid != 4326:
                field_params['srid'] = srid
            # `coord_dimension` may be an int (3) or a string such as 'XYZ'.
            if (isinstance(dim, six.string_types) and 'Z' in dim) or dim == 3:
                field_params['dim'] = 3
        finally:
            # Always release the cursor, even when the lookup fails.
            cursor.close()
        return field_type, field_params
    def get_indexes(self, cursor, table_name):
        """
        Return the standard indexes plus an entry for every geometry column
        of `table_name` that has a spatial index enabled.
        """
        indexes = super(SpatiaLiteIntrospection, self).get_indexes(cursor, table_name)
        cursor.execute('SELECT f_geometry_column '
                       'FROM geometry_columns '
                       'WHERE f_table_name=%s AND spatial_index_enabled=1', (table_name,))
        for row in cursor.fetchall():
            indexes[row[0]] = {'primary_key': False, 'unique': False}
        return indexes
| aisipos/django | django/contrib/gis/db/backends/spatialite/introspection.py | Python | bsd-3-clause | 3,027 |
# GNU Enterprise Forms - GF Object Hierarchy - Box
#
# Copyright 2001-2007 Free Software Foundation
#
# This file is part of GNU Enterprise
#
# GNU Enterprise is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation; either
# version 2, or (at your option) any later version.
#
# GNU Enterprise is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public
# License along with program; see the file COPYING. If not,
# write to the Free Software Foundation, Inc., 59 Temple Place
# - Suite 330, Boston, MA 02111-1307, USA.
#
# $Id: GFVBox.py,v 1.3 2008/11/04 20:14:16 oleg Exp $
"""
Logical box support
"""
from src.gnue.forms.GFObjects import GFBox
__all__ = ['GFVBox']
# =============================================================================
# <vbox>
# =============================================================================
class GFVBox(GFBox):
    """Logical vertical box container used in GNUe form layouts."""
    # -------------------------------------------------------------------------
    # Constructor
    # -------------------------------------------------------------------------
    def __init__(self, parent=None):
        # Register with the GF object tree under the type name "GFVBox".
        GFBox.__init__(self, parent, "GFVBox")
| HarmonyEnterpriseSolutions/harmony-platform | src/gnue/forms/GFObjects/GFVBox.py | Python | gpl-2.0 | 1,438 |
from unittest import TestCase
import deepchem as dc
import numpy as np
from deepchem.data import NumpyDataset
from deepchem.data.datasets import Databag
from deepchem.models.tensorgraph.layers import Dense, ReduceMean, SoftMax, SoftMaxCrossEntropy
from deepchem.models.tensorgraph.layers import Feature, Label, Reshape
from deepchem.models.tensorgraph.layers import ReduceSquareDifference
from nose.tools import assert_true
from flaky import flaky
class TestGeneratorEvaluator(TestCase):
  """Tests for TensorGraph.evaluate_generator on small synthetic datasets."""

  @flaky
  def test_compute_model_performance_multitask_classifier(self):
    """A two-task classifier on separable data should score high ROC-AUC."""
    n_data_points = 20
    n_features = 1
    n_tasks = 2
    n_classes = 2
    # Half the points at -1 and half at +1 along the single feature.
    X = np.ones(shape=(n_data_points // 2, n_features)) * -1
    X1 = np.ones(shape=(n_data_points // 2, n_features))
    X = np.concatenate((X, X1))
    # Task 1 labels are the inverse of task 2 labels (one-hot encoded).
    class_1 = np.array([[0.0, 1.0] for x in range(int(n_data_points / 2))])
    class_0 = np.array([[1.0, 0.0] for x in range(int(n_data_points / 2))])
    y1 = np.concatenate((class_0, class_1))
    y2 = np.concatenate((class_1, class_0))
    y = np.stack([y1, y2], axis=1)
    dataset = NumpyDataset(X, y)
    features = Feature(shape=(None, n_features))
    label = Label(shape=(None, n_tasks, n_classes))
    dense = Dense(out_channels=n_tasks * n_classes, in_layers=[features])
    logits = Reshape(shape=(None, n_tasks, n_classes), in_layers=dense)
    output = SoftMax(in_layers=[logits])
    smce = SoftMaxCrossEntropy(in_layers=[label, logits])
    total_loss = ReduceMean(in_layers=smce)
    tg = dc.models.TensorGraph(learning_rate=0.01, batch_size=n_data_points)
    tg.add_output(output)
    tg.set_loss(total_loss)
    tg.fit(dataset, nb_epoch=1000)
    metric = dc.metrics.Metric(
        dc.metrics.roc_auc_score, np.mean, mode="classification")
    scores = tg.evaluate_generator(
        tg.default_generator(dataset), [metric],
        labels=[label],
        per_task_metrics=True)
    # scores[1] holds the per-task metric dict; compare its values.
    scores = list(scores[1].values())
    # Loosening atol to see if tests stop failing sporadically
    assert_true(np.all(np.isclose(scores, [1.0, 1.0], atol=0.50)))

  def test_compute_model_performance_singletask_classifier(self):
    """A single-task classifier on separable data should reach ROC-AUC ~1."""
    n_data_points = 20
    n_features = 10
    X = np.ones(shape=(int(n_data_points / 2), n_features)) * -1
    X1 = np.ones(shape=(int(n_data_points / 2), n_features))
    X = np.concatenate((X, X1))
    class_1 = np.array([[0.0, 1.0] for x in range(int(n_data_points / 2))])
    class_0 = np.array([[1.0, 0.0] for x in range(int(n_data_points / 2))])
    y = np.concatenate((class_0, class_1))
    dataset = NumpyDataset(X, y)
    features = Feature(shape=(None, n_features))
    label = Label(shape=(None, 2))
    dense = Dense(out_channels=2, in_layers=[features])
    output = SoftMax(in_layers=[dense])
    smce = SoftMaxCrossEntropy(in_layers=[label, dense])
    total_loss = ReduceMean(in_layers=smce)
    tg = dc.models.TensorGraph(learning_rate=0.1)
    tg.add_output(output)
    tg.set_loss(total_loss)
    tg.fit(dataset, nb_epoch=1000)
    metric = dc.metrics.Metric(
        dc.metrics.roc_auc_score, np.mean, mode="classification")
    scores = tg.evaluate_generator(
        tg.default_generator(dataset), [metric],
        labels=[label],
        per_task_metrics=True)
    scores = list(scores[1].values())
    assert_true(np.isclose(scores, [1.0], atol=0.05))

  def test_compute_model_performance_multitask_regressor(self):
    """A two-task linear regressor should fit constant targets closely."""
    random_seed = 42
    n_data_points = 20
    n_features = 2
    n_tasks = 2
    np.random.seed(seed=random_seed)
    X = np.random.rand(n_data_points, n_features)
    # Constant targets: 0.5 for task 1, -0.5 for task 2.
    y1 = np.array([0.5 for x in range(n_data_points)])
    y2 = np.array([-0.5 for x in range(n_data_points)])
    y = np.stack([y1, y2], axis=1)
    dataset = NumpyDataset(X, y)
    features = Feature(shape=(None, n_features))
    label = Label(shape=(None, n_tasks))
    dense = Dense(out_channels=n_tasks, in_layers=[features])
    loss = ReduceSquareDifference(in_layers=[dense, label])
    tg = dc.models.TensorGraph(random_seed=random_seed, learning_rate=0.1)
    tg.add_output(dense)
    tg.set_loss(loss)
    tg.fit(dataset, nb_epoch=1000)
    metric = [
        dc.metrics.Metric(
            dc.metrics.mean_absolute_error, np.mean, mode="regression"),
    ]
    scores = tg.evaluate_generator(
        tg.default_generator(dataset),
        metric,
        labels=[label],
        per_task_metrics=True)
    scores = list(scores[1].values())
    assert_true(np.all(np.isclose(scores, [0.0, 0.0], atol=1.0)))

  def test_compute_model_performance_singletask_regressor(self):
    """A single-task regressor fed through a Databag generator."""
    n_data_points = 20
    n_features = 2
    X = np.random.rand(n_data_points, n_features)
    y1 = np.expand_dims(np.array([0.5 for x in range(n_data_points)]), axis=-1)
    X = NumpyDataset(X)
    ys = [NumpyDataset(y1)]
    # Databag pairs each graph input layer with its dataset.
    databag = Databag()
    features = Feature(shape=(None, n_features))
    databag.add_dataset(features, X)
    outputs = []
    losses = []
    labels = []
    # Written as a loop so it generalizes to multiple tasks; here just one.
    for i in range(1):
      label = Label(shape=(None, 1))
      dense = Dense(out_channels=1, in_layers=[features])
      loss = ReduceSquareDifference(in_layers=[dense, label])
      outputs.append(dense)
      losses.append(loss)
      labels.append(label)
      databag.add_dataset(label, ys[i])
    total_loss = ReduceMean(in_layers=losses)
    tg = dc.models.TensorGraph(mode="regression", learning_rate=0.1)
    for output in outputs:
      tg.add_output(output)
    tg.set_loss(total_loss)
    tg.fit_generator(
        databag.iterbatches(
            epochs=1000, batch_size=tg.batch_size, pad_batches=True))
    metric = [
        dc.metrics.Metric(
            dc.metrics.mean_absolute_error, np.mean, mode="regression"),
    ]
    scores = tg.evaluate_generator(
        databag.iterbatches(batch_size=tg.batch_size),
        metric,
        labels=labels,
        per_task_metrics=True)
    scores = list(scores[1].values())
    assert_true(np.all(np.isclose(scores, [0.0], atol=0.5)))
| ktaneishi/deepchem | deepchem/utils/test/test_generator_evaluator.py | Python | mit | 5,930 |
#!/usr/bin/env python
# Copyright 2017 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
def run_quickstart(override_values=None):
    """Print the data sources supported by the BigQuery Data Transfer Service.

    Args:
        override_values: Optional mapping used by the test harness to replace
            hard-coded sample values (currently only ``"project_id"``).
    """
    # Avoid the mutable-default-argument pitfall: normalize None to a dict.
    if override_values is None:
        override_values = {}

    # [START bigquerydatatransfer_quickstart]
    from google.cloud import bigquery_datatransfer

    client = bigquery_datatransfer.DataTransferServiceClient()

    # TODO: Update to your project ID.
    project_id = "my-project"
    # [END bigquerydatatransfer_quickstart]

    # To facilitate testing, we replace values with alternatives
    # provided by the testing harness.
    project_id = override_values.get("project_id", project_id)

    # [START bigquerydatatransfer_quickstart]
    # Get the full path to your project.
    parent = client.common_project_path(project_id)

    print("Supported Data Sources:")

    # Iterate over all possible data sources.
    for data_source in client.list_data_sources(parent=parent):
        print("{}:".format(data_source.display_name))
        print("\tID: {}".format(data_source.data_source_id))
        print("\tFull path: {}".format(data_source.name))
        print("\tDescription: {}".format(data_source.description))
    # [END bigquerydatatransfer_quickstart]
if __name__ == "__main__":
    # CLI entry point: the first argument is the Google Cloud project ID.
    run_quickstart(override_values={"project_id": sys.argv[1]})
| googleapis/python-bigquery-datatransfer | samples/snippets/quickstart.py | Python | apache-2.0 | 1,759 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-10-06 16:11
from __future__ import unicode_literals
from django.db import migrations
import share.robot
class Migration(migrations.Migration):
    """Create the robot user, OAuth token and schedule for the edu.ucf source."""
    dependencies = [
        ('share', '0001_initial'),
        ('djcelery', '0001_initial'),
    ]
    operations = [
        # NOTE(review): no reverse_code is supplied for these RunPython
        # operations, so this migration is irreversible.
        migrations.RunPython(
            code=share.robot.RobotUserMigration('edu.ucf'),
        ),
        migrations.RunPython(
            code=share.robot.RobotOauthTokenMigration('edu.ucf'),
        ),
        migrations.RunPython(
            code=share.robot.RobotScheduleMigration('edu.ucf'),
        ),
    ]
| zamattiac/SHARE | providers/edu/ucf/migrations/0001_initial.py | Python | apache-2.0 | 646 |
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
from .scene import SceneManager | jsa4000/OpenGL-Python | zero/system/geometry/__init__.py | Python | apache-2.0 | 121 |
#!/usr/bin/env python
# Author: Marcos Almeida Jr
#
# This file is part of Sick Beard.
#
# Sick Beard is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Sick Beard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.
'''
This script extracts downloaded torrent and notify sb.
Configure uTorrent to pass the following command line:
/path/to/Sick-Beard/autoProcessTV/utorrentToSickBeard.py "%I" "%D"
You need to have installed unrar or 7z on your shell path.
'''
import sys, os, re, subprocess,inspect,logging
import autoProcessTV, ConfigParser, glob
import threading
# Resolve the Sick Beard log file relative to this script's location.
scriptDir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
logfile = os.path.abspath(os.path.join(scriptDir, "..", "Logs", "sickbeard.log"))
loggerHeader = "UTORRENT-TO-SB :: "
# Log to both the console and the Sick Beard log file at DEBUG level.
logger = logging.getLogger('utorrentToSickbeard')
logger.setLevel(logging.DEBUG)
loggerFormat = logging.Formatter('%(asctime)s %(levelname)-8s %(message)s', '%b-%d %H:%M:%S')
loggerStd = logging.StreamHandler() #console output
loggerStd.setFormatter(loggerFormat)
loggerStd.setLevel(logging.DEBUG)
loggerHdlr = logging.FileHandler(logfile) #file output
loggerHdlr.setFormatter(loggerFormat)
loggerHdlr.setLevel(logging.DEBUG)
logger.addHandler(loggerStd)
logger.addHandler(loggerHdlr)
try:
    # uTorrent passes "%I" (info hash) and "%D" (directory); classify the
    # arguments by what they look like rather than by position.
    dir = None
    hash = ""
    for item in sys.argv:
        if os.path.isdir(item):
            dir = os.path.abspath(item)
        elif not os.path.isfile(item):
            hash = item
    if not dir:
        logger.warning(loggerHeader + "Parameters not supplied - is this being called from uTorrrent? Check the documentation on utorrentToSickBeard.py.")
        exit()
    logger.debug(loggerHeader + "Torrent Dir: " + dir)
    logger.debug(loggerHeader + "Torrent Hash: " + hash)
    logger.info(loggerHeader + "Searching files at " + os.path.join(dir, "*"))
    for file in glob.glob(os.path.join(dir, "*")):
        # Only handle archives and video files, skipping samples and subtitles.
        if file.lower().endswith(('.rar', '.zip', '.7z','.avi','.mkv','.mp4')) and 'sample' not in file.lower() and '/subs' not in file.lower():
            if file.lower().endswith(('.rar', '.zip', '.7z')):
                file = os.path.basename(file)
                filePath = os.path.join(dir, file)
                # Skip .part02+ volumes; extracting .part01 covers the whole set.
                if 'part' not in file.lower() or 'part01' in file.lower():
                    # -1 = not attempted/failed, -2 = extractor binary missing.
                    returnCode7z = -1
                    returnCodeUnrar = -1
                    # Try 7z first, then fall back to unrar.
                    try:
                        returnCode7z = subprocess.call(['7z', 'x', filePath, '-aos', '-o' + dir])
                    except:
                        returnCode7z = -2
                    if returnCode7z != 0 :
                        try:
                            returnCodeUnrar = subprocess.call(['unrar', 'x', filePath, dir])
                        except:
                            returnCodeUnrar = -2
                    if returnCode7z == -2 and returnCodeUnrar == -2:
                        logger.error(loggerHeader + "Cannot find 7z or unrar on your shell path")
                        sys.exit(1)
                    if returnCode7z != 0 and returnCodeUnrar != 0:
                        logger.error(loggerHeader + "Unable to extract {}".format(file))
                    else:
                        logger.info(loggerHeader + "Successfully extracted {}".format(file))
                        # Notify Sick Beard once the archive contents are available.
                        autoProcessTV.processEpisode(dir)
            else:
                # Plain video file: hand the directory straight to Sick Beard.
                autoProcessTV.processEpisode(dir)
    logger.info(loggerHeader + "Processing from uTorrent finished.")
    sys.exit(0)
except Exception, e:
    # NOTE(review): this also catches the SystemExit-adjacent flow via the
    # broad Exception handler; errors are logged and reported as exit code 1.
    logger.error(str(e))
    sys.exit(1)
| VeNoMouS/Sick-Beard | autoProcessTV/utorrentToSickBeard.py | Python | gpl-3.0 | 4,105 |
import colorsys
def scale_rgb_tuple(rgb, down=True):
    """Convert an RGB triple between 0-255 ints and 0-1 floats.

    With down=True (default), 0-255 components become floats rounded to two
    decimal places; otherwise 0-1 floats become truncated 0-255 ints.
    """
    if down:
        return tuple(round(float(component) / 255, 2) for component in rgb)
    return tuple(int(component * 255) for component in rgb)
def hex_to_rgb(hex_str):
    """Convert a hex color string (with or without a leading '#') to a
    tuple of 0-255 ints, e.g. 'FF8000' -> (255, 128, 0).
    """
    if hex_str.startswith('#'):
        hex_str = hex_str[1:]
    # range() works on both Python 2 and 3; xrange was Python-2-only.
    return tuple([int(hex_str[i:i + 2], 16) for i in range(0, len(hex_str), 2)])
def hex_to_rgb_float(hex_str):
    """Convert a hex color string to a tuple of 0-1 floats."""
    rgb_255 = hex_to_rgb(hex_str)
    return scale_rgb_tuple(rgb_255, down=True)
def rgb_to_hex(rgb):
    """Convert an iterable of 0-255 ints to an uppercase hex string."""
    return ''.join('{:02X}'.format(component) for component in rgb)
def hsv_to_hex(hsv):
    """Convert an (h, s, v) triple of 0-1 floats to an RGB hex string."""
    rgb_floats = colorsys.hsv_to_rgb(*hsv)
    return rgb_to_hex(scale_rgb_tuple(rgb_floats, down=False))
| ldmoray/KivyMeep | webcolors.py | Python | mit | 610 |
# Generated by Django 3.0.7 on 2020-08-03 08:42
from django.db import migrations, models
import weblate.utils.validators
class Migration(migrations.Migration):
    """Add the optional `language_aliases` field to Project."""
    dependencies = [
        ("trans", "0093_auto_20200730_1432"),
    ]
    operations = [
        migrations.AddField(
            model_name="project",
            name="language_aliases",
            field=models.CharField(
                default="",
                blank=True,
                help_text="Comma-separated list of language code mappings, for example: en_GB:en,en_US:en",
                max_length=200,
                validators=[weblate.utils.validators.validate_language_aliases],
                verbose_name="Language aliases",
            ),
        ),
    ]
| nijel/weblate | weblate/trans/migrations/0094_project_language_aliases.py | Python | gpl-3.0 | 752 |
"""Support for sending data to Emoncms."""
import logging
from datetime import timedelta
import requests
import voluptuous as vol
import homeassistant.helpers.config_validation as cv
from homeassistant.const import (
CONF_API_KEY,
CONF_WHITELIST,
CONF_URL,
STATE_UNKNOWN,
STATE_UNAVAILABLE,
CONF_SCAN_INTERVAL,
)
from homeassistant.helpers import state as state_helper
from homeassistant.helpers.event import track_point_in_time
from homeassistant.util import dt as dt_util
_LOGGER = logging.getLogger(__name__)
DOMAIN = "emoncms_history"
# Configuration key for the Emoncms input node the data is posted to.
CONF_INPUTNODE = "inputnode"
# Validated configuration: API key, base URL, input node, entity whitelist
# and an optional send interval in seconds (default 30).
CONFIG_SCHEMA = vol.Schema(
    {
        DOMAIN: vol.Schema(
            {
                vol.Required(CONF_API_KEY): cv.string,
                vol.Required(CONF_URL): cv.string,
                vol.Required(CONF_INPUTNODE): cv.positive_int,
                vol.Required(CONF_WHITELIST): cv.entity_ids,
                vol.Optional(CONF_SCAN_INTERVAL, default=30): cv.positive_int,
            }
        )
    },
    extra=vol.ALLOW_EXTRA,
)
def setup(hass, config):
    """Set up the Emoncms history component.

    Periodically posts the numeric states of whitelisted entities to an
    Emoncms server. Always returns True (setup cannot fail here).
    """
    conf = config[DOMAIN]
    whitelist = conf.get(CONF_WHITELIST)
    def send_data(url, apikey, node, payload):
        """Send payload data to Emoncms."""
        try:
            fullurl = "{}/input/post.json".format(url)
            data = {"apikey": apikey, "data": payload}
            parameters = {"node": node}
            req = requests.post(
                fullurl, params=parameters, data=data, allow_redirects=True, timeout=5
            )
        except requests.exceptions.RequestException:
            _LOGGER.error("Error saving data '%s' to '%s'", payload, fullurl)
        else:
            # Emoncms answers HTTP 200 on success; log anything else.
            if req.status_code != 200:
                _LOGGER.error(
                    "Error saving data %s to %s (http status code = %d)",
                    payload,
                    fullurl,
                    req.status_code,
                )
    def update_emoncms(time):
        """Send whitelisted entities states regularly to Emoncms."""
        payload_dict = {}
        for entity_id in whitelist:
            state = hass.states.get(entity_id)
            # Skip entities that are unknown, unavailable or empty.
            if state is None or state.state in (STATE_UNKNOWN, "", STATE_UNAVAILABLE):
                continue
            try:
                payload_dict[entity_id] = state_helper.state_as_number(state)
            except ValueError:
                # Non-numeric states cannot be posted; ignore them.
                continue
        if payload_dict:
            # Emoncms expects a "{key:value,...}" payload string.
            payload = "{%s}" % ",".join(
                "{}:{}".format(key, val) for key, val in payload_dict.items()
            )
            send_data(
                conf.get(CONF_URL),
                conf.get(CONF_API_KEY),
                str(conf.get(CONF_INPUTNODE)),
                payload,
            )
        # Re-schedule itself to run again after the configured interval.
        track_point_in_time(
            hass, update_emoncms, time + timedelta(seconds=conf.get(CONF_SCAN_INTERVAL))
        )
    # Kick off the first run immediately; subsequent runs self-schedule.
    update_emoncms(dt_util.utcnow())
    return True
| fbradyirl/home-assistant | homeassistant/components/emoncms_history/__init__.py | Python | apache-2.0 | 2,980 |
# coding=utf-8
import logging, commands, os, pexpect, sys, time
from app.modules.common.pipelines import *
from app.modules.common.tasks import *
from app.modules.common.errors import *
from app.modules.common.utils import *
from pexpect import *
log = logging.getLogger(__name__)
class ImportOraclePipeLine(Pipeline):
    """Pipeline that imports an Oracle dump into a freshly prepared schema."""
    def __init__(self, name):
        Pipeline.__init__(self, name)
        log.info(run('/bin/bash -c "echo $PATH"'))
        # Tasks execute in the order listed below.
        for task in (CheckOracleImportParams(),
                     CheckOracleProcessTask(),
                     CleanTempPathTask(),
                     CopyDumpFileTask(),
                     UnzipDumpFileTask(),
                     InitOracleUserTask(),
                     AfterImportTask()):
            self.add_task(task)
class CheckOracleImportParams(Task):
    """Validate the parameters required by the Oracle import pipeline."""
    def __init__(self):
        Task.__init__(self, 'CheckImportParams')
    def paramExists(self, key, params):
        """Return True when `key` is in `params` with a non-None, non-blank value."""
        keys = params.keys()
        if (key not in keys) or (params[key] is None) or ((str(params[key]).lstrip()) == ''):
            return False
        else:
            return True
    # NOTE(review): mutable default argument; callers are expected to always
    # pass `params` explicitly, and this method mutates it (sets defaults).
    def __do_execute__(self, params={'username': None,
                                     'password': None,
                                     'temp_path': None,
                                     'dump_path': None,
                                     'db_name': None,
                                     'clean_script_path': None,
                                     'tablespace': None}):
        """Check required params, fill in defaults and verify referenced paths.

        Raises TaskParamsError listing every problem found.
        """
        log.info('********************************************************')
        log.info("=======> CheckImportParams:Entered execute()")
        error_message=[]
        param_valid = True
        # Required parameters: collect all failures before raising.
        if not self.paramExists('username', params):
            param_valid = False
            error_message.append('params[username] must not empty')
        else:
            log.info("=======> params[username] is %s", params['username'])
        if not self.paramExists('password', params):
            param_valid = False
            error_message.append('params[password] must not empty')
        else:
            log.info("=======> params[password] is %s", params['password'])
        if not self.paramExists('temp_path', params):
            param_valid = False
            error_message.append('params[temp_path] must not empty')
        else:
            log.info("=======> params[temp_path] is %s", params['temp_path'])
        if not self.paramExists('dump_path', params):
            param_valid = False
            error_message.append('params[dump_path] must not empty')
        else:
            log.info("=======> params[dump_path] is %s", params['dump_path'])
        # Optional parameters default to values derived from `username`.
        if not self.paramExists('db_name', params):
            params['db_name'] = params['username']
        log.info("=======> params[db_name] is %s", params['db_name'])
        if not self.paramExists('clean_script_path', params):
            param_valid = False
            error_message.append('params[clean_script_path] must not empty')
        else:
            log.info("=======> params[clean_script_path] is %s", params['clean_script_path'])
        if not self.paramExists('tablespace', params):
            params['tablespace'] = params['username']
            log.info("=======> params[tablespace] is empty, set to the username : %s", params['tablespace'])
        else:
            log.info("=======> params[tablespace] is %s", params['tablespace'])
        if not self.paramExists('source_username', params):
            params['source_username'] = 'adempiere'
        log.info('=======> source username is : %s', params['source_username'])
        if not self.paramExists('source_tablespace', params):
            params['source_tablespace'] = 'TS_ADEMPIERE'
        log.info('=======> source tablespace is : %s', params['source_tablespace'])
        # Path checks only run once the parameter values themselves are valid.
        if param_valid and not os.path.exists(params['dump_path']):
            param_valid = False
            error_message.append('not found dump path[%s].' % params['dump_path'])
        if param_valid and not os.path.exists(params['temp_path']):
            param_valid = False
            error_message.append('not found temp path[%s].' % params['temp_path'])
        if param_valid and len(os.listdir(params['dump_path'])) <= 0:
            param_valid = False
            error_message.append('dump path[%s] is an empty directory..' % params['temp_path'])
        if param_valid and len(os.listdir(params['clean_script_path'])) <= 0:
            param_valid = False
            error_message.append('clean script path[%s] is an empty directory..' % params['clean_script_path'])
        if not param_valid:
            log.error("******** %s", error_message)
            raise TaskParamsError(error_message)
        log.info("=======> CheckImportParams:Exit execute()")
class CheckOracleProcessTask(Task):
    """Check that the Oracle background processes are running; attempt to
    start the listener and instance when they are not."""
    def __init__(self):
        # NOTE(review): registered as 'CheckMysqlProcess' although this task
        # checks Oracle -- looks like a copy/paste leftover; confirm before
        # renaming, other code may look tasks up by this name.
        Task.__init__(self, 'CheckMysqlProcess')
    def __do_execute__(self, params={}):
        log.info('********************************************************')
        log.info("=======> CheckMysqlProcess:Entered execute()")
        try:
            # Count 'ora_*' background processes; zero means Oracle is down.
            sh = '/bin/bash -c "ps -ef | grep ora_ | grep -v grep | wc -l"'
            log.info('=======> start check oracle proces, shell : %s', sh)
            output, status = run(sh, withexitstatus=1)
            log.info('=======> shell status: %s, output: %s', status, output)
            if status == 0 and int(output) != 0:
                log.info('=======> check oracle process success.')
            else:
                log.error('=======> Oracle process does not exist')
                log.info('=======> try start oracle')
                # Start the listener and the instance via sqlplus as sysdba.
                sqlplus = pexpect.spawn('su - oracle', timeout=10)
                sqlplus.logfile = sys.stdout
                sqlplus.sendline('lsnrctl start')
                sqlplus.sendline('sqlplus / as sysdba')
                sqlplus.expect('SQL>')
                sqlplus.sendline('set head off')
                sqlplus.expect('SQL>')
                sqlplus.sendline('set feedback off')
                sqlplus.expect('SQL>')
                sqlplus.sendline('startup')
                sqlplus.expect('SQL>')
                sqlplus.sendline('exit')
                sqlplus.close()
                # Re-check after attempting to start the database.
                log.info('=======> recheck oracle process, shell : %s', sh)
                output, status = run(sh, withexitstatus=1)
                log.info('=======> shell status: %s, output: %s', status, output)
                if status == 0 and int(output) != 0:
                    log.info('=======> check oracle process success.')
                else:
                    raise TaskExcecuteError('start oracle error')
        # NOTE(review): this broad handler also catches the TaskExcecuteError
        # raised just above and re-wraps it with a different message.
        except BaseException, e:
            log.error('=======> ' + e.message)
            raise TaskExcecuteError('execute shell failure, cause: %s.' % e.message)
        log.info("=======> CheckMysqlProcess:Exit execute()")
class CleanTempPathTask(Task):
    """Create the per-database temp directory and remove stale import files."""
    def __init__(self):
        Task.__init__(self, 'cleanTempPathTask')
    def __do_execute__(self, params={}):
        log.info('********************************************************')
        log.info("=======> cleanTempPathTask:Entered execute()")
        # Working directory is <temp_path>/<db_name>; create it if missing.
        path = '%s/%s' % (params['temp_path'], params['db_name'])
        sh = 'mkdir -p %s' % path
        log.info('=======> start execute shell : %s', sh)
        try:
            output, status = run(sh, withexitstatus=1)
            log.info('=======> shell status: %s, output: %s', status, output)
        except BaseException, e:
            log.error('=======> ' + e.message)
            raise TaskExcecuteError('execute shell failure.')
        log.info('=======> end execute shell')
        if status != 0:
            raise TaskExcecuteError('create temp path[%s] failure.', path)
        else:
            log.info('=======> create temp path[%s] success.', path)
        log.info('********************************************************')
        # Remove leftovers from any previous import run.
        sh = '/bin/bash -c "rm -f %s/*.zip %s/import.log %s/*.dmp %s/*.sql %s/*.txt"' % (path, path, path, path, path)
        log.info('=======> start execute shell : %s', sh)
        try:
            output, status = run(sh, withexitstatus=1)
            log.info('=======> shell status: %s, output: %s', status, output)
        except BaseException, e:
            log.error('=======> ' + e.message)
            raise TaskExcecuteError('execute shell failure, cause: %s.' % e.message)
        log.info('=======> end execute shell')
        if status != 0:
            raise TaskExcecuteError('clean temp path[%s] failure.', path)
        else:
            log.info('=======> clean temp path[%s] success.', path)
        log.info("=======> cleanTempPathTask:Exit execute()")
class CopyDumpFileTask(Task):
    """Copy the dump files from the source directory into the temp directory."""
    def __init__(self):
        Task.__init__(self, 'CopyDumpFileTask')
    def __do_execute__(self, params={}):
        log.info('********************************************************')
        log.info("=======> CopyDumpFileTask:Entered execute()")
        path = '%s/%s' % (params['temp_path'], params['db_name'])
        sh = '/bin/bash -c "cp %s/* %s"' % (params['dump_path'], path)
        log.info('=======> start execute shell : %s', sh)
        try:
            # Large dumps can take a while; allow up to 5 minutes.
            output, status = run(sh, withexitstatus=1, timeout=300)
            log.info('=======> shell status: %s, output: %s', status, output)
        except BaseException, e:
            log.error('=======> ' + e.message)
            raise TaskExcecuteError('execute shell failure, cause: %s.' % e.message)
        log.info('=======> end execute shell')
        if status != 0:
            raise TaskExcecuteError('copy dump file failure.')
        else:
            log.info('=======> copy dump file success.')
        log.info("=======> CopyDumpFileTask:Exit execute()")
class UnzipDumpFileTask(Task):
    """Unzip the dump archive in the temp directory and record its base name."""
    def __init__(self):
        Task.__init__(self, 'UnzipDumpFileTask')
    def __do_execute__(self, params={}):
        log.info('********************************************************')
        log.info("=======> UnzipDumpFileTask:Entered execute()")
        path = '%s/%s' % (params['temp_path'], params['db_name'])
        files = os.listdir(path)
        if len(files) == 0:
            log.error('dump file not exists')
        # Locate the (last) .zip archive among the copied files.
        dump_zip = None
        for file in files:
            if file.endswith('.zip'):
                dump_zip = file
        if not dump_zip:
            log.error('=======> Dump compressed file does not exist')
            raise TaskExcecuteError('Dump compressed file does not exist')
        # Later tasks use the archive's base name to find the .dmp file.
        params['import_dump_name'] = str(dump_zip).split('.zip')[0]
        log.info("---------->dump name %s", params['import_dump_name'])
        # -o overwrites existing files without prompting.
        sh = '/bin/bash -c "unzip -o %s/%s -d %s"' % (path, dump_zip, path)
        log.info('=======> start unzip, shell : %s', sh)
        try:
            output, status = run(sh, withexitstatus=1, timeout=600)
            log.info('=======> shell status: %s, output: %s', status, output)
            if status != 0:
                raise TaskExcecuteError('upzip dump file failure.')
            else:
                log.info('=======> upzip dump file success.')
        except BaseException, e:
            log.error('=======> ' + e.message)
            raise TaskExcecuteError('execute shell failure, cause: %s.' % e.message)
        # Remove the archive once extracted to save disk space.
        sh = '/bin/bash -c "rm -f %s/%s"' % (path, dump_zip)
        log.info('=======> start clean dump zip, shell : %s', sh)
        try:
            output, status = run(sh, withexitstatus=1)
            log.info('=======> shell status: %s, output: %s', status, output)
            if status != 0:
                raise TaskExcecuteError('clean dump zip failure.')
            else:
                log.info('=======> clean dump zip success.')
        except BaseException, e:
            log.error('=======> ' + e.message)
            raise TaskExcecuteError('execute shell failure, cause: %s.' % e.message)
        # Open permissions so the oracle OS user can read the extracted files.
        sh = '/bin/bash -c "chmod -R 777 %s"' % path
        log.info('=======> start execute shell : %s', sh)
        try:
            output, status = run(sh, withexitstatus=1)
            log.info('=======> shell status: %s, output: %s', status, output)
        except BaseException, e:
            log.error('=======> ' + e.message)
            raise TaskExcecuteError('execute shell failure, cause: %s.' % e.message)
        log.info('=======> end execute shell')
        if status != 0:
            raise TaskExcecuteError('chmod 777 path[%s] failure.', path)
        else:
            log.info('=======> chmod 777 path[%s] success.', path)
        log.info("=======> UnzipDumpFileTask:Exit execute()")
class InitOracleUserTask(Task):
    """Recreate an Oracle schema and import a Data Pump dump into it.

    Steps: if the target user already exists, lock it and kill its live
    sessions via a spooled kill-session script; drop the user and its
    tablespace; recreate both; grant the required privileges; (re)create a
    dump DIRECTORY object; finally run ``impdp`` with schema/tablespace
    remapping and wait for job completion.
    """

    def __init__(self):
        # Bug fix: the original passed 'CheckMysqlProcess' here -- a
        # copy/paste slip from another task. Use this task's own name so
        # log output and task bookkeeping identify it correctly.
        Task.__init__(self, 'InitOracleUserTask')

    def replaceInvalidateChars(self, temp_path, username, regex):
        """Run ``sed -i <regex>`` over ``<temp_path>/<username>_disconnect.sql``.

        Used to strip sqlplus prompt noise ("SQL>", "new   ...", "old   ...")
        from the spooled kill-session script before it is replayed.

        :param temp_path: directory holding the spooled disconnect script
        :param username: schema owner the script was spooled for
        :param regex: sed substitution expression applied in-place
        :raises TaskExcecuteError: when sed exits non-zero or the shell fails
        """
        sh = '/bin/bash -c "sed -i \'%s\' %s/%s_disconnect.sql"' % (regex, temp_path, username)
        log.info('=======> start Replace invalid character, shell : %s', sh)
        try:
            output, status = run(sh, withexitstatus=1)
            log.info('=======> shell status: %s, output: %s', status, output)
            if status != 0:
                # NOTE: this raise is caught by the except below and
                # re-wrapped, so callers only ever see the generic
                # 'execute shell failure' message.
                raise TaskExcecuteError('Replace invalid character failure.')
            else:
                log.info('=======> Replace invalid character success.')
        except BaseException as e:
            log.error('=======> ' + e.message)
            raise TaskExcecuteError('execute shell failure, cause: %s.' % e.message)

    def __do_execute__(self, params=None):
        """Drop/recreate user ``params['username']`` and import the dump.

        Expected ``params`` keys: db_name, temp_path, username, password,
        tablespace, source_username, source_tablespace, import_dump_name.

        :raises TaskExcecuteError: on any sqlplus/impdp failure
        """
        if params is None:
            # Avoid the shared mutable-default-argument pitfall.
            params = {}
        log.info('********************************************************')
        log.info("=======> InitOracleUserTask:Entered execute()")
        temp_path = '%s/%s' % (params['temp_path'], params['db_name'])
        username = str(params['username']).upper()
        password = params['password']
        tablespace = params['tablespace']
        dump_path_name = username + '_dump_path'
        source_username = params['source_username']
        source_tablespace = params['source_tablespace']
        try:
            sqlplus = pexpect.spawn('su - oracle', timeout=None)
            sqlplus.logfile = sys.stdout
            sqlplus.expect('$')
            sqlplus.sendline('sqlplus / as sysdba')
            sqlplus.expect('SQL>')
            sqlplus.sendline('set head off')
            sqlplus.expect('SQL>')
            sqlplus.sendline('set feedback off')
            sqlplus.expect('SQL>')
            # Probe for an existing user; the appended '_' makes the result
            # row distinguishable from the echo of the query itself.
            sqlplus.sendline('SELECT USERNAME || \'_\' FROM ALL_USERS where USERNAME = \'%s\';' % username)
            # NOTE(review): the spawn was created with timeout=None, so the
            # pexpect.TIMEOUT branch (user absent) may never actually fire
            # -- confirm the intended timeout semantics.
            index = sqlplus.expect([username + '_', pexpect.TIMEOUT], timeout=None)
            log.info('=====> index : %s', sqlplus.before)
            log.info('=====> index : %s', index)
            if index == 0:
                # User exists: lock it and kill all of its live sessions via
                # a spooled script (cleaned of sqlplus prompt noise first).
                sqlplus.sendline('alter user %s account lock;' % username)
                sqlplus.expect('SQL>')
                sqlplus.sendline('spool %s/%s_disconnect.sql;' % (temp_path, username))
                sqlplus.sendline('select \'alter system kill session \'\'\' || sid ||\',\'||serial#||\'\'\' immediate;\' from v$session where username = UPPER(\'%s\');' % username)
                sqlplus.expect('SQL>')
                sqlplus.sendline('spool off')
                sqlplus.expect('SQL>')
                self.replaceInvalidateChars(temp_path, username, 's/SQL>.*//g')
                self.replaceInvalidateChars(temp_path, username, 's/new\s\{3\}.*//g')
                self.replaceInvalidateChars(temp_path, username, 's/old\s\{3\}.*//g')
                sqlplus.sendline('@%s/%s_disconnect.sql;' % (temp_path, username))
                sqlplus.expect('SQL>', timeout=None)
            elif index == 1:
                log.info('user[%s] not exists.', username)
            # Rebuild the schema from scratch; the DROPs are harmless noise
            # when the user/tablespace do not exist yet.
            sqlplus.sendline('DROP USER %s CASCADE;' % username)
            sqlplus.expect('SQL>', timeout=None)
            sqlplus.sendline('DROP TABLESPACE %s INCLUDING CONTENTS AND DATAFILES;' % tablespace)
            sqlplus.expect('SQL>', timeout=None)
            sqlplus.sendline(
                'create tablespace %s datafile \'/u01/app/oracle/oradata/orcldb/%s\' size 400M autoextend on next 10m maxsize unlimited;' % (
                    tablespace, tablespace))
            sqlplus.expect('SQL>', timeout=None)
            sqlplus.sendline('CREATE USER %s IDENTIFIED BY %s default tablespace %s account unlock;' % (username,
                                                                                                       password,
                                                                                                       tablespace))
            sqlplus.expect('SQL>', timeout=None)
            sqlplus.sendline('GRANT CONNECT,RESOURCE,DBA,UNLIMITED TABLESPACE,CREATE TABLE TO %s;' % username)
            sqlplus.expect('SQL>', timeout=None)
            sqlplus.sendline('ALTER USER %s DEFAULT ROLE CONNECT, RESOURCE, DBA;' % username)
            sqlplus.expect('SQL>', timeout=None)
            sqlplus.sendline('grant READ, WRITE ON directory erpdump TO %s;' % username)
            sqlplus.expect('SQL>', timeout=None)
            sqlplus.sendline('grant create any job to %s;' % username)
            sqlplus.expect('SQL>', timeout=None)
            # (Re)create the DIRECTORY object used by impdp for this user.
            sqlplus.sendline('DROP DIRECTORY %s;' % dump_path_name)
            sqlplus.expect('SQL>', timeout=None)
            sqlplus.sendline('create directory %s as \'%s\';' % (dump_path_name, temp_path))
            sqlplus.expect('SQL>', timeout=None)
            sqlplus.sendline('grant read,write on directory %s to %s;' % (dump_path_name, username))
            sqlplus.expect('SQL>', timeout=None)
            sqlplus.sendline('alter user %s account unlock;' % username)
            sqlplus.expect('SQL>', timeout=None)
            # Leave sqlplus, back to the oracle user's shell.
            sqlplus.sendcontrol('d')
            sqlplus.expect('$')
            remap = 'remap_schema=%s:%s remap_tablespace=%s:%s' % (source_username,
                                                                   username,
                                                                   source_tablespace,
                                                                   tablespace)
            sh = 'impdp %s/%s dumpfile=%s.dmp DIRECTORY=%s %s' % (username,
                                                                  password,
                                                                  params['import_dump_name'],
                                                                  dump_path_name,
                                                                  remap)
            log.info('======> Start execute oracle import : %s', sh)
            sqlplus.sendline(sh)
            # Block until the Data Pump job reports completion.
            sqlplus.expect('Job "%s"."(.*)" completed with' % username, timeout=None)
            log.info('======> End execute oracle import ')
            time.sleep(5)
            sqlplus.close()
        except BaseException as e:
            log.error('=======> ' + e.message)
            raise TaskExcecuteError('execute shell failure, cause: %s.' % e.message)
        log.info("=======> InitOracleUserTask:Exit execute()")
class AfterImportTask(Task):
    """Run optional post-import SQL scripts as SYSDBA via sqlplus."""

    def __init__(self):
        Task.__init__(self, 'AfterImportTask')

    def __do_execute__(self, params=None):
        """Execute each script listed under ``params['after_imp_sql_files']``.

        Each script file is first chmod'ed 777 (best effort), then all of
        them are fed, in order, to a single ``sqlplus / as sysdba`` session.
        When the key is absent or holds no list/set of scripts, nothing runs.

        :raises TaskExcecuteError: when the sqlplus interaction fails
        """
        if params is None:
            # Avoid a shared mutable default argument; an empty dict takes
            # the same "nothing to execute" path as the original
            # {'after_imp_sql_files': None} default.
            params = {}
        log.info('********************************************************')
        log.info("=======> AfterImportTask:Entered execute()")
        try:
            if 'after_imp_sql_files' in params:
                sqls = params['after_imp_sql_files']
                if Converter.typeof_list_or_set(sqls) and len(sqls) > 0:
                    log.info('=======> Start read&write permissions to the script')
                    for sql in sqls:
                        sh = '/bin/bash -c "chmod 777 %s"' % sql
                        log.info('=======> execute shell : %s', sh)
                        # Best effort: the exit status is deliberately ignored.
                        run(sh, withexitstatus=1)
                    log.info('=======> end read&write permissions to the script')
                    sqlplus = pexpect.spawn('su - oracle', timeout=10)
                    sqlplus.logfile = sys.stdout
                    sqlplus.expect('$')
                    sqlplus.sendline('sqlplus / as sysdba')
                    sqlplus.expect('SQL>')
                    for sql in sqls:
                        log.info('=====> Start executing SQL script file: %s', sql)
                        sqlplus.sendline('@%s;' % sql)
                        # Scripts may run long; wait without a timeout.
                        sqlplus.expect('SQL>', timeout=None)
                    sqlplus.sendcontrol('d')
                    sqlplus.close()
                else:
                    log.info('=======> There is no need to execute the SQL script file')
            else:
                log.info('=======> There is no need to execute the SQL script file')
        except BaseException as e:
            log.error('=======> ' + e.message)
            raise TaskExcecuteError('execute shell failure, cause: %s.' % e.message)
        log.info("=======> AfterImportTask:Exit execute()")
import _plotly_utils.basevalidators
class AlignValidator(_plotly_utils.basevalidators.EnumeratedValidator):
def __init__(
self, plotly_name="align", parent_name="sankey.node.hoverlabel", **kwargs
):
super(AlignValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
array_ok=kwargs.pop("array_ok", True),
edit_type=kwargs.pop("edit_type", "calc"),
role=kwargs.pop("role", "style"),
values=kwargs.pop("values", ["left", "right", "auto"]),
**kwargs
)
| plotly/python-api | packages/python/plotly/plotly/validators/sankey/node/hoverlabel/_align.py | Python | mit | 588 |
from django.contrib import admin
from .models import Member, Rol, Theatre, Genre, Play, Image
admin.site.register(Member)
admin.site.register(Rol)
admin.site.register(Theatre)
admin.site.register(Genre)
admin.site.register(Play)
admin.site.register(Image)
| nicolas471/consultorio | web/admin.py | Python | gpl-3.0 | 257 |
# Copyright 2020 the V8 project authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Flags: -expose-wasm --wasm-gdb-remote --wasm-pause-waiting-for-debugger test/debugging/wasm/gdb-server/test_files/test_memory.js
import struct
import sys
import unittest
import gdb_rsp
import test_files.test_memory as test_memory
# These are set up by Main().
COMMAND = None
class Tests(unittest.TestCase):
# Test that reading from an unreadable address gives a sensible error.
def CheckReadMemoryAtInvalidAddr(self, connection):
mem_addr = 0xffffffff
result = connection.RspRequest('m%x,%x' % (mem_addr, 1))
self.assertEquals(result, 'E02')
def RunToWasm(self, connection, breakpoint_addr):
# Set a breakpoint.
reply = connection.RspRequest('Z0,%x,1' % breakpoint_addr)
self.assertEqual(reply, 'OK')
# When we run the program, we should hit the breakpoint.
reply = connection.RspRequest('c')
gdb_rsp.AssertReplySignal(reply, gdb_rsp.SIGTRAP)
# Remove the breakpoint.
reply = connection.RspRequest('z0,%x,1' % breakpoint_addr)
self.assertEqual(reply, 'OK')
def test_reading_and_writing_memory(self):
with gdb_rsp.LaunchDebugStub(COMMAND) as connection:
module_load_addr = gdb_rsp.GetLoadedModuleAddress(connection)
breakpoint_addr = module_load_addr + test_memory.FUNC0_START_ADDR
self.RunToWasm(connection, breakpoint_addr)
self.CheckReadMemoryAtInvalidAddr(connection)
# Check reading code memory space.
expected_data = b'\0asm'
result = gdb_rsp.ReadCodeMemory(connection, module_load_addr, len(expected_data))
self.assertEqual(result, expected_data)
# Check reading instance memory at a valid range.
reply = connection.RspRequest('qWasmMem:0;%x;%x' % (32, 4))
value = struct.unpack('I', gdb_rsp.DecodeHex(reply))[0]
self.assertEquals(int(value), 0)
# Check reading instance memory at an invalid range.
reply = connection.RspRequest('qWasmMem:0;%x;%x' % (0xf0000000, 4))
self.assertEqual(reply, 'E03')
def test_wasm_global(self):
with gdb_rsp.LaunchDebugStub(COMMAND) as connection:
module_load_addr = gdb_rsp.GetLoadedModuleAddress(connection)
breakpoint_addr = module_load_addr + test_memory.FUNC0_START_ADDR
self.RunToWasm(connection, breakpoint_addr)
# Check reading valid global.
reply = connection.RspRequest('qWasmGlobal:0;0')
value = struct.unpack('I', gdb_rsp.DecodeHex(reply))[0]
self.assertEqual(0, value)
# Check reading invalid global.
reply = connection.RspRequest('qWasmGlobal:0;9')
self.assertEqual("E03", reply)
def test_wasm_call_stack(self):
with gdb_rsp.LaunchDebugStub(COMMAND) as connection:
module_load_addr = gdb_rsp.GetLoadedModuleAddress(connection)
breakpoint_addr = module_load_addr + test_memory.FUNC0_START_ADDR
self.RunToWasm(connection, breakpoint_addr)
reply = connection.RspRequest('qWasmCallStack')
stack = gdb_rsp.DecodeUInt64Array(reply)
assert(len(stack) > 2) # At least two Wasm frames, plus one or more JS frames.
self.assertEqual(stack[0], module_load_addr + test_memory.FUNC0_START_ADDR)
self.assertEqual(stack[1], module_load_addr + test_memory.FUNC1_RETURN_ADDR)
def Main():
index = sys.argv.index('--')
args = sys.argv[index + 1:]
# The remaining arguments go to unittest.main().
global COMMAND
COMMAND = args
unittest.main(argv=sys.argv[:index])
if __name__ == '__main__':
Main()
| youtube/cobalt | third_party/v8/test/debugging/wasm/gdb-server/memory.py | Python | bsd-3-clause | 3,599 |
# bigbob2b.py
# Case Study 2 - multiple robots (method 2)
import math, sys, os
sys.path.append('/usr/local/lib/python2.7/site-packages/')
from playercpp import *
# Make proxies for Client, Sonar, Position2d
robot = PlayerClient("localhost",6665);
sp = RangerProxy(robot,0);
lp = RangerProxy(robot,1);
pp = Position2dProxy(robot,0);
sp2 = RangerProxy(robot,2);
lp2 = RangerProxy(robot,3);
pp2 = Position2dProxy(robot,1);
while True:
# Read from proxies
robot.Read()
# Print out sonars for fun
print "Sonar scan (robot 1, %d ranges): " % sp.GetRangeCount(),
for i in range(sp.GetRangeCount()):
print '%.2f' % sp.GetRange(i),
print '.'
# Print out lasers for fun
print "Laser scan (robot 1, %d ranges): " % lp.GetRangeCount(),
for i in range(lp.GetRangeCount()):
print '%.2f' % lp.GetRange(i),
print '.'
# do simple collision avoidance
short = 0.5;
if sp.GetRange(0) < short or sp.GetRange(2)<short:
turnrate = math.radians(-20); # Turn 20 degrees persecond
elif sp.GetRange(1) <short or sp.GetRange(3)<short:
turnrate = math.radians(20)
else:
turnrate = 0;
if sp.GetRange(0) < short or sp.GetRange(1) < short:
speed = 0;
else:
speed = 0.100;
# Command the motors
pp.SetSpeed(speed, turnrate);
if sp2.GetRangeCount()==0:
continue
if lp2.GetRangeCount()==0:
continue
# Print out sonars for fun
print "Sonar scan (robot 2), %d ranges: " % sp2.GetRangeCount(),
for i in range(sp2.GetRangeCount()):
print '%.2f' % sp2.GetRange(i),
print '.'
# Print out lasers for fun
print "Laser scan (robot 2), %d ranges: " % lp2.GetRangeCount(),
for i in range(lp2.GetRangeCount()):
print '%.2f' % lp2.GetRange(i),
print '.'
# do simple collision avoidance
short = 0.5;
if sp2.GetRange(0) < short or sp2.GetRange(2)<short:
turnrate = math.radians(-20); # Turn 20 degrees persecond
elif sp2.GetRange(1) <short or sp2.GetRange(3)<short:
turnrate = math.radians(20)
else:
turnrate = 0;
if sp2.GetRange(0) < short or sp2.GetRange(1) < short:
speed = 0;
else:
speed = 0.100;
# Command the motors
pp2.SetSpeed(speed, turnrate);
| lsa-pucrs/Player-Stage-Manual | code/Ch8.6/bigbob2b.py | Python | gpl-3.0 | 2,088 |
import ocl
import camvtk
import time
import vtk
import datetime
import math
if __name__ == "__main__":
p = ocl.Epos()
print("initial Epos() is ",p, "with dia=",p.d)
for n in range(0,30):
print(p.d," : ",p)
p.d = p.d - 0.25
p.setD()
raw_input("Press Enter to terminate")
| aewallin/opencamlib | examples/python/offset-ellipse/epos_test.py | Python | lgpl-2.1 | 336 |
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""
Author: Norin (copied it)
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import xbmc,xbmcplugin,xbmcaddon,xbmcgui,sys,os,urllib,xbmcvfs
from common_variables import *
from iofile import *
from kkplayer import *
#Function to build and return an episode item | tupple (url,listitem,isFolder)
def build_episode_item(name,url,mode,iconimage,page,info,video_info,audio_info):
videoid = url
u=sys.argv[0]+"?url="+urllib.quote_plus(url)+"&mode="+str(mode)+"&name="+urllib.quote_plus(name)+"&page="+str(page)
ok=True
liz=xbmcgui.ListItem(name)
cm = []
cm.append((translate(30005), 'XBMC.Action(Info)'))
if info["playcount"] == 1: cm.append((translate(30007), 'XBMC.RunPlugin(%s?mode=7&url=%s)' % (sys.argv[0],videoid)))
else: cm.append((translate(30006), 'XBMC.RunPlugin(%s?mode=6&url=%s)' % (sys.argv[0],videoid)))
liz.setArt({ 'thumb': iconimage, 'banner' : os.path.join(artfolder,'banner.png'), 'fanart': os.path.join(addonfolder,'fanart.jpg') })
liz.setPath(u)
liz.setInfo( type="Video", infoLabels=info)
liz.addStreamInfo('video', video_info)
liz.addStreamInfo('audio', audio_info)
liz.addContextMenuItems(cm, replaceItems=False)
return (u,liz,False)
#Function to add a regular directory
def addDir(name,url,mode,iconimage,page,number_of_items,token,pasta=True):
u=sys.argv[0]+"?url="+urllib.quote_plus(url)+"&mode="+str(mode)+"&name="+urllib.quote_plus(name)+"&page="+str(page)+"&token="+urllib.quote_plus(token)
ok=True
liz=xbmcgui.ListItem(name)
liz.setInfo( type="Video", infoLabels={ "Title": name })
liz.setArt({ 'thumb': iconimage, 'banner' : os.path.join(artfolder,'banner.png'), 'fanart': os.path.join(addonfolder,'fanart.jpg') })
liz.setPath(u)
ok=xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),url=u,listitem=liz,isFolder=pasta,totalItems=number_of_items)
return ok
| Norin-Radd/plugin.video.pitchfork | resources/lib/directory.py | Python | gpl-2.0 | 2,439 |
import sys
import math
import random
import os
import wx
import wx.grid
from layout import CircleLayout
import justify
from renderer import WxLayoutRenderer
from tree_editor import TreeEditor
import tree_editor
import tree
VERSION = '0.1'
PROPERTIES=['hue', 'layout']
class EditNodeDialog(wx.Dialog):
def __init__(
self, content, properties, parent, id=-1, title="Edit node", size=wx.DefaultSize, pos=wx.DefaultPosition,
style=wx.DEFAULT_DIALOG_STYLE|wx.RESIZE_BORDER,
):
wx.Dialog.__init__(self, parent, id, title=title, size=size, style=style)
sizer = wx.BoxSizer(wx.VERTICAL)
label = wx.StaticText(self, -1, "Node content")
sizer.Add(label, 0, wx.ALL, 5)
self.content_control = wx.TextCtrl(self, -1, content, size=(80,-1), style=wx.TE_MULTILINE)
sizer.Add(self.content_control, 1, wx.EXPAND|wx.ALL, 5)
label = wx.StaticText(self, -1, "Properties")
sizer.Add(label, 0, wx.ALL, 5)
self.property_grid = wx.grid.Grid(self, -1)
sizer.Add(self.property_grid, 2, wx.EXPAND|wx.ALL, 5)
self.property_grid.CreateGrid(len(properties)+1,2)
self.property_grid.SetColLabelValue(0, "Name")
self.property_grid.SetColLabelValue(1, "Value")
self.prop_name_editor = wx.grid.GridCellChoiceEditor(PROPERTIES, True)
i = 0
for k,v in properties.iteritems():
self.property_grid.SetCellValue(i, 0, k)
self.property_grid.SetCellValue(i, 1, v)
self.property_grid.SetCellEditor(i, 0, self.prop_name_editor)
i += 1
self.property_grid.SetCellEditor(i, 0, self.prop_name_editor)
btnsizer = wx.StdDialogButtonSizer()
btn = wx.Button(self, wx.ID_OK)
btn.SetDefault()
btnsizer.AddButton(btn)
btn = wx.Button(self, wx.ID_CANCEL)
btnsizer.AddButton(btn)
btnsizer.Realize()
sizer.Add(btnsizer, 0, wx.ALIGN_CENTER|wx.ALIGN_CENTER_VERTICAL|wx.ALL, 5)
self.Bind(wx.grid.EVT_GRID_CELL_CHANGE, self.OnGridCellChange)
self.SetSizer(sizer)
sizer.Fit(self)
def OnGridCellChange(self, evt):
if evt.GetRow() >= self.property_grid.GetNumberRows()-1:
self.property_grid.AppendRows(1)
self.property_grid.SetCellEditor(evt.GetRow()+1, 0, self.prop_name_editor)
def GetNewContent(self):
return self.content_control.GetValue()
def GetNewProperties(self):
props = {}
for i in range(self.property_grid.GetNumberRows()):
k = self.property_grid.GetCellValue(i, 0)
v = self.property_grid.GetCellValue(i, 1)
if k != '' and v != '':
props[k] = v
return props
class LayoutPanel(wx.Panel):
def __init__(self,
parent=None, ID=-1
):
wx.Panel.__init__(self, parent, size=(1,1), style=wx.WANTS_CHARS)
self.Bind(wx.EVT_MOUSEWHEEL, self.OnMouseScroll)
self.Bind(wx.EVT_MOTION, self.OnMouseMove)
self.Bind(wx.EVT_LEFT_UP, self.OnMouseLeftUp)
self.Bind(wx.EVT_LEFT_DCLICK, self.OnMouseLeftDoubleClick)
self.Bind(wx.EVT_KEY_DOWN, self.OnKeyDown)
self.Bind(wx.EVT_ERASE_BACKGROUND, lambda evt: None)
self.Bind(wx.EVT_PAINT, self.OnPaint)
self.Bind(wx.EVT_SIZE, lambda e: self.Refresh())
self.drag_pos = None
self.renderer = WxLayoutRenderer()
self.editor = TreeEditor(self)
def SetTree(self, tree, filename):
self.layout = CircleLayout(tree)
self.layout.get_text_size = get_text_size
self.layout.run()
self.renderer.layout = self.layout
self.filename = filename
self.dirty = False
self.show_bounding_circles = False
self.auto_relayout = True
self.scroll_x = 0
self.scroll_y = 0
self.zoom = 1.0
self.selected_node = None
self.ReLayout()
def OnMouseScroll(self, evt):
if evt.CmdDown():
r = 1.125**(evt.GetWheelRotation()/120.0)
self.zoom *= r
self.scroll_x *= r
self.scroll_y *= r
self.Refresh()
def OnMouseMove(self, evt):
if not evt.Dragging():
self.drag_pos = None
else:
self.CaptureMouse()
if not self.drag_pos:
self.drag_pos = evt.GetPosition()
self.start_x,self.start_y = self.scroll_x,self.scroll_y
else:
displacement = evt.GetPosition() - self.drag_pos
self.scroll_x = self.start_x + displacement.x
self.scroll_y = self.start_y + displacement.y
self.Refresh()
self.ReleaseMouse()
def OnMouseLeftUp(self, evt):
if self.drag_pos is not None:
self.drag_pos = None
return
x,y = evt.GetPositionTuple()
x,y = self.renderer.screen_to_coord(x, y)
n = self.layout.find_node(x,y)
self.SelectNode(n)
def OnMouseLeftDoubleClick(self, evt):
self.OnMouseLeftUp(evt)
self.OnEditNode(evt)
def OnKeyDown(self, evt):
if evt.CmdDown():
if evt.GetKeyCode() == wx.WXK_HOME:
self.SelectNode(self.layout.root)
elif evt.GetKeyCode() in [wx.WXK_LEFT, wx.WXK_RIGHT]:
self.MoveNode(evt.GetKeyCode())
else:
evt.Skip()
return
if evt.GetKeyCode() in [wx.WXK_UP, wx.WXK_DOWN, wx.WXK_LEFT, wx.WXK_RIGHT]:
self.MoveSelection(evt.GetKeyCode())
elif evt.GetKeyCode() == wx.WXK_F9:
self.ReLayout()
elif evt.GetKeyCode() == wx.WXK_BACK and self.selected_node is not None:
self.selected_node = self.selected_node.parent
self.ReLayout()
elif evt.GetKeyCode() == ord('B'):
self.show_bounding_circles = not self.show_bounding_circles
self.Refresh()
elif evt.GetKeyCode() == ord('P'):
self.Pivot()
elif evt.GetKeyCode() == ord('R'):
self.SetRoot()
else:
evt.Skip()
def OnPaint(self, evt):
s = self.GetSize()
dc = wx.MemoryDC()
dc.SelectObject(wx.EmptyBitmap(s.x, s.y))
dc.SetBrush(wx.WHITE_BRUSH)
dc.Clear()
gc = wx.GraphicsContext.Create(dc)
self.renderer.zoom = self.zoom
self.renderer.scroll_x = self.scroll_x
self.renderer.scroll_y = self.scroll_y
self.renderer.selected_node = self.selected_node
self.renderer.show_bounding = self.show_bounding_circles
self.renderer.gc = gc
self.renderer.dc = dc
self.renderer.render()
dc2 = wx.PaintDC(self)
dc2.Blit(0,0,s.x,s.y,dc,0,0)
del dc2
def MoveSelection(self, key_code):
if self.selected_node is None:
return
node = self.selected_node
all_neighbours = []
if node.parent is not None:
all_neighbours.append(node.parent)
all_neighbours.extend(node.children)
if node.parent is not None:
all_neighbours.extend([c for c in node.parent.children if c != node])
relevant_neighbours = []
x,y = self.layout.positions[node]
for n in all_neighbours:
nx,ny = self.layout.positions[n]
dx = nx - x
dy = ny - y
if ((-dx > abs(dy) and key_code == wx.WXK_LEFT)
or (dx > abs(dy) and key_code == wx.WXK_RIGHT)
or (-dy > abs(dx) and key_code == wx.WXK_UP)
or (dy > abs(dx) and key_code == wx.WXK_DOWN)):
relevant_neighbours.append(n)
if len(relevant_neighbours) > 0:
new_node = relevant_neighbours[0]
self.SelectNode(new_node)
def SelectNode(self, node):
if node != self.selected_node:
self.selected_node = node
self.Refresh()
def Pivot(self):
if self.selected_node is None or self.selected_node.parent is None:
return
mutation = tree_editor.PivotMutation(self.layout, self.selected_node)
self.selected_node = mutation.parent
self.editor.perform(mutation)
self.dirty = True
def SetRoot(self):
if self.selected_node is None or self.selected_node.parent is None:
return
mutation = tree_editor.SetRootMutation(self.layout, self.selected_node)
self.selected_node = mutation.old_root
self.editor.perform(mutation)
self.dirty = True
def MoveNode(self, key_code):
if self.selected_node is None or self.selected_node.parent is None:
return
if key_code == wx.WXK_LEFT and self.selected_node != self.selected_node.parent.children[0]:
direction = -1
elif key_code == wx.WXK_RIGHT and self.selected_node != self.selected_node.parent.children[-1]:
direction = 1
else:
return
mutation = tree_editor.MoveMutation(self.layout, self.selected_node, direction)
self.editor.perform(mutation)
self.dirty = True
def OnZoomAll(self, evt):
diameter = self.layout.root.bounding_radius*2
self.scroll_x,self.scroll_y = 0,0
self.zoom = min(self.renderer.width, self.renderer.height)/float(diameter)
self.Refresh()
def OnUndo(self, evt):
self.editor.undo()
def OnRedo(self, evt):
self.editor.redo()
def OnCopy(self, evt):
if self.selected_node is None:
return
target = self.selected_node
node_text = tree.to_string(target)
clipdata = wx.TextDataObject()
clipdata.SetText(node_text)
wx.TheClipboard.Open()
wx.TheClipboard.SetData(clipdata)
wx.TheClipboard.Close()
def OnCut(self, evt):
if self.selected_node is None:
return
self.OnCopy(evt)
self.OnDeleteNode(evt)
def OnPaste(self, evt):
target = self.selected_node
if not target:
target = self.layout.root
if wx.TheClipboard.IsOpened():
return
do = wx.TextDataObject()
wx.TheClipboard.Open()
success = wx.TheClipboard.GetData(do)
wx.TheClipboard.Close()
if not success:
return
node = tree.from_string(do.GetText())
mutation = tree_editor.InsertMutation(self.layout, target, node)
self.editor.perform(mutation)
self.dirty = True
def OnDeleteNode(self, evt):
if self.selected_node is not None and self.selected_node != self.layout.root:
mutation = tree_editor.DeleteMutation(self.layout, self.selected_node)
self.selected_node = None
self.editor.perform(mutation)
self.dirty = True
def OnEditNode(self, evt):
if self.selected_node is None:
return
#new_text = wx.GetTextFromUser("Edit node content", "Edit node", self.selected_node.content.replace('\n', ' '))
dlg = EditNodeDialog(self.selected_node.content, self.selected_node.properties, self.GetParent())
dlg.Show()
val = dlg.ShowModal()
new_text = dlg.GetNewContent()
new_props = dlg.GetNewProperties()
dlg.Destroy()
if val != wx.ID_OK:
return
if len(new_text) == 0:
return
points = justify.get_points(new_text)
all_js = justify.justify_text(points, 2)
j = all_js[0][1]
new_text = justify.render_text(new_text, j)
mutation = tree_editor.EditMutation(self.layout, self.selected_node, new_text, new_props)
self.editor.perform(mutation)
self.dirty = True
def OnAddNode(self, evt):
if self.selected_node is None:
return
new_text = wx.GetTextFromUser("Enter new node content", "New node", '')
if len(new_text) == 0:
return
points = justify.get_points(new_text)
all_js = justify.justify_text(points, 2)
j = all_js[0][1]
new_text = justify.render_text(new_text, j)
n = tree.Node(new_text)
mutation = tree_editor.InsertMutation(self.layout, self.selected_node, n)
self.editor.perform(mutation)
self.dirty = True
wx.PostEvent(self, evt)
def ReLayout(self):
self.layout.run()
self.Refresh()
class MainFrame(wx.Frame):
def __init__(self,
parent=None, ID=-1, pos=wx.DefaultPosition,
size=wx.Size(800,600), style=wx.DEFAULT_FRAME_STYLE
):
title = "Snowflake"
wx.Frame.__init__(self, parent, ID, title, pos, size, style)
self.panel = LayoutPanel(self)
# Menu bar
menu_bar = wx.MenuBar()
file_menu = wx.Menu()
file_menu.Append(wx.ID_NEW, "&New\tCtrl-N", "Create a new tree")
file_menu.Append(wx.ID_OPEN, "&Open...\tCtrl-O", "Open a tree")
file_menu.Append(wx.ID_SAVE, "&Save\tCtrl-S", "Save this tree")
file_menu.Append(wx.ID_SAVEAS, "Save &As...", "Save this tree under a different filename")
file_menu.AppendSeparator()
import_menu = wx.Menu()
freemind_id = wx.NewId()
import_menu.Append(freemind_id, "&FreeMind...", "Import a FreeMind mindmap tree")
file_menu.AppendMenu(wx.NewId(), "&Import", import_menu)
file_menu.AppendSeparator()
file_menu.Append(wx.ID_EXIT, "E&xit\tAlt-F4", "Exit this program")
menu_bar.Append(file_menu, "&File")
self.Bind(wx.EVT_MENU, self.OnFileNew, id=wx.ID_NEW)
self.Bind(wx.EVT_MENU, self.OnFileOpen, id=wx.ID_OPEN)
self.Bind(wx.EVT_MENU, self.OnFileSave, id=wx.ID_SAVE)
self.Bind(wx.EVT_MENU, self.OnFileSaveAs, id=wx.ID_SAVEAS)
self.Bind(wx.EVT_MENU, self.OnImportFreeMind, id=freemind_id)
self.Bind(wx.EVT_MENU, self.OnFileExit, id=wx.ID_EXIT)
edit_menu = wx.Menu()
edit_menu.Append(wx.ID_UNDO, "&Undo\tCtrl-Z", "Undo the last edit operation")
edit_menu.Append(wx.ID_REDO, "&Redo\tCtrl-Y", "Redo the last edit operation")
edit_menu.AppendSeparator()
edit_menu.Append(wx.ID_CUT, "Cu&t\tCtrl-X", "Cut the selected node from the tree")
edit_menu.Append(wx.ID_COPY, "&Copy\tCtrl-C", "Copy the selected node")
edit_menu.Append(wx.ID_PASTE, "&Paste\tCtrl-V", "Paste a child into the selected node")
edit_menu.AppendSeparator()
add_node_id = wx.NewId()
edit_menu.Append(add_node_id, "&Add node\tInsert", "Add a new child to the selected node")
edit_node_id = wx.NewId()
edit_menu.Append(edit_node_id, "&Edit node\tF2", "Edit the selected node's text or properties")
delete_node_id = wx.NewId()
edit_menu.Append(delete_node_id, "&Delete node\tDelete", "Delete the selected node")
menu_bar.Append(edit_menu, "&Edit")
self.Bind(wx.EVT_MENU, self.panel.OnUndo, id=wx.ID_UNDO)
self.Bind(wx.EVT_MENU, self.panel.OnRedo, id=wx.ID_REDO)
self.Bind(wx.EVT_MENU, self.panel.OnCut, id=wx.ID_CUT)
self.Bind(wx.EVT_MENU, self.panel.OnCopy, id=wx.ID_COPY)
self.Bind(wx.EVT_MENU, self.panel.OnPaste, id=wx.ID_PASTE)
self.Bind(wx.EVT_MENU, self.panel.OnAddNode, id=add_node_id)
self.Bind(wx.EVT_MENU, self.panel.OnEditNode, id=edit_node_id)
self.Bind(wx.EVT_MENU, self.panel.OnDeleteNode, id=delete_node_id)
view_menu = wx.Menu()
view_menu.Append(wx.ID_ZOOM_FIT, "&Zoom all\tZ", "Zoom to show the entire tree")
menu_bar.Append(view_menu, "&View")
self.Bind(wx.EVT_MENU, self.panel.OnZoomAll, id=wx.ID_ZOOM_FIT)
help_menu = wx.Menu()
help_menu.Append(wx.ID_ABOUT, "&About...", "About this program")
menu_bar.Append(help_menu, "&Help")
self.Bind(wx.EVT_MENU, self.OnHelpAbout, id=wx.ID_ABOUT)
self.Bind(wx.EVT_CLOSE, self.OnClose)
self.SetMenuBar(menu_bar)
# Status bar
self.CreateStatusBar()
self.SetStatusText("Snowflake Tree Editor")
def OnFileNew(self, event):
if not self.CheckUnsaved(event):
return
t = tree.Node("Start")
self.panel.SetTree(t, None)
self.SetTitle("Snowflake")
def OnFileOpen(self, event):
if not self.CheckUnsaved(event):
return
wildcard = "Tree text file|*.txt"
dlg = wx.FileDialog(
self, message="Choose a file...", defaultDir=os.getcwd(),
defaultFile="", wildcard=wildcard, style=wx.OPEN | wx.CHANGE_DIR
)
if dlg.ShowModal() == wx.ID_OK:
paths = dlg.GetPaths()
filename = paths[0]
t = tree.load(filename)
self.panel.SetTree(t, filename)
self.SetTitle("Snowflake - " + self.panel.filename)
dlg.Destroy()
def OnFileSave(self, event):
if self.panel.filename is None:
self.OnFileSaveAs(event)
return
tree.save(self.panel.layout.root, self.panel.filename)
self.panel.dirty = False
def OnFileSaveAs(self, event):
wildcard = "Tree text file|*.txt"
if self.panel.filename is None:
filename = ""
else:
filename = self.panel.filename
dlg = wx.FileDialog(
self, message="Save file as ...", defaultDir=os.getcwd(),
defaultFile="", wildcard=wildcard, style=wx.SAVE
)
if dlg.ShowModal() == wx.ID_OK:
paths = dlg.GetPaths()
filename = paths[0]
self.panel.filename = filename
tree.save(self.panel.layout.root, self.panel.filename)
self.panel.dirty = False
self.SetTitle("Snowflake - " + self.panel.filename)
dlg.Destroy()
def OnClose(self, event):
if not self.CheckUnsaved(event):
event.Veto()
return
self.Destroy()
def OnImportFreeMind(self, event):
if not self.CheckUnsaved(event):
return
wildcard = "FreeMind mindmap|*.mm"
dlg = wx.FileDialog(
self, message="Choose a file...", defaultDir=os.getcwd(),
defaultFile="", wildcard=wildcard, style=wx.OPEN | wx.CHANGE_DIR
)
if dlg.ShowModal() == wx.ID_OK:
paths = dlg.GetPaths()
filename = paths[0]
t = tree.load(filename)
self.panel.SetTree(t, None)
self.SetTitle("Snowflake")
dlg.Destroy()
def OnHelpAbout(self, event):
info = wx.AboutDialogInfo()
info.Name = 'Snowflake Tree Editor'
info.Version = VERSION
info.Copyright = 'Copyright (C) 2010, Edmund Horner'
info.Developers = ['Edmund Horner']
info.Description = 'An editor for simple trees, such as taxonomies or mindmaps.'
info.WebSite = 'http://homepages.paradise.net.nz/~ejrh/'
info.License = 'TBD'
wx.AboutBox(info)
def OnFileExit(self, event):
if not self.CheckUnsaved(event):
return
self.panel.dirty = False
self.Close()
def CheckUnsaved(self, event):
if not self.panel.dirty:
return True
answer = wx.MessageBox("Save unsaved changes?", "Unsaved changes",
wx.YES_NO | wx.CANCEL, self)
if answer == wx.CANCEL:
return False
elif answer == wx.NO:
return True
self.OnFileSave(event)
return not self.panel.dirty
get_text_size_dc = None
get_text_size_font = None
def get_text_size(t, sc):
global get_text_size_dc, get_text_size_font
if get_text_size_dc is None:
get_text_size_dc = wx.MemoryDC()
get_text_size_font = wx.Font(12, wx.FONTFAMILY_SWISS, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL)
get_text_size_dc.SetFont(get_text_size_font)
lines = t.split('\n')
maxw = 0
maxh = 0
for l in lines:
w,h = get_text_size_dc.GetTextExtent(l)
if w > maxw:
maxw = w
maxh += h
hyp = math.sqrt(maxw*maxw + maxh*maxh)
return hyp*sc
def main():
t = tree.Node("Start")
app = wx.PySimpleApp()
frame = MainFrame()
frame.panel.SetTree(t, None)
frame.Show(True)
app.MainLoop()
if __name__ == "__main__":
try:
import psyco
psyco.full()
except ImportError:
pass
main()
| ejrh/snowflake | main.py | Python | gpl-2.0 | 21,164 |
##
# Copyright 2009-2017 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
# Flemish Research Foundation (FWO) (http://www.fwo.be/en)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# http://github.com/hpcugent/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
EasyBuild support for python-meep, implemented as an easyblock
@author: Stijn De Weirdt (Ghent University)
@author: Dries Verdegem (Ghent University)
@author: Kenneth Hoste (Ghent University)
@author: Pieter De Baets (Ghent University)
@author: Jens Timmerman (Ghent University)
"""
import glob
import os
import shutil
import tempfile
from easybuild.easyblocks.generic.pythonpackage import PythonPackage
from easybuild.tools.build_log import EasyBuildError
from easybuild.tools.filetools import extract_file, rmtree2
from easybuild.tools.modules import get_software_root
from easybuild.tools.run import run_cmd
class EB_python_minus_meep(PythonPackage):
    """
    Support for building and installing python-meep
    """

    def configure_step(self):
        """Just check whether dependencies (Meep, Python) are available."""
        # make sure that required dependencies are loaded
        deps = ["Meep", "Python"]
        for dep in deps:
            if not get_software_root(dep):
                raise EasyBuildError("Module for %s not loaded.", dep)

        super(EB_python_minus_meep, self).configure_step()

    def build_step(self):
        """Build python-meep using available make/make-mpi script."""
        # determine make script arguments: Meep include/lib dirs + numpy headers
        meep = get_software_root('Meep')
        meepinc = os.path.join(meep, 'include')
        meeplib = os.path.join(meep, 'lib')
        numpyinc = os.path.join(get_software_root('Python'), self.pylibdir, 'numpy', 'core', 'include')

        # MPI-enabled builds use a separate './make-mpi' script
        suff = ''
        if self.toolchain.options.get('usempi', None):
            suff = '-mpi'

        # run make script
        cmd = "./make%s -I%s,%s -L%s" % (suff, meepinc, numpyinc, meeplib)
        run_cmd(cmd, log_all=True, simple=True)

    def install_step(self):
        """
        Install by unpacking tarball in dist directory,
        and copying site-packages dir to installdir.
        """
        # locate tarball produced by the build step under 'dist'
        tarball = None
        shortver = '.'.join(self.version.split('.')[:2])
        fn_pattern = os.path.join(self.cfg['start_dir'],
                                  'dist',
                                  "%s-%s.*.tar.gz" % (self.name, shortver))
        matches = glob.glob(fn_pattern)
        if not matches:
            raise EasyBuildError("No tarball found at %s", fn_pattern)
        elif len(matches) > 1:
            raise EasyBuildError("Multiple matches found for tarball: %s", matches)
        else:
            tarball = matches[0]
            self.log.info("Tarball found at %s" % tarball)

        # unpack tarball to temporary directory
        tmpdir = tempfile.mkdtemp()
        srcdir = extract_file(tarball, tmpdir)
        if not srcdir:
            raise EasyBuildError("Unpacking tarball %s failed?", tarball)

        # locate site-packages dir to copy by diving into unpacked tarball
        # (descend through single-entry directories until the payload is found)
        src = srcdir
        while len(os.listdir(src)) == 1:
            src = os.path.join(src, os.listdir(src)[0])
        if os.path.basename(src) != 'site-packages':
            raise EasyBuildError("Expected to find a site-packages path, but found something else: %s", src)

        # copy contents of site-packages dir
        dest = os.path.join(self.installdir, 'site-packages')
        try:
            shutil.copytree(src, dest)
            rmtree2(tmpdir)
            os.chdir(self.installdir)
        # 'except E as err' (rather than py2-only 'except E, err') works on
        # Python 2.6+ and Python 3
        except OSError as err:
            raise EasyBuildError("Failed to copy directory %s to %s: %s", src, dest, err)

    def sanity_check_step(self):
        """Custom sanity check for python-meep: check for meep_mpi module."""
        custom_paths = {
            'files': ["site-packages/meep_mpi.py"],
            'dirs': []
        }
        # import check should target the meep_mpi module, not 'python-meep'
        self.options['modulename'] = 'meep_mpi'
        return super(EB_python_minus_meep, self).sanity_check_step(custom_paths=custom_paths)

    def make_module_extra(self):
        """Set python-meep specific environment variables in module."""
        txt = super(EB_python_minus_meep, self).make_module_extra()

        meep = get_software_root('Meep')
        if meep is not None:
            txt += self.module_generator.set_environment('MEEP_INCLUDE', os.path.join(meep, 'include'))
            txt += self.module_generator.set_environment('MEEP_LIB', os.path.join(meep, 'lib'))

        # all three variables point at the installed site-packages directory
        for var in ["PYTHONMEEPPATH", "PYTHONMEEP_INCLUDE", "PYTHONPATH"]:
            txt += self.module_generator.set_environment(var, os.path.join(self.installdir, 'site-packages'))

        return txt
| ULHPC/easybuild-easyblocks | easybuild/easyblocks/p/python_meep.py | Python | gpl-2.0 | 5,553 |
from flask import request, jsonify
from mysite import app, wclient
from mysite.models import Status
from mysite.weibo import Client
# from mysite.models import weibo
from . import weibo
@weibo.route("/read_status_list")
def read_status_list():
    """Return the ids of statuses mentioning the current user, as JSON."""
    mentions = wclient.get('statuses/mentions/ids')
    return jsonify(mentions)
import os
import subprocess
from plank import task, depends
@task
def unit_tests():
    """Run the unit tests under coverage and stash the coverage data.

    Tests run in a subprocess since we've imported some of the code we are
    testing, and coverage would not accurately reflect the lines we test.
    """
    # Bug fix: subprocess.check_call() raises CalledProcessError on any
    # non-zero exit, which made the old `if exit_status != 0` check
    # unreachable.  Use subprocess.call() so we can report failure ourselves.
    exit_status = subprocess.call(['py.test', '--cov', 'plank', '--cov-report=', 'tests/unit'])
    if exit_status != 0:
        raise Exception('Unit tests failed')
    # Keep this run's data under a distinct name so `coverage` can combine it.
    os.rename('.coverage', '.unit.coverage')
@task
def integration_tests():
    """Run the integration tests under coverage and stash the coverage data.

    Tests run in a subprocess since we've imported some of the code we are
    testing, and coverage would not accurately reflect the lines we test.
    """
    # Bug fix: subprocess.check_call() raises CalledProcessError on any
    # non-zero exit, which made the old `if exit_status != 0` check
    # unreachable.  Use subprocess.call() so we can report failure ourselves.
    exit_status = subprocess.call(['py.test', '--cov', 'plank', '--cov-report=', 'tests/integration'])
    if exit_status != 0:
        raise Exception('Integration tests failed')
    # Keep this run's data under a distinct name so `coverage` can combine it.
    os.rename('.coverage', '.integration.coverage')
@task
def coverage():
    """Combine unit/integration coverage data and print a combined report."""
    # Local import binds the `coverage` module locally, avoiding a clash
    # with this task function's own name at module level.
    import coverage
    cov = coverage.coverage()
    cov.load()
    # Merge the per-suite data files stashed by unit_tests/integration_tests.
    cov.combine(['.unit.coverage', '.integration.coverage'])
    cov.save()
    cov.report(show_missing=True)
@task
@depends('unit_tests', 'integration_tests', 'coverage')
def tests():
    """Aggregate task: the dependencies run all suites and report coverage."""
    pass
@task
def package():
    """Build a source distribution (sdist) via setup.py."""
    from distutils.core import run_setup
    run_setup('setup.py', script_args=['sdist'])
| atbentley/plank | planks.py | Python | mit | 1,270 |
# (C) British Crown Copyright 2010 - 2015, Met Office
#
# This file is part of Iris.
#
# Iris is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Iris is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Iris. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
from six.moves import (filter, input, map, range, zip) # noqa
# import iris tests first so that some things can be initialised before importing anything else
import iris.tests as tests
import os
import iris
import iris.fileformats.pp
import iris.fileformats.pp_rules
import iris.fileformats.rules
import iris.io
import iris.util
import iris.tests.stock
@tests.skip_data
class TestPPLoadCustom(tests.IrisTest):
    """Tests loading PP fields with hand-modified headers (lbtim, ocean levels)."""
    def setUp(self):
        # Load one real on-disk field as a template; tests derive modified
        # copies from it and accumulate the resulting cubes for merging.
        self.subcubes = iris.cube.CubeList()
        filename = tests.get_data_path(('PP', 'aPPglob1', 'global.pp'))
        self.template = next(iris.fileformats.pp.load(filename))
    def _field_to_cube(self, field):
        # Convert a single PP field to a cube via the standard load rules.
        cube, _, _ = iris.fileformats.rules._make_cube(
            field, iris.fileformats.pp_rules.convert)
        return cube
    def test_lbtim_2(self):
        # Build a series of fields with lbtim=2 and increasing dates,
        # then check the merged result against the reference CML.
        for delta in range(10):
            field = self.template.copy()
            field.lbtim = 2
            field.lbdat += delta
            cube = self._field_to_cube(field)
            self.subcubes.append(cube)
        cube = self.subcubes.merge()[0]
        self.assertCML(cube, ('pp_rules', 'lbtim_2.cml'))
    def _ocean_depth(self, bounded=False):
        """Populate self.subcubes with ocean-depth-level fields."""
        lbuser = list(self.template.lbuser)
        lbuser[6] = 2
        lbuser[3] = 101
        lbuser = tuple(lbuser)
        for level_and_depth in enumerate([5.0, 15.0, 25.0, 35.0, 45.0]):
            field = self.template.copy()
            field.lbuser = lbuser
            field.lbvc = 2
            field.lbfc = 601
            field.lblev, field.blev = level_and_depth
            if bounded:
                # Give each level +/-1 depth bounds.
                brsvd = list(field.brsvd)
                brsvd[0] = field.blev - 1
                field.brsvd = tuple(brsvd)
                field.brlev = field.blev + 1
            cube = self._field_to_cube(field)
            self.subcubes.append(cube)
    def test_ocean_depth(self):
        self._ocean_depth()
        cube = self.subcubes.merge()[0]
        self.assertCML(cube, ('pp_rules', 'ocean_depth.cml'))
    def test_ocean_depth_bounded(self):
        self._ocean_depth(bounded=True)
        cube = self.subcubes.merge()[0]
        self.assertCML(cube, ('pp_rules', 'ocean_depth_bounded.cml'))
class TestReferences(tests.IrisTest):
    """Tests for reference-cube alignment via `rules._ensure_aligned`."""
    def setUp(self):
        # A small 2d cube serves as both the reference and the target.
        target = iris.tests.stock.simple_2d()
        target.data = target.data.astype('f4')
        self.target = target
        self.ref = target.copy()
    def test_regrid_missing_coord(self):
        # If the target cube is missing one of the source dimension
        # coords, ensure the re-grid fails nicely - i.e. returns None.
        self.target.remove_coord('bar')
        new_ref = iris.fileformats.rules._ensure_aligned({}, self.ref,
                                                         self.target)
        self.assertIsNone(new_ref)
    def test_regrid_codimension(self):
        # If the target cube has two of the source dimension coords
        # sharing the same dimension (e.g. a trajectory) then ensure
        # the re-grid fails nicely - i.e. returns None.
        self.target.remove_coord('foo')
        new_foo = self.target.coord('bar').copy()
        new_foo.rename('foo')
        self.target.add_aux_coord(new_foo, 0)
        new_ref = iris.fileformats.rules._ensure_aligned({}, self.ref,
                                                         self.target)
        self.assertIsNone(new_ref)
    def test_regrid_identity(self):
        # Aligning a cube with an identical target should return an
        # equivalent cube (modulo bounds, which the re-grid discards).
        new_ref = iris.fileformats.rules._ensure_aligned({}, self.ref,
                                                         self.target)
        # Bounds don't make it through the re-grid process
        self.ref.coord('bar').bounds = None
        self.ref.coord('foo').bounds = None
        self.assertEqual(new_ref, self.ref)
@tests.skip_data
class TestPPLoading(tests.IrisTest):
    """Basic smoke test: load a simple PP file and compare against CML."""
    def test_simple(self):
        cube = iris.tests.stock.simple_pp()
        self.assertCML(cube, ('cube_io', 'pp', 'load', 'global.cml'))
@tests.skip_data
class TestPPLoadRules(tests.IrisTest):
    """Tests for PP load-rule evaluation: lbproc cell methods and process flags."""
    def test_pp_load_rules(self):
        # Test PP loading and rule evaluation.
        cube = iris.tests.stock.simple_pp()
        self.assertCML(cube, ('pp_rules', 'global.cml'))
        data_path = tests.get_data_path(('PP', 'rotated_uk', 'rotated_uk.pp'))
        cube = iris.load(data_path)[0]
        self.assertCML(cube, ('pp_rules', 'rotated_uk.cml'))
    def test_lbproc(self):
        """Load a mean/max/min file and check the resulting cubes against CML."""
        data_path = tests.get_data_path(('PP', 'meanMaxMin', '200806081200__qwpb.T24.pp'))
        # Set up standard name and T+24 constraint
        constraint = iris.Constraint('air_temperature', forecast_period=24)
        cubes = iris.load(data_path, constraint)
        cubes = iris.cube.CubeList([cubes[0], cubes[3], cubes[1], cubes[2], cubes[4]])
        self.assertCML(cubes, ('pp_rules', 'lbproc_mean_max_min.cml'))
    def test_cell_methods(self):
        # Test cell methods are created for correct values of lbproc
        orig_file = tests.get_data_path(('PP', 'aPPglob1', 'global.pp'))
        # Values that result in cell methods being created
        cell_method_values = {64: "mean",
                              128: "mean within years",
                              4096: "minimum",
                              8192: "maximum"}
        # Make test values as list of single bit values and some multiple bit values
        single_bit_values = list(iris.fileformats.pp.LBPROC_PAIRS)
        multiple_bit_values = [(128 + 32, ""), (4096 + 2096, ""), (8192 + 1024, "")]
        test_values = list(single_bit_values) + multiple_bit_values
        for value, _ in test_values:
            f = next(iris.fileformats.pp.load(orig_file))
            f.lbproc = value # set value
            # Write out pp file
            temp_filename = iris.util.create_temp_filename(".pp")
            f.save(open(temp_filename, 'wb'))
            # Load pp file
            cube = iris.load_cube(temp_filename)
            if value in cell_method_values:
                # Check for cell method on cube
                self.assertEqual(cube.cell_methods[0].method, cell_method_values[value])
            else:
                # Check no cell method was created for values other than 128, 4096, 8192
                self.assertEqual(len(cube.cell_methods), 0)
            os.remove(temp_filename)
    def test_process_flags(self):
        # Test that process flags are created for correct values of lbproc
        orig_file = tests.get_data_path(('PP', 'aPPglob1', 'global.pp'))
        # Values that result in process flags attribute NOT being created
        omit_process_flags_values = (64, 128, 4096, 8192)
        # Test single flag values
        for value, _ in iris.fileformats.pp.LBPROC_PAIRS:
            f = next(iris.fileformats.pp.load(orig_file))
            f.lbproc = value # set value
            # Write out pp file
            temp_filename = iris.util.create_temp_filename(".pp")
            f.save(open(temp_filename, 'wb'))
            # Load pp file
            cube = iris.load_cube(temp_filename)
            if value in omit_process_flags_values:
                # Check ukmo__process_flags attribute not created
                self.assertEqual(cube.attributes.get("ukmo__process_flags", None), None)
            else:
                # Check ukmo__process_flags attribute contains correct values
                self.assertIn(iris.fileformats.pp.lbproc_map[value], cube.attributes["ukmo__process_flags"])
            os.remove(temp_filename)
        # Test multiple flag values
        multiple_bit_values = ((128, 32), (4096, 1024), (8192, 1024))
        # Maps lbproc value to the process flags that should be created
        multiple_map = {sum(x) : [iris.fileformats.pp.lbproc_map[y] for y in x] for x in multiple_bit_values}
        for bit_values in multiple_bit_values:
            f = next(iris.fileformats.pp.load(orig_file))
            f.lbproc = sum(bit_values) # set value
            # Write out pp file
            temp_filename = iris.util.create_temp_filename(".pp")
            f.save(open(temp_filename, 'wb'))
            # Load pp file
            cube = iris.load_cube(temp_filename)
            # Check the process flags created
            self.assertEqual(set(cube.attributes['ukmo__process_flags']),
                             set(multiple_map[sum(bit_values)]),
                             'Mismatch between expected and actual process '
                             'flags.')
            os.remove(temp_filename)
if __name__ == "__main__":
    # Run this test module directly via iris' test runner.
    tests.main()
| jkettleb/iris | lib/iris/tests/test_pp_to_cube.py | Python | lgpl-3.0 | 9,321 |
# -*- coding: utf-8 -*-
"""
TabbedCode specs
=================
"""
import os
from docutils.parsers.rst import Directive
from docutils import nodes
from sphinx.util.osutil import copyfile
# Names of the static assets shipped alongside this extension module.
CSS_FILE = 'tabbedcode.css'
JS_FILE = 'tabbedcode.js'
class TabbedCodeDirective(Directive):
    """
    Directive wrapping a group of code blocks that show the same example
    in several languages.  When rendered as HTML, the contained blocks are
    rolled up into one display area with buttons to switch between the
    languages.
    """
    has_content = True

    def run(self):
        self.assert_has_content()
        # Wrap the raw body in a container node tagged for the JS/CSS assets.
        container = nodes.container('\n'.join(self.content))
        container['classes'].append('tabbed-code')
        self.add_name(container)
        # Parse the nested body so the contained code-blocks render normally.
        self.state.nested_parse(self.content, self.content_offset, container)
        return [container]
def add_assets(app):
    """Register the extension's stylesheet and script with the HTML builder."""
    # NOTE(review): these helpers were renamed add_css_file/add_js_file in
    # newer Sphinx releases — confirm the targeted Sphinx version before upgrading.
    app.add_stylesheet(CSS_FILE)
    app.add_javascript(JS_FILE)
def copy_assets(app, exception):
    """Copy the extension's CSS/JS assets into the HTML build's _static dir.

    Connected to the 'build-finished' event; does nothing for non-HTML
    builders or when the build raised an exception.
    """
    if app.builder.name != 'html' or exception:
        return
    app.info('Copying tabbedcode stylesheet/javascript... ', nonl=True)
    # Both assets live alongside this module; copy each into _static.
    # (De-duplicates the previous copy-paste of identical CSS/JS handling.)
    here = os.path.abspath(os.path.dirname(__file__))
    for asset in (CSS_FILE, JS_FILE):
        dest = os.path.join(app.builder.outdir, '_static', asset)
        source = os.path.join(here, asset)
        copyfile(source, dest)
    app.info('done')
def setup(app):
    """Sphinx extension entry point: register the directive and asset hooks."""
    app.add_directive('tabbed-code', TabbedCodeDirective)
    # Register assets once the builder exists; copy them after a finished build.
    app.connect('builder-inited', add_assets)
    app.connect('build-finished', copy_assets)
| ianclegg/eventuate | src/sphinx/_ext/tabbedcode.py | Python | apache-2.0 | 1,718 |
#!/usr/bin/env python
from distutils.core import setup
from catkin_pkg.python_setup import generate_distutils_setup
# Generate distutils setup arguments from this ROS package's package.xml
# via catkin's helper, then hand them to distutils.
d = generate_distutils_setup(
    packages=['deformable_ompl'],
    package_dir={'': 'src'}
)
setup(**d)
| WPI-ARC/deformable_planners | deformable_ompl/setup.py | Python | bsd-2-clause | 224 |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2007-2011 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://babel.edgewall.org/wiki/License.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://babel.edgewall.org/log/.
from __future__ import unicode_literals
from datetime import datetime
import doctest
import unittest
from babel.compat import StringIO, BytesIO
from babel.core import Locale
from babel.messages.catalog import Catalog, Message
from babel.messages import pofile
from babel.util import FixedOffsetTimezone
class ReadPoTestCase(unittest.TestCase):
    """Tests for ``pofile.read_po``: header parsing, charsets, plural forms,
    message context and obsolete-message handling."""
    def test_preserve_locale(self):
        buf = StringIO(r'''msgid "foo"
msgstr "Voh"''')
        catalog = pofile.read_po(buf, locale='en_US')
        self.assertEqual(Locale('en', 'US'), catalog.locale)
    def test_preserve_domain(self):
        buf = StringIO(r'''msgid "foo"
msgstr "Voh"''')
        catalog = pofile.read_po(buf, domain='mydomain')
        self.assertEqual('mydomain', catalog.domain)
    def test_applies_specified_encoding_during_read(self):
        # The msgstr below is latin-1 encoded; the charset declared in the
        # PO header must be used to decode it.
        buf = BytesIO('''
msgid ""
msgstr ""
"Project-Id-Version: 3.15\\n"
"Report-Msgid-Bugs-To: Fliegender Zirkus <fliegender@zirkus.de>\\n"
"POT-Creation-Date: 2007-09-27 11:19+0700\\n"
"PO-Revision-Date: 2007-09-27 21:42-0700\\n"
"Last-Translator: John <cleese@bavaria.de>\\n"
"Language-Team: German Lang <de@babel.org>\\n"
"Plural-Forms: nplurals=2; plural=(n != 1)\\n"
"MIME-Version: 1.0\\n"
"Content-Type: text/plain; charset=iso-8859-1\\n"
"Content-Transfer-Encoding: 8bit\\n"
"Generated-By: Babel 1.0dev-r313\\n"
msgid "foo"
msgstr "bär"'''.encode('iso-8859-1'))
        catalog = pofile.read_po(buf, locale='de_DE')
        self.assertEqual('b\xe4r', catalog.get('foo').string)
    def test_read_multiline(self):
        """Multi-line msgids are re-joined, even with very long words."""
        buf = StringIO(r'''msgid ""
"Here's some text that\n"
"includesareallylongwordthatmightbutshouldnt"
" throw us into an infinite "
"loop\n"
msgstr ""''')
        catalog = pofile.read_po(buf)
        self.assertEqual(1, len(catalog))
        message = list(catalog)[1]
        self.assertEqual("Here's some text that\nincludesareallylongwordthat"
                         "mightbutshouldnt throw us into an infinite loop\n",
                         message.id)
    def test_fuzzy_header(self):
        buf = StringIO(r'''\
# Translations template for AReallyReallyLongNameForAProject.
# Copyright (C) 2007 ORGANIZATION
# This file is distributed under the same license as the
# AReallyReallyLongNameForAProject project.
# FIRST AUTHOR <EMAIL@ADDRESS>, 2007.
#
#, fuzzy
''')
        catalog = pofile.read_po(buf)
        self.assertEqual(1, len(list(catalog)))
        self.assertEqual(True, list(catalog)[0].fuzzy)
    def test_not_fuzzy_header(self):
        buf = StringIO(r'''\
# Translations template for AReallyReallyLongNameForAProject.
# Copyright (C) 2007 ORGANIZATION
# This file is distributed under the same license as the
# AReallyReallyLongNameForAProject project.
# FIRST AUTHOR <EMAIL@ADDRESS>, 2007.
#
''')
        catalog = pofile.read_po(buf)
        self.assertEqual(1, len(list(catalog)))
        self.assertEqual(False, list(catalog)[0].fuzzy)
    def test_header_entry(self):
        """All standard header fields are extracted into catalog attributes."""
        buf = StringIO(r'''\
# SOME DESCRIPTIVE TITLE.
# Copyright (C) 2007 THE PACKAGE'S COPYRIGHT HOLDER
# This file is distributed under the same license as the PACKAGE package.
# FIRST AUTHOR <EMAIL@ADDRESS>, 2007.
#
#, fuzzy
msgid ""
msgstr ""
"Project-Id-Version: 3.15\n"
"Report-Msgid-Bugs-To: Fliegender Zirkus <fliegender@zirkus.de>\n"
"POT-Creation-Date: 2007-09-27 11:19+0700\n"
"PO-Revision-Date: 2007-09-27 21:42-0700\n"
"Last-Translator: John <cleese@bavaria.de>\n"
"Language-Team: German Lang <de@babel.org>\n"
"Plural-Forms: nplurals=2; plural=(n != 1)\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=iso-8859-2\n"
"Content-Transfer-Encoding: 8bit\n"
"Generated-By: Babel 1.0dev-r313\n"
''')
        catalog = pofile.read_po(buf)
        self.assertEqual(1, len(list(catalog)))
        self.assertEqual('3.15', catalog.version)
        self.assertEqual('Fliegender Zirkus <fliegender@zirkus.de>',
                         catalog.msgid_bugs_address)
        self.assertEqual(datetime(2007, 9, 27, 11, 19,
                                  tzinfo=FixedOffsetTimezone(7 * 60)),
                         catalog.creation_date)
        self.assertEqual('John <cleese@bavaria.de>', catalog.last_translator)
        self.assertEqual('German Lang <de@babel.org>', catalog.language_team)
        self.assertEqual('iso-8859-2', catalog.charset)
        self.assertEqual(True, list(catalog)[0].fuzzy)
    def test_obsolete_message(self):
        buf = StringIO(r'''# This is an obsolete message
#~ msgid "foo"
#~ msgstr "Voh"
# This message is not obsolete
#: main.py:1
msgid "bar"
msgstr "Bahr"
''')
        catalog = pofile.read_po(buf)
        self.assertEqual(1, len(catalog))
        self.assertEqual(1, len(catalog.obsolete))
        message = catalog.obsolete['foo']
        self.assertEqual('foo', message.id)
        self.assertEqual('Voh', message.string)
        self.assertEqual(['This is an obsolete message'], message.user_comments)
    def test_obsolete_message_ignored(self):
        buf = StringIO(r'''# This is an obsolete message
#~ msgid "foo"
#~ msgstr "Voh"
# This message is not obsolete
#: main.py:1
msgid "bar"
msgstr "Bahr"
''')
        catalog = pofile.read_po(buf, ignore_obsolete=True)
        self.assertEqual(1, len(catalog))
        self.assertEqual(0, len(catalog.obsolete))
    def test_with_context(self):
        buf = StringIO(r'''# Some string in the menu
#: main.py:1
msgctxt "Menu"
msgid "foo"
msgstr "Voh"
# Another string in the menu
#: main.py:2
msgctxt "Menu"
msgid "bar"
msgstr "Bahr"
''')
        catalog = pofile.read_po(buf, ignore_obsolete=True)
        self.assertEqual(2, len(catalog))
        message = catalog.get('foo', context='Menu')
        self.assertEqual('Menu', message.context)
        message = catalog.get('bar', context='Menu')
        self.assertEqual('Menu', message.context)
        # And verify it pass through write_po
        out_buf = BytesIO()
        pofile.write_po(out_buf, catalog, omit_header=True)
        assert out_buf.getvalue().strip() == buf.getvalue().strip().encode('latin-1'), \
               out_buf.getvalue()
    def test_with_context_two(self):
        buf = StringIO(r'''msgctxt "Menu"
msgid "foo"
msgstr "Voh"
msgctxt "Mannu"
msgid "bar"
msgstr "Bahr"
''')
        catalog = pofile.read_po(buf, ignore_obsolete=True)
        self.assertEqual(2, len(catalog))
        message = catalog.get('foo', context='Menu')
        self.assertEqual('Menu', message.context)
        message = catalog.get('bar', context='Mannu')
        self.assertEqual('Mannu', message.context)
        # And verify it pass through write_po
        out_buf = BytesIO()
        pofile.write_po(out_buf, catalog, omit_header=True)
        assert out_buf.getvalue().strip() == buf.getvalue().strip().encode('latin-1'), out_buf.getvalue()
    def test_single_plural_form(self):
        buf = StringIO(r'''msgid "foo"
msgid_plural "foos"
msgstr[0] "Voh"''')
        catalog = pofile.read_po(buf, locale='ja_JP')
        self.assertEqual(1, len(catalog))
        self.assertEqual(1, catalog.num_plurals)
        message = catalog['foo']
        self.assertEqual(1, len(message.string))
    def test_singular_plural_form(self):
        buf = StringIO(r'''msgid "foo"
msgid_plural "foos"
msgstr[0] "Voh"
msgstr[1] "Vohs"''')
        catalog = pofile.read_po(buf, locale='nl_NL')
        self.assertEqual(1, len(catalog))
        self.assertEqual(2, catalog.num_plurals)
        message = catalog['foo']
        self.assertEqual(2, len(message.string))
    def test_more_than_two_plural_forms(self):
        buf = StringIO(r'''msgid "foo"
msgid_plural "foos"
msgstr[0] "Voh"
msgstr[1] "Vohs"
msgstr[2] "Vohss"''')
        catalog = pofile.read_po(buf, locale='lv_LV')
        self.assertEqual(1, len(catalog))
        self.assertEqual(3, catalog.num_plurals)
        message = catalog['foo']
        self.assertEqual(3, len(message.string))
        self.assertEqual('Vohss', message.string[2])
    def test_plural_with_square_brackets(self):
        buf = StringIO(r'''msgid "foo"
msgid_plural "foos"
msgstr[0] "Voh [text]"
msgstr[1] "Vohs [text]"''')
        catalog = pofile.read_po(buf, locale='nb_NO')
        self.assertEqual(1, len(catalog))
        self.assertEqual(2, catalog.num_plurals)
        message = catalog['foo']
        self.assertEqual(2, len(message.string))
class WritePoTestCase(unittest.TestCase):
    """Tests for ``pofile.write_po``: wrapping, comments, obsolete and
    previous-id entries, sorting and charset handling."""
    def test_join_locations(self):
        catalog = Catalog()
        catalog.add('foo', locations=[('main.py', 1)])
        catalog.add('foo', locations=[('utils.py', 3)])
        buf = BytesIO()
        pofile.write_po(buf, catalog, omit_header=True)
        self.assertEqual(b'''#: main.py:1 utils.py:3
msgid "foo"
msgstr ""''', buf.getvalue().strip())
    def test_write_po_file_with_specified_charset(self):
        catalog = Catalog(charset='iso-8859-1')
        catalog.add('foo', '\xe4\xf6\xfc', locations=[('main.py', 1)])
        buf = BytesIO()
        pofile.write_po(buf, catalog, omit_header=False)
        po_file = buf.getvalue().strip()
        assert br'"Content-Type: text/plain; charset=iso-8859-1\n"' in po_file
        assert 'msgstr "\xe4\xf6\xfc"'.encode('iso-8859-1') in po_file
    def test_duplicate_comments(self):
        # Identical auto comments on merged messages appear only once.
        catalog = Catalog()
        catalog.add('foo', auto_comments=['A comment'])
        catalog.add('foo', auto_comments=['A comment'])
        buf = BytesIO()
        pofile.write_po(buf, catalog, omit_header=True)
        self.assertEqual(b'''#. A comment
msgid "foo"
msgstr ""''', buf.getvalue().strip())
    def test_wrap_long_lines(self):
        text = """Here's some text where%s
white space and line breaks matter, and should
not be removed
""" % (" " * 7)
        catalog = Catalog()
        catalog.add(text, locations=[('main.py', 1)])
        buf = BytesIO()
        pofile.write_po(buf, catalog, no_location=True, omit_header=True,
                        width=42)
        self.assertEqual(br'''msgid ""
"Here's some text where       \n"
"white space and line breaks matter, and"
" should\n"
"\n"
"not be removed\n"
"\n"
msgstr ""''', buf.getvalue().strip())
    def test_wrap_long_lines_with_long_word(self):
        text = """Here's some text that
includesareallylongwordthatmightbutshouldnt throw us into an infinite loop
"""
        catalog = Catalog()
        catalog.add(text, locations=[('main.py', 1)])
        buf = BytesIO()
        pofile.write_po(buf, catalog, no_location=True, omit_header=True,
                        width=32)
        self.assertEqual(br'''msgid ""
"Here's some text that\n"
"includesareallylongwordthatmightbutshouldnt"
" throw us into an infinite "
"loop\n"
msgstr ""''', buf.getvalue().strip())
    def test_wrap_long_lines_in_header(self):
        """
        Verify that long lines in the header comment are wrapped correctly.
        """
        catalog = Catalog(project='AReallyReallyLongNameForAProject',
                          revision_date=datetime(2007, 4, 1))
        buf = BytesIO()
        pofile.write_po(buf, catalog)
        self.assertEqual(b'''\
# Translations template for AReallyReallyLongNameForAProject.
# Copyright (C) 2007 ORGANIZATION
# This file is distributed under the same license as the
# AReallyReallyLongNameForAProject project.
# FIRST AUTHOR <EMAIL@ADDRESS>, 2007.
#
#, fuzzy''', b'\n'.join(buf.getvalue().splitlines()[:7]))
    def test_wrap_locations_with_hyphens(self):
        # Hyphenated paths must not be split across location lines.
        catalog = Catalog()
        catalog.add('foo', locations=[
            ('doupy/templates/base/navmenu.inc.html.py', 60)
        ])
        catalog.add('foo', locations=[
            ('doupy/templates/job-offers/helpers.html', 22)
        ])
        buf = BytesIO()
        pofile.write_po(buf, catalog, omit_header=True)
        self.assertEqual(b'''#: doupy/templates/base/navmenu.inc.html.py:60
#: doupy/templates/job-offers/helpers.html:22
msgid "foo"
msgstr ""''', buf.getvalue().strip())
    def test_no_wrap_and_width_behaviour_on_comments(self):
        catalog = Catalog()
        catalog.add("Pretty dam long message id, which must really be big "
                    "to test this wrap behaviour, if not it won't work.",
                    locations=[("fake.py", n) for n in range(1, 30)])
        buf = BytesIO()
        pofile.write_po(buf, catalog, width=None, omit_header=True)
        self.assertEqual(b"""\
#: fake.py:1 fake.py:2 fake.py:3 fake.py:4 fake.py:5 fake.py:6 fake.py:7
#: fake.py:8 fake.py:9 fake.py:10 fake.py:11 fake.py:12 fake.py:13 fake.py:14
#: fake.py:15 fake.py:16 fake.py:17 fake.py:18 fake.py:19 fake.py:20 fake.py:21
#: fake.py:22 fake.py:23 fake.py:24 fake.py:25 fake.py:26 fake.py:27 fake.py:28
#: fake.py:29
msgid "pretty dam long message id, which must really be big to test this wrap behaviour, if not it won't work."
msgstr ""
""", buf.getvalue().lower())
        buf = BytesIO()
        pofile.write_po(buf, catalog, width=100, omit_header=True)
        self.assertEqual(b"""\
#: fake.py:1 fake.py:2 fake.py:3 fake.py:4 fake.py:5 fake.py:6 fake.py:7 fake.py:8 fake.py:9 fake.py:10
#: fake.py:11 fake.py:12 fake.py:13 fake.py:14 fake.py:15 fake.py:16 fake.py:17 fake.py:18 fake.py:19
#: fake.py:20 fake.py:21 fake.py:22 fake.py:23 fake.py:24 fake.py:25 fake.py:26 fake.py:27 fake.py:28
#: fake.py:29
msgid ""
"pretty dam long message id, which must really be big to test this wrap behaviour, if not it won't"
" work."
msgstr ""
""", buf.getvalue().lower())
    def test_pot_with_translator_comments(self):
        catalog = Catalog()
        catalog.add('foo', locations=[('main.py', 1)],
                    auto_comments=['Comment About `foo`'])
        catalog.add('bar', locations=[('utils.py', 3)],
                    user_comments=['Comment About `bar` with',
                                   'multiple lines.'])
        buf = BytesIO()
        pofile.write_po(buf, catalog, omit_header=True)
        self.assertEqual(b'''#. Comment About `foo`
#: main.py:1
msgid "foo"
msgstr ""
# Comment About `bar` with
# multiple lines.
#: utils.py:3
msgid "bar"
msgstr ""''', buf.getvalue().strip())
    def test_po_with_obsolete_message(self):
        catalog = Catalog()
        catalog.add('foo', 'Voh', locations=[('main.py', 1)])
        catalog.obsolete['bar'] = Message('bar', 'Bahr',
                                          locations=[('utils.py', 3)],
                                          user_comments=['User comment'])
        buf = BytesIO()
        pofile.write_po(buf, catalog, omit_header=True)
        self.assertEqual(b'''#: main.py:1
msgid "foo"
msgstr "Voh"
# User comment
#~ msgid "bar"
#~ msgstr "Bahr"''', buf.getvalue().strip())
    def test_po_with_multiline_obsolete_message(self):
        catalog = Catalog()
        catalog.add('foo', 'Voh', locations=[('main.py', 1)])
        msgid = r"""Here's a message that covers
multiple lines, and should still be handled
correctly.
"""
        msgstr = r"""Here's a message that covers
multiple lines, and should still be handled
correctly.
"""
        catalog.obsolete[msgid] = Message(msgid, msgstr,
                                          locations=[('utils.py', 3)])
        buf = BytesIO()
        pofile.write_po(buf, catalog, omit_header=True)
        self.assertEqual(br'''#: main.py:1
msgid "foo"
msgstr "Voh"
#~ msgid ""
#~ "Here's a message that covers\n"
#~ "multiple lines, and should still be handled\n"
#~ "correctly.\n"
#~ msgstr ""
#~ "Here's a message that covers\n"
#~ "multiple lines, and should still be handled\n"
#~ "correctly.\n"''', buf.getvalue().strip())
    def test_po_with_obsolete_message_ignored(self):
        catalog = Catalog()
        catalog.add('foo', 'Voh', locations=[('main.py', 1)])
        catalog.obsolete['bar'] = Message('bar', 'Bahr',
                                          locations=[('utils.py', 3)],
                                          user_comments=['User comment'])
        buf = BytesIO()
        pofile.write_po(buf, catalog, omit_header=True, ignore_obsolete=True)
        self.assertEqual(b'''#: main.py:1
msgid "foo"
msgstr "Voh"''', buf.getvalue().strip())
    def test_po_with_previous_msgid(self):
        catalog = Catalog()
        catalog.add('foo', 'Voh', locations=[('main.py', 1)],
                    previous_id='fo')
        buf = BytesIO()
        pofile.write_po(buf, catalog, omit_header=True, include_previous=True)
        self.assertEqual(b'''#: main.py:1
#| msgid "fo"
msgid "foo"
msgstr "Voh"''', buf.getvalue().strip())
    def test_po_with_previous_msgid_plural(self):
        catalog = Catalog()
        catalog.add(('foo', 'foos'), ('Voh', 'Voeh'),
                    locations=[('main.py', 1)], previous_id=('fo', 'fos'))
        buf = BytesIO()
        pofile.write_po(buf, catalog, omit_header=True, include_previous=True)
        self.assertEqual(b'''#: main.py:1
#| msgid "fo"
#| msgid_plural "fos"
msgid "foo"
msgid_plural "foos"
msgstr[0] "Voh"
msgstr[1] "Voeh"''', buf.getvalue().strip())
    def test_sorted_po(self):
        catalog = Catalog()
        catalog.add('bar', locations=[('utils.py', 3)],
                    user_comments=['Comment About `bar` with',
                                   'multiple lines.'])
        catalog.add(('foo', 'foos'), ('Voh', 'Voeh'),
                    locations=[('main.py', 1)])
        buf = BytesIO()
        pofile.write_po(buf, catalog, sort_output=True)
        value = buf.getvalue().strip()
        assert b'''\
# Comment About `bar` with
# multiple lines.
#: utils.py:3
msgid "bar"
msgstr ""
#: main.py:1
msgid "foo"
msgid_plural "foos"
msgstr[0] "Voh"
msgstr[1] "Voeh"''' in value
        assert value.find(b'msgid ""') < value.find(b'msgid "bar"') < value.find(b'msgid "foo"')
    def test_silent_location_fallback(self):
        # Unparseable location comments degrade to an empty locations list.
        buf = StringIO('''\
#: broken_file.py
msgid "missing line number"
msgstr ""
#: broken_file.py:broken_line_number
msgid "broken line number"
msgstr ""''')
        catalog = pofile.read_po(buf)
        self.assertEqual(catalog['missing line number'].locations, [])
        self.assertEqual(catalog['broken line number'].locations, [])
class PofileFunctionsTestCase(unittest.TestCase):
    """Tests for the module-level helpers ``unescape`` and ``denormalize``."""
    def test_unescape(self):
        escaped = '"Say:\\n \\"hello, world!\\"\\n"'
        unescaped = 'Say:\n "hello, world!"\n'
        self.assertNotEqual(unescaped, escaped)
        self.assertEqual(unescaped, pofile.unescape(escaped))
    def test_unescape_of_quoted_newline(self):
        # regression test for #198
        self.assertEqual(r'\n', pofile.unescape(r'"\\n"'))
    def test_denormalize_on_msgstr_without_empty_first_line(self):
        # handle irregular multi-line msgstr (no "" as first line)
        # gracefully (#171)
        msgstr = '"multi-line\\n"\n" translation"'
        expected_denormalized = 'multi-line\n translation'
        self.assertEqual(expected_denormalized, pofile.denormalize(msgstr))
        self.assertEqual(expected_denormalized,
                         pofile.denormalize('""\n' + msgstr))
def suite():
    """Assemble this module's test suite: doctests plus every TestCase."""
    tests = unittest.TestSuite()
    # Module doctests run with ELLIPSIS so examples may elide output.
    tests.addTest(doctest.DocTestSuite(pofile, optionflags=doctest.ELLIPSIS))
    for case in (ReadPoTestCase, WritePoTestCase, PofileFunctionsTestCase):
        tests.addTest(unittest.makeSuite(case))
    return tests
# Allow running this module directly; defaultTest points at suite() above.
if __name__ == '__main__':
    unittest.main(defaultTest='suite')
| vsajip/babel3 | babel/messages/tests/pofile.py | Python | bsd-3-clause | 19,864 |
# -*- coding: utf-8 -*-
# setup.py
#
# Copyright (C) 2009-2010 Damien Churchill <damoxc@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, write to:
# The Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor
# Boston, MA 02110-1301, USA.
#
from setuptools import setup, find_packages
setup(name='FBUploader',
version='0.2',
license='GPLv3',
description='Facebook photo uploader',
long_description='''''',
author='Damien Churchill',
author_email='damoxc@gmail.com',
packages=['fbuploader'],
package_data={'fbuploader': ['data/*']},
entry_points = """
[console_scripts]
fbuploader = fbuploader.main:main
"""
)
| damoxc/fbuploader | setup.py | Python | gpl-3.0 | 1,250 |
"""
Helper functions for creating Form classes from Django models
and database field objects.
"""
from __future__ import unicode_literals
from collections import OrderedDict
from itertools import chain
import warnings
from django.core.exceptions import (
ImproperlyConfigured, ValidationError, NON_FIELD_ERRORS, FieldError)
from django.forms.fields import Field, ChoiceField
from django.forms.forms import DeclarativeFieldsMetaclass, BaseForm
from django.forms.formsets import BaseFormSet, formset_factory
from django.forms.utils import ErrorList
from django.forms.widgets import (SelectMultiple, HiddenInput,
MultipleHiddenInput)
from django.utils import six
from django.utils.deprecation import RemovedInDjango19Warning
from django.utils.encoding import smart_text, force_text
from django.utils.text import get_text_list, capfirst
from django.utils.translation import ugettext_lazy as _, ugettext
# Public API of this module.
__all__ = (
    'ModelForm', 'BaseModelForm', 'model_to_dict', 'fields_for_model',
    'save_instance', 'ModelChoiceField', 'ModelMultipleChoiceField',
    'ALL_FIELDS', 'BaseModelFormSet', 'modelformset_factory',
    'BaseInlineFormSet', 'inlineformset_factory', 'modelform_factory',
)

# Sentinel value for Meta.fields meaning "use every editable model field".
ALL_FIELDS = '__all__'
def construct_instance(form, instance, fields=None, exclude=None):
    """
    Constructs and returns a model instance from the bound ``form``'s
    ``cleaned_data``, but does not save the returned instance to the
    database.
    """
    from django.db import models
    opts = instance._meta
    cleaned_data = form.cleaned_data

    def _assignable(field):
        # A field is assigned only when it is editable, not an AutoField,
        # present in the cleaned data, requested (if a subset was given)
        # and not explicitly excluded.
        if not field.editable or isinstance(field, models.AutoField):
            return False
        if field.name not in cleaned_data:
            return False
        if fields is not None and field.name not in fields:
            return False
        if exclude and field.name in exclude:
            return False
        return True

    deferred_files = []
    for field in opts.fields:
        if not _assignable(field):
            continue
        # Defer saving file-type fields until after the other fields, so a
        # callable upload_to can use the values from other fields.
        if isinstance(field, models.FileField):
            deferred_files.append(field)
        else:
            field.save_form_data(instance, cleaned_data[field.name])
    for field in deferred_files:
        field.save_form_data(instance, cleaned_data[field.name])
    return instance
def save_instance(form, instance, fields=None, fail_message='saved',
                  commit=True, exclude=None, construct=True):
    """
    Saves bound Form ``form``'s cleaned_data into model instance ``instance``.

    If commit=True, then the changes to ``instance`` will be saved to the
    database. Returns ``instance``.

    If construct=False, assume ``instance`` has already been constructed and
    just needs to be saved.
    """
    if construct:
        instance = construct_instance(form, instance, fields, exclude)
    opts = instance._meta
    if form.errors:
        # Never persist data from an invalid form.
        raise ValueError("The %s could not be %s because the data didn't"
                         " validate." % (opts.object_name, fail_message))

    # Wrap up the saving of m2m data as a function.
    def save_m2m():
        cleaned_data = form.cleaned_data
        # Note that for historical reasons we want to include also
        # virtual_fields here. (GenericRelation was previously a fake
        # m2m field).
        for f in chain(opts.many_to_many, opts.virtual_fields):
            if not hasattr(f, 'save_form_data'):
                continue
            if fields and f.name not in fields:
                continue
            if exclude and f.name in exclude:
                continue
            if f.name in cleaned_data:
                f.save_form_data(instance, cleaned_data[f.name])
    if commit:
        # If we are committing, save the instance and the m2m data immediately.
        instance.save()
        save_m2m()
    else:
        # We're not committing. Add a method to the form to allow deferred
        # saving of m2m data.
        form.save_m2m = save_m2m
    return instance
# ModelForms #################################################################
def model_to_dict(instance, fields=None, exclude=None):
    """
    Returns a dict containing the data in ``instance`` suitable for passing as
    a Form's ``initial`` keyword argument.

    ``fields`` is an optional list of field names. If provided, only the named
    fields will be included in the returned dict.

    ``exclude`` is an optional list of field names. If provided, the named
    fields will be excluded from the returned dict, even if they are listed in
    the ``fields`` argument.
    """
    # avoid a circular import
    from django.db.models.fields.related import ManyToManyField
    opts = instance._meta
    data = {}
    candidates = chain(opts.concrete_fields, opts.virtual_fields, opts.many_to_many)
    for field in candidates:
        if not getattr(field, 'editable', False):
            continue
        if fields and field.name not in fields:
            continue
        if exclude and field.name in exclude:
            continue
        if not isinstance(field, ManyToManyField):
            data[field.name] = field.value_from_object(instance)
            continue
        # M2M handling below.  If the object doesn't have a primary key
        # yet, just use an empty list for its m2m fields; calling
        # value_from_object would raise an exception.
        if instance.pk is None:
            data[field.name] = []
            continue
        # MultipleChoiceWidget needs a list of pks, not object instances.
        qs = field.value_from_object(instance)
        if qs._result_cache is not None:
            data[field.name] = [item.pk for item in qs]
        else:
            data[field.name] = list(qs.values_list('pk', flat=True))
    return data
def fields_for_model(model, fields=None, exclude=None, widgets=None,
                     formfield_callback=None, localized_fields=None,
                     labels=None, help_texts=None, error_messages=None):
    """
    Returns a ``OrderedDict`` containing form fields for the given model.

    ``fields`` is an optional list of field names. If provided, only the named
    fields will be included in the returned fields.

    ``exclude`` is an optional list of field names. If provided, the named
    fields will be excluded from the returned fields, even if they are listed
    in the ``fields`` argument.

    ``widgets`` is a dictionary of model field names mapped to a widget.

    ``localized_fields`` is a list of names of fields which should be localized.

    ``labels`` is a dictionary of model field names mapped to a label.

    ``help_texts`` is a dictionary of model field names mapped to a help text.

    ``error_messages`` is a dictionary of model field names mapped to a
    dictionary of error messages.

    ``formfield_callback`` is a callable that takes a model field and returns
    a form field.
    """
    field_list = []
    # Model fields for which formfield() returned nothing (e.g. a
    # non-editable field); tracked so they aren't reported as unknown.
    ignored = []
    opts = model._meta
    # Avoid circular import
    from django.db.models.fields import Field as ModelField
    sortable_virtual_fields = [f for f in opts.virtual_fields
                               if isinstance(f, ModelField)]
    for f in sorted(chain(opts.concrete_fields, sortable_virtual_fields, opts.many_to_many)):
        if not getattr(f, 'editable', False):
            continue
        if fields is not None and f.name not in fields:
            continue
        if exclude and f.name in exclude:
            continue

        # Collect the per-field customizations into formfield() kwargs.
        kwargs = {}
        if widgets and f.name in widgets:
            kwargs['widget'] = widgets[f.name]
        if localized_fields == ALL_FIELDS or (localized_fields and f.name in localized_fields):
            kwargs['localize'] = True
        if labels and f.name in labels:
            kwargs['label'] = labels[f.name]
        if help_texts and f.name in help_texts:
            kwargs['help_text'] = help_texts[f.name]
        if error_messages and f.name in error_messages:
            kwargs['error_messages'] = error_messages[f.name]

        if formfield_callback is None:
            formfield = f.formfield(**kwargs)
        elif not callable(formfield_callback):
            raise TypeError('formfield_callback must be a function or callable')
        else:
            formfield = formfield_callback(f, **kwargs)

        if formfield:
            field_list.append((f.name, formfield))
        else:
            ignored.append(f.name)
    field_dict = OrderedDict(field_list)
    if fields:
        # Re-order to match the caller-supplied ``fields`` list exactly.
        field_dict = OrderedDict(
            [(f, field_dict.get(f)) for f in fields
                if ((not exclude) or (exclude and f not in exclude)) and (f not in ignored)]
        )
    return field_dict
class ModelFormOptions(object):
    """Container for the options a ModelForm's inner ``Meta`` may declare.

    Every attribute defaults to ``None`` when absent from the options
    object (typically the ``Meta`` class of a ModelForm subclass).
    """
    def __init__(self, options=None):
        for attr in ('model', 'fields', 'exclude', 'widgets',
                     'localized_fields', 'labels', 'help_texts',
                     'error_messages'):
            setattr(self, attr, getattr(options, attr, None))
class ModelFormMetaclass(DeclarativeFieldsMetaclass):
    """
    Metaclass that reads a ModelForm subclass's ``Meta`` options and merges
    the form fields derived from the model with any declaratively defined
    form fields.
    """
    def __new__(mcs, name, bases, attrs):
        formfield_callback = attrs.pop('formfield_callback', None)

        new_class = super(ModelFormMetaclass, mcs).__new__(mcs, name, bases, attrs)

        # ModelForm itself (direct subclass of BaseModelForm) has no Meta
        # to process; return it untouched.
        if bases == (BaseModelForm,):
            return new_class

        opts = new_class._meta = ModelFormOptions(getattr(new_class, 'Meta', None))

        # We check if a string was passed to `fields` or `exclude`,
        # which is likely to be a mistake where the user typed ('foo') instead
        # of ('foo',)
        for opt in ['fields', 'exclude', 'localized_fields']:
            value = getattr(opts, opt)
            if isinstance(value, six.string_types) and value != ALL_FIELDS:
                msg = ("%(model)s.Meta.%(opt)s cannot be a string. "
                       "Did you mean to type: ('%(value)s',)?" % {
                           'model': new_class.__name__,
                           'opt': opt,
                           'value': value,
                       })
                raise TypeError(msg)

        if opts.model:
            # If a model is defined, extract form fields from it.
            if opts.fields is None and opts.exclude is None:
                raise ImproperlyConfigured(
                    "Creating a ModelForm without either the 'fields' attribute "
                    "or the 'exclude' attribute is prohibited; form %s "
                    "needs updating." % name
                )

            if opts.fields == ALL_FIELDS:
                # Sentinel for fields_for_model to indicate "get the list of
                # fields from the model"
                opts.fields = None

            fields = fields_for_model(opts.model, opts.fields, opts.exclude,
                                      opts.widgets, formfield_callback,
                                      opts.localized_fields, opts.labels,
                                      opts.help_texts, opts.error_messages)

            # make sure opts.fields doesn't specify an invalid field
            none_model_fields = [k for k, v in six.iteritems(fields) if not v]
            missing_fields = (set(none_model_fields) -
                              set(new_class.declared_fields.keys()))
            if missing_fields:
                message = 'Unknown field(s) (%s) specified for %s'
                message = message % (', '.join(missing_fields),
                                     opts.model.__name__)
                raise FieldError(message)
            # Override default model fields with any custom declared ones
            # (plus, include all the other declared fields).
            fields.update(new_class.declared_fields)
        else:
            fields = new_class.declared_fields

        new_class.base_fields = fields

        return new_class
class BaseModelForm(BaseForm):
    """
    A Form bound to a model instance: builds its initial data from the
    instance, runs model validation in ``_post_clean()`` and can save the
    cleaned data back to the instance via ``save()``.
    """
    def __init__(self, data=None, files=None, auto_id='id_%s', prefix=None,
                 initial=None, error_class=ErrorList, label_suffix=None,
                 empty_permitted=False, instance=None):
        opts = self._meta
        if opts.model is None:
            raise ValueError('ModelForm has no model class specified.')
        if instance is None:
            # if we didn't get an instance, instantiate a new one
            self.instance = opts.model()
            object_data = {}
        else:
            self.instance = instance
            object_data = model_to_dict(instance, opts.fields, opts.exclude)
        # if initial was provided, it should override the values from instance
        if initial is not None:
            object_data.update(initial)
        # self._validate_unique will be set to True by BaseModelForm.clean().
        # It is False by default so overriding self.clean() and failing to call
        # super will stop validate_unique from being called.
        self._validate_unique = False
        super(BaseModelForm, self).__init__(data, files, auto_id, prefix, object_data,
                                            error_class, label_suffix, empty_permitted)
        # Apply ``limit_choices_to`` to each field.
        for field_name in self.fields:
            formfield = self.fields[field_name]
            if hasattr(formfield, 'queryset') and hasattr(formfield, 'get_limit_choices_to'):
                limit_choices_to = formfield.get_limit_choices_to()
                if limit_choices_to is not None:
                    formfield.queryset = formfield.queryset.complex_filter(limit_choices_to)

    def _get_validation_exclusions(self):
        """
        For backwards-compatibility, several types of fields need to be
        excluded from model validation. See the following tickets for
        details: #12507, #12521, #12553
        """
        exclude = []
        # Build up a list of fields that should be excluded from model field
        # validation and unique checks.
        for f in self.instance._meta.fields:
            field = f.name
            # Exclude fields that aren't on the form. The developer may be
            # adding these values to the model after form validation.
            if field not in self.fields:
                exclude.append(f.name)

            # Don't perform model validation on fields that were defined
            # manually on the form and excluded via the ModelForm's Meta
            # class. See #12901.
            elif self._meta.fields and field not in self._meta.fields:
                exclude.append(f.name)
            elif self._meta.exclude and field in self._meta.exclude:
                exclude.append(f.name)

            # Exclude fields that failed form validation. There's no need for
            # the model fields to validate them as well.
            elif field in self._errors.keys():
                exclude.append(f.name)

            # Exclude empty fields that are not required by the form, if the
            # underlying model field is required. This keeps the model field
            # from raising a required error. Note: don't exclude the field from
            # validation if the model field allows blanks. If it does, the blank
            # value may be included in a unique check, so cannot be excluded
            # from validation.
            else:
                form_field = self.fields[field]
                field_value = self.cleaned_data.get(field, None)
                if not f.blank and not form_field.required and field_value in form_field.empty_values:
                    exclude.append(f.name)
        return exclude

    def clean(self):
        # Flag that clean() ran, which re-enables uniqueness validation
        # in _post_clean(); see the note in __init__.
        self._validate_unique = True
        return self.cleaned_data

    def _update_errors(self, errors):
        # Override any validation error messages defined at the model level
        # with those defined at the form level.
        opts = self._meta
        for field, messages in errors.error_dict.items():
            if (field == NON_FIELD_ERRORS and opts.error_messages and
                    NON_FIELD_ERRORS in opts.error_messages):
                error_messages = opts.error_messages[NON_FIELD_ERRORS]
            elif field in self.fields:
                error_messages = self.fields[field].error_messages
            else:
                continue
            for message in messages:
                if (isinstance(message, ValidationError) and
                        message.code in error_messages):
                    message.message = error_messages[message.code]

        self.add_error(None, errors)

    def _post_clean(self):
        """Run model-level validation after the form's own field cleaning."""
        opts = self._meta

        exclude = self._get_validation_exclusions()

        # a subset of `exclude` which won't have the InlineForeignKeyField
        # if we're adding a new object since that value doesn't exist
        # until after the new instance is saved to the database.
        construct_instance_exclude = list(exclude)

        # Foreign Keys being used to represent inline relationships
        # are excluded from basic field value validation. This is for two
        # reasons: firstly, the value may not be supplied (#12507; the
        # case of providing new values to the admin); secondly the
        # object being referred to may not yet fully exist (#12749).
        # However, these fields *must* be included in uniqueness checks,
        # so this can't be part of _get_validation_exclusions().
        for name, field in self.fields.items():
            if isinstance(field, InlineForeignKeyField):
                if self.cleaned_data.get(name) is not None and self.cleaned_data[name]._state.adding:
                    construct_instance_exclude.append(name)
                exclude.append(name)

        # Update the model instance with self.cleaned_data.
        self.instance = construct_instance(self, self.instance, opts.fields, construct_instance_exclude)

        try:
            self.instance.full_clean(exclude=exclude, validate_unique=False)
        except ValidationError as e:
            self._update_errors(e)

        # Validate uniqueness if needed.
        if self._validate_unique:
            self.validate_unique()

    def validate_unique(self):
        """
        Calls the instance's validate_unique() method and updates the form's
        validation errors if any were raised.
        """
        exclude = self._get_validation_exclusions()
        try:
            self.instance.validate_unique(exclude=exclude)
        except ValidationError as e:
            self._update_errors(e)

    def save(self, commit=True):
        """
        Saves this ``form``'s cleaned_data into model instance
        ``self.instance``.

        If commit=True, then the changes to ``instance`` will be saved to the
        database. Returns ``instance``.
        """
        # fail_message only feeds the error text in save_instance(); the
        # instance was already constructed in _post_clean (construct=False).
        if self.instance.pk is None:
            fail_message = 'created'
        else:
            fail_message = 'changed'
        return save_instance(self, self.instance, self._meta.fields,
                             fail_message, commit, self._meta.exclude,
                             construct=False)
    save.alters_data = True
class ModelForm(six.with_metaclass(ModelFormMetaclass, BaseModelForm)):
    """Concrete ModelForm: BaseModelForm behaviour plus the metaclass that
    builds form fields from ``Meta.model``."""
    pass
def modelform_factory(model, form=ModelForm, fields=None, exclude=None,
                      formfield_callback=None, widgets=None, localized_fields=None,
                      labels=None, help_texts=None, error_messages=None):
    """
    Returns a ModelForm containing form fields for the given model.

    ``fields`` is an optional list of field names. If provided, only the named
    fields will be included in the returned fields. If omitted or '__all__',
    all fields will be used.

    ``exclude`` is an optional list of field names. If provided, the named
    fields will be excluded from the returned fields, even if they are listed
    in the ``fields`` argument.

    ``widgets`` is a dictionary of model field names mapped to a widget.

    ``localized_fields`` is a list of names of fields which should be localized.

    ``formfield_callback`` is a callable that takes a model field and returns
    a form field.

    ``labels`` is a dictionary of model field names mapped to a label.

    ``help_texts`` is a dictionary of model field names mapped to a help text.

    ``error_messages`` is a dictionary of model field names mapped to a
    dictionary of error messages.
    """
    # Create the inner Meta class. FIXME: ideally, we should be able to
    # construct a ModelForm without creating and passing in a temporary
    # inner class.

    # Build up a list of attributes that the Meta object will have.
    attrs = {'model': model}
    if fields is not None:
        attrs['fields'] = fields
    if exclude is not None:
        attrs['exclude'] = exclude
    if widgets is not None:
        attrs['widgets'] = widgets
    if localized_fields is not None:
        attrs['localized_fields'] = localized_fields
    if labels is not None:
        attrs['labels'] = labels
    if help_texts is not None:
        attrs['help_texts'] = help_texts
    if error_messages is not None:
        attrs['error_messages'] = error_messages

    # If parent form class already has an inner Meta, the Meta we're
    # creating needs to inherit from the parent's inner meta.
    parent = (object,)
    if hasattr(form, 'Meta'):
        parent = (form.Meta, object)
    Meta = type(str('Meta'), parent, attrs)

    # Give this new form class a reasonable name.
    class_name = model.__name__ + str('Form')

    # Class attributes for the new form class.
    form_class_attrs = {
        'Meta': Meta,
        'formfield_callback': formfield_callback
    }

    # Check on the merged Meta (factory args plus anything inherited from
    # the parent form's Meta) so the metaclass doesn't fail later.
    if (getattr(Meta, 'fields', None) is None and
            getattr(Meta, 'exclude', None) is None):
        raise ImproperlyConfigured(
            "Calling modelform_factory without defining 'fields' or "
            "'exclude' explicitly is prohibited."
        )

    # Instantiate type(form) in order to use the same metaclass as form.
    return type(form)(class_name, (form,), form_class_attrs)
# ModelFormSets ##############################################################
class BaseModelFormSet(BaseFormSet):
    """
    A ``FormSet`` for editing a queryset and/or adding new objects to it.
    """
    model = None

    def __init__(self, data=None, files=None, auto_id='id_%s', prefix=None,
                 queryset=None, **kwargs):
        self.queryset = queryset
        # ``initial`` here seeds the *extra* (new-object) forms only; the
        # initial forms get their data from the queryset instances.
        self.initial_extra = kwargs.pop('initial', None)
        defaults = {'data': data, 'files': files, 'auto_id': auto_id, 'prefix': prefix}
        defaults.update(kwargs)
        super(BaseModelFormSet, self).__init__(**defaults)

    def initial_form_count(self):
        """Returns the number of forms that are required in this FormSet."""
        if not (self.data or self.files):
            return len(self.get_queryset())
        return super(BaseModelFormSet, self).initial_form_count()

    def _existing_object(self, pk):
        # Lazily build a pk -> instance map so repeated lookups during
        # bound-form construction don't re-query.
        if not hasattr(self, '_object_dict'):
            self._object_dict = {o.pk: o for o in self.get_queryset()}
        return self._object_dict.get(pk)

    def _get_to_python(self, field):
        """
        If the field is a related field, fetch the concrete field's (that
        is, the ultimate pointed-to field's) to_python.
        """
        while field.rel is not None:
            field = field.rel.get_related_field()
        return field.to_python

    def _construct_form(self, i, **kwargs):
        """Attach the matching model instance (or extra-form initial data)
        before delegating to the base formset's form construction."""
        if self.is_bound and i < self.initial_form_count():
            pk_key = "%s-%s" % (self.add_prefix(i), self.model._meta.pk.name)
            pk = self.data[pk_key]
            pk_field = self.model._meta.pk
            to_python = self._get_to_python(pk_field)
            pk = to_python(pk)
            kwargs['instance'] = self._existing_object(pk)
        if i < self.initial_form_count() and 'instance' not in kwargs:
            kwargs['instance'] = self.get_queryset()[i]
        if i >= self.initial_form_count() and self.initial_extra:
            # Set initial values for extra forms
            try:
                kwargs['initial'] = self.initial_extra[i - self.initial_form_count()]
            except IndexError:
                pass
        return super(BaseModelFormSet, self)._construct_form(i, **kwargs)

    def get_queryset(self):
        if not hasattr(self, '_queryset'):
            if self.queryset is not None:
                qs = self.queryset
            else:
                qs = self.model._default_manager.get_queryset()

            # If the queryset isn't already ordered we need to add an
            # artificial ordering here to make sure that all formsets
            # constructed from this queryset have the same form order.
            if not qs.ordered:
                qs = qs.order_by(self.model._meta.pk.name)

            # Removed queryset limiting here. As per discussion re: #13023
            # on django-dev, max_num should not prevent existing
            # related objects/inlines from being displayed.
            self._queryset = qs
        return self._queryset

    def save_new(self, form, commit=True):
        """Saves and returns a new model instance for the given form."""
        return form.save(commit=commit)

    def save_existing(self, form, instance, commit=True):
        """Saves and returns an existing model instance for the given form."""
        return form.save(commit=commit)

    def save(self, commit=True):
        """Saves model instances for every form, adding and changing instances
        as necessary, and returns the list of instances.
        """
        if not commit:
            self.saved_forms = []

            # Deferred m2m saving mirrors ModelForm.save(commit=False).
            def save_m2m():
                for form in self.saved_forms:
                    form.save_m2m()
            self.save_m2m = save_m2m
        return self.save_existing_objects(commit) + self.save_new_objects(commit)

    save.alters_data = True

    def clean(self):
        # Formset-wide cleaning only enforces cross-form uniqueness.
        self.validate_unique()

    def validate_unique(self):
        # Collect unique_checks and date_checks to run from all the forms.
        all_unique_checks = set()
        all_date_checks = set()
        forms_to_delete = self.deleted_forms
        valid_forms = [form for form in self.forms if form.is_valid() and form not in forms_to_delete]
        for form in valid_forms:
            exclude = form._get_validation_exclusions()
            unique_checks, date_checks = form.instance._get_unique_checks(exclude=exclude)
            all_unique_checks = all_unique_checks.union(set(unique_checks))
            all_date_checks = all_date_checks.union(set(date_checks))

        errors = []
        # Do each of the unique checks (unique and unique_together)
        for uclass, unique_check in all_unique_checks:
            seen_data = set()
            for form in valid_forms:
                # get data for each field of each of unique_check
                row_data = (form.cleaned_data[field]
                            for field in unique_check if field in form.cleaned_data)
                # Reduce Model instances to their primary key values
                row_data = tuple(d._get_pk_val() if hasattr(d, '_get_pk_val') else d
                                 for d in row_data)
                if row_data and None not in row_data:
                    # if we've already seen it then we have a uniqueness failure
                    if row_data in seen_data:
                        # poke error messages into the right places and mark
                        # the form as invalid
                        errors.append(self.get_unique_error_message(unique_check))
                        form._errors[NON_FIELD_ERRORS] = self.error_class([self.get_form_error()])
                        # remove the data from the cleaned_data dict since it was invalid
                        for field in unique_check:
                            if field in form.cleaned_data:
                                del form.cleaned_data[field]
                    # mark the data as seen
                    seen_data.add(row_data)
        # iterate over each of the date checks now
        for date_check in all_date_checks:
            seen_data = set()
            uclass, lookup, field, unique_for = date_check
            for form in valid_forms:
                # see if we have data for both fields
                if (form.cleaned_data and form.cleaned_data[field] is not None
                        and form.cleaned_data[unique_for] is not None):
                    # if it's a date lookup we need to get the data for all the fields
                    if lookup == 'date':
                        date = form.cleaned_data[unique_for]
                        date_data = (date.year, date.month, date.day)
                    # otherwise it's just the attribute on the date/datetime
                    # object
                    else:
                        date_data = (getattr(form.cleaned_data[unique_for], lookup),)
                    data = (form.cleaned_data[field],) + date_data
                    # if we've already seen it then we have a uniqueness failure
                    if data in seen_data:
                        # poke error messages into the right places and mark
                        # the form as invalid
                        errors.append(self.get_date_error_message(date_check))
                        form._errors[NON_FIELD_ERRORS] = self.error_class([self.get_form_error()])
                        # remove the data from the cleaned_data dict since it was invalid
                        del form.cleaned_data[field]
                    # mark the data as seen
                    seen_data.add(data)
        if errors:
            raise ValidationError(errors)

    def get_unique_error_message(self, unique_check):
        """Return the user-facing message for a duplicate-row failure."""
        if len(unique_check) == 1:
            return ugettext("Please correct the duplicate data for %(field)s.") % {
                "field": unique_check[0],
            }
        else:
            return ugettext("Please correct the duplicate data for %(field)s, "
                "which must be unique.") % {
                    "field": get_text_list(unique_check, six.text_type(_("and"))),
                }

    def get_date_error_message(self, date_check):
        """Return the user-facing message for a unique_for_date failure."""
        return ugettext("Please correct the duplicate data for %(field_name)s "
            "which must be unique for the %(lookup)s in %(date_field)s.") % {
                'field_name': date_check[2],
                'date_field': date_check[3],
                'lookup': six.text_type(date_check[1]),
            }

    def get_form_error(self):
        """Generic per-form error attached alongside the formset message."""
        return ugettext("Please correct the duplicate values below.")

    def save_existing_objects(self, commit=True):
        """Save (or delete) the instances behind the initial forms; returns
        the saved instances and records changed/deleted objects."""
        self.changed_objects = []
        self.deleted_objects = []
        if not self.initial_forms:
            return []

        saved_instances = []
        forms_to_delete = self.deleted_forms
        for form in self.initial_forms:
            obj = form.instance
            if form in forms_to_delete:
                # If the pk is None, it means that the object can't be
                # deleted again. Possible reason for this is that the
                # object was already deleted from the DB. Refs #14877.
                if obj.pk is None:
                    continue
                self.deleted_objects.append(obj)
                if commit:
                    obj.delete()
            elif form.has_changed():
                self.changed_objects.append((obj, form.changed_data))
                saved_instances.append(self.save_existing(form, obj, commit=commit))
                if not commit:
                    self.saved_forms.append(form)
        return saved_instances

    def save_new_objects(self, commit=True):
        """Save instances for the extra forms that actually contain data."""
        self.new_objects = []
        for form in self.extra_forms:
            if not form.has_changed():
                continue
            # If someone has marked an add form for deletion, don't save the
            # object.
            if self.can_delete and self._should_delete_form(form):
                continue
            self.new_objects.append(self.save_new(form, commit=commit))
            if not commit:
                self.saved_forms.append(form)
        return self.new_objects

    def add_fields(self, form, index):
        """Add a hidden field for the object's primary key."""
        from django.db.models import AutoField, OneToOneField, ForeignKey
        self._pk_field = pk = self.model._meta.pk
        # If a pk isn't editable, then it won't be on the form, so we need to
        # add it here so we can tell which object is which when we get the
        # data back. Generally, pk.editable should be false, but for some
        # reason, auto_created pk fields and AutoField's editable attribute is
        # True, so check for that as well.

        def pk_is_not_editable(pk):
            return ((not pk.editable) or (pk.auto_created or isinstance(pk, AutoField))
                or (pk.rel and pk.rel.parent_link and pk_is_not_editable(pk.rel.to._meta.pk)))
        if pk_is_not_editable(pk) or pk.name not in form.fields:
            if form.is_bound:
                pk_value = form.instance.pk
            else:
                try:
                    if index is not None:
                        pk_value = self.get_queryset()[index].pk
                    else:
                        pk_value = None
                except IndexError:
                    pk_value = None
            if isinstance(pk, OneToOneField) or isinstance(pk, ForeignKey):
                qs = pk.rel.to._default_manager.get_queryset()
            else:
                qs = self.model._default_manager.get_queryset()
            qs = qs.using(form.instance._state.db)
            if form._meta.widgets:
                widget = form._meta.widgets.get(self._pk_field.name, HiddenInput)
            else:
                widget = HiddenInput
            form.fields[self._pk_field.name] = ModelChoiceField(qs, initial=pk_value, required=False, widget=widget)
        super(BaseModelFormSet, self).add_fields(form, index)
def modelformset_factory(model, form=ModelForm, formfield_callback=None,
                         formset=BaseModelFormSet, extra=1, can_delete=False,
                         can_order=False, max_num=None, fields=None, exclude=None,
                         widgets=None, validate_max=False, localized_fields=None,
                         labels=None, help_texts=None, error_messages=None,
                         min_num=None, validate_min=False):
    """
    Returns a FormSet class for the given Django model class.
    """
    # Enforce the fields/exclude requirement up front, considering both the
    # factory arguments and anything inherited from the form's Meta.
    meta = getattr(form, 'Meta', None)
    if meta is None:
        meta = type(str('Meta'), (object,), {})
    if (getattr(meta, 'fields', fields) is None and
            getattr(meta, 'exclude', exclude) is None):
        raise ImproperlyConfigured(
            "Calling modelformset_factory without defining 'fields' or "
            "'exclude' explicitly is prohibited."
        )

    # Build the per-row ModelForm first, then wrap it in a FormSet class.
    form = modelform_factory(model, form=form, fields=fields, exclude=exclude,
                             formfield_callback=formfield_callback,
                             widgets=widgets, localized_fields=localized_fields,
                             labels=labels, help_texts=help_texts, error_messages=error_messages)
    FormSet = formset_factory(form, formset, extra=extra, min_num=min_num, max_num=max_num,
                              can_order=can_order, can_delete=can_delete,
                              validate_min=validate_min, validate_max=validate_max)
    FormSet.model = model
    return FormSet
# InlineFormSets #############################################################
class BaseInlineFormSet(BaseModelFormSet):
    """A formset for child objects related to a parent."""
    def __init__(self, data=None, files=None, instance=None,
                 save_as_new=False, prefix=None, queryset=None, **kwargs):
        if instance is None:
            # No parent supplied: use an unsaved parent so the queryset
            # below comes out empty.
            self.instance = self.fk.rel.to()
        else:
            self.instance = instance
        self.save_as_new = save_as_new
        if queryset is None:
            queryset = self.model._default_manager
        if self.instance.pk is not None:
            qs = queryset.filter(**{self.fk.name: self.instance})
        else:
            qs = queryset.none()
        super(BaseInlineFormSet, self).__init__(data, files, prefix=prefix,
                                                queryset=qs, **kwargs)

    def initial_form_count(self):
        # In save_as_new mode every form is treated as an add form.
        if self.save_as_new:
            return 0
        return super(BaseInlineFormSet, self).initial_form_count()

    def _construct_form(self, i, **kwargs):
        form = super(BaseInlineFormSet, self)._construct_form(i, **kwargs)
        if self.save_as_new:
            # Remove the primary key from the form's data, we are only
            # creating new instances
            form.data[form.add_prefix(self._pk_field.name)] = None

            # Remove the foreign key from the form's data
            form.data[form.add_prefix(self.fk.name)] = None

        # Set the fk value here so that the form can do its validation.
        fk_value = self.instance.pk
        if self.fk.rel.field_name != self.fk.rel.to._meta.pk.name:
            fk_value = getattr(self.instance, self.fk.rel.field_name)
            fk_value = getattr(fk_value, 'pk', fk_value)
        setattr(form.instance, self.fk.get_attname(), fk_value)
        return form

    @classmethod
    def get_default_prefix(cls):
        # e.g. the related accessor name, stripped of any '+' markers.
        return cls.fk.rel.get_accessor_name(model=cls.model).replace('+', '')

    def save_new(self, form, commit=True):
        # Use commit=False so we can assign the parent key afterwards, then
        # save the object.
        obj = form.save(commit=False)
        pk_value = getattr(self.instance, self.fk.rel.field_name)
        setattr(obj, self.fk.get_attname(), getattr(pk_value, 'pk', pk_value))
        if commit:
            obj.save()
        # form.save_m2m() can be called via the formset later on if commit=False
        if commit and hasattr(form, 'save_m2m'):
            form.save_m2m()
        return obj

    def add_fields(self, form, index):
        super(BaseInlineFormSet, self).add_fields(form, index)
        if self._pk_field == self.fk:
            name = self._pk_field.name
            kwargs = {'pk_field': True}
        else:
            # The foreign key field might not be on the form, so we poke at the
            # Model field to get the label, since we need that for error messages.
            name = self.fk.name
            kwargs = {
                'label': getattr(form.fields.get(name), 'label', capfirst(self.fk.verbose_name))
            }
            if self.fk.rel.field_name != self.fk.rel.to._meta.pk.name:
                kwargs['to_field'] = self.fk.rel.field_name

        form.fields[name] = InlineForeignKeyField(self.instance, **kwargs)

        # Add the generated field to form._meta.fields if it's defined to make
        # sure validation isn't skipped on that field.
        if form._meta.fields:
            if isinstance(form._meta.fields, tuple):
                form._meta.fields = list(form._meta.fields)
            form._meta.fields.append(self.fk.name)

    def get_unique_error_message(self, unique_check):
        # The parent FK is implied by the inline, so leave it out of the
        # reported duplicate-field list.
        unique_check = [field for field in unique_check if field != self.fk.name]
        return super(BaseInlineFormSet, self).get_unique_error_message(unique_check)
def _get_foreign_key(parent_model, model, fk_name=None, can_fail=False):
"""
Finds and returns the ForeignKey from model to parent if there is one
(returns None if can_fail is True and no such field exists). If fk_name is
provided, assume it is the name of the ForeignKey field. Unless can_fail is
True, an exception is raised if there is no ForeignKey from model to
parent_model.
"""
# avoid circular import
from django.db.models import ForeignKey
opts = model._meta
if fk_name:
fks_to_parent = [f for f in opts.fields if f.name == fk_name]
if len(fks_to_parent) == 1:
fk = fks_to_parent[0]
if not isinstance(fk, ForeignKey) or \
(fk.rel.to != parent_model and
fk.rel.to not in parent_model._meta.get_parent_list()):
raise ValueError(
"fk_name '%s' is not a ForeignKey to '%s.%s'."
% (fk_name, parent_model._meta.app_label, parent_model._meta.object_name))
elif len(fks_to_parent) == 0:
raise ValueError(
"'%s.%s' has no field named '%s'."
% (model._meta.app_label, model._meta.object_name, fk_name))
else:
# Try to discover what the ForeignKey from model to parent_model is
fks_to_parent = [
f for f in opts.fields
if isinstance(f, ForeignKey)
and (f.rel.to == parent_model
or f.rel.to in parent_model._meta.get_parent_list())
]
if len(fks_to_parent) == 1:
fk = fks_to_parent[0]
elif len(fks_to_parent) == 0:
if can_fail:
return
raise ValueError(
"'%s.%s' has no ForeignKey to '%s.%s'." % (
model._meta.app_label,
model._meta.object_name,
parent_model._meta.app_label,
parent_model._meta.object_name,
)
)
else:
raise ValueError(
"'%s.%s' has more than one ForeignKey to '%s.%s'." % (
model._meta.app_label,
model._meta.object_name,
parent_model._meta.app_label,
parent_model._meta.object_name,
)
)
return fk
def inlineformset_factory(parent_model, model, form=ModelForm,
formset=BaseInlineFormSet, fk_name=None,
fields=None, exclude=None, extra=3, can_order=False,
can_delete=True, max_num=None, formfield_callback=None,
widgets=None, validate_max=False, localized_fields=None,
labels=None, help_texts=None, error_messages=None,
min_num=None, validate_min=False):
"""
Returns an ``InlineFormSet`` for the given kwargs.
You must provide ``fk_name`` if ``model`` has more than one ``ForeignKey``
to ``parent_model``.
"""
fk = _get_foreign_key(parent_model, model, fk_name=fk_name)
# enforce a max_num=1 when the foreign key to the parent model is unique.
if fk.unique:
max_num = 1
kwargs = {
'form': form,
'formfield_callback': formfield_callback,
'formset': formset,
'extra': extra,
'can_delete': can_delete,
'can_order': can_order,
'fields': fields,
'exclude': exclude,
'min_num': min_num,
'max_num': max_num,
'widgets': widgets,
'validate_min': validate_min,
'validate_max': validate_max,
'localized_fields': localized_fields,
'labels': labels,
'help_texts': help_texts,
'error_messages': error_messages,
}
FormSet = modelformset_factory(model, **kwargs)
FormSet.fk = fk
return FormSet
# Fields #####################################################################
class InlineForeignKeyField(Field):
"""
A basic integer field that deals with validating the given value to a
given parent instance in an inline.
"""
widget = HiddenInput
default_error_messages = {
'invalid_choice': _('The inline foreign key did not match the parent instance primary key.'),
}
def __init__(self, parent_instance, *args, **kwargs):
self.parent_instance = parent_instance
self.pk_field = kwargs.pop("pk_field", False)
self.to_field = kwargs.pop("to_field", None)
if self.parent_instance is not None:
if self.to_field:
kwargs["initial"] = getattr(self.parent_instance, self.to_field)
else:
kwargs["initial"] = self.parent_instance.pk
kwargs["required"] = False
super(InlineForeignKeyField, self).__init__(*args, **kwargs)
def clean(self, value):
if value in self.empty_values:
if self.pk_field:
return None
# if there is no value act as we did before.
return self.parent_instance
# ensure the we compare the values as equal types.
if self.to_field:
orig = getattr(self.parent_instance, self.to_field)
else:
orig = self.parent_instance.pk
if force_text(value) != force_text(orig):
raise ValidationError(self.error_messages['invalid_choice'], code='invalid_choice')
return self.parent_instance
def has_changed(self, initial, data):
return False
class ModelChoiceIterator(object):
def __init__(self, field):
self.field = field
self.queryset = field.queryset
def __iter__(self):
if self.field.empty_label is not None:
yield ("", self.field.empty_label)
if self.field.cache_choices:
if self.field.choice_cache is None:
self.field.choice_cache = [
self.choice(obj) for obj in self.queryset.iterator()
]
for choice in self.field.choice_cache:
yield choice
else:
for obj in self.queryset.iterator():
yield self.choice(obj)
def __len__(self):
return (len(self.queryset) +
(1 if self.field.empty_label is not None else 0))
def choice(self, obj):
return (self.field.prepare_value(obj), self.field.label_from_instance(obj))
class ModelChoiceField(ChoiceField):
"""A ChoiceField whose choices are a model QuerySet."""
# This class is a subclass of ChoiceField for purity, but it doesn't
# actually use any of ChoiceField's implementation.
default_error_messages = {
'invalid_choice': _('Select a valid choice. That choice is not one of'
' the available choices.'),
}
def __init__(self, queryset, empty_label="---------", cache_choices=None,
required=True, widget=None, label=None, initial=None,
help_text='', to_field_name=None, limit_choices_to=None,
*args, **kwargs):
if required and (initial is not None):
self.empty_label = None
else:
self.empty_label = empty_label
if cache_choices is not None:
warnings.warn("cache_choices has been deprecated and will be "
"removed in Django 1.9.",
RemovedInDjango19Warning, stacklevel=2)
else:
cache_choices = False
self.cache_choices = cache_choices
# Call Field instead of ChoiceField __init__() because we don't need
# ChoiceField.__init__().
Field.__init__(self, required, widget, label, initial, help_text,
*args, **kwargs)
self.queryset = queryset
self.limit_choices_to = limit_choices_to # limit the queryset later.
self.choice_cache = None
self.to_field_name = to_field_name
def get_limit_choices_to(self):
"""
Returns ``limit_choices_to`` for this form field.
If it is a callable, it will be invoked and the result will be
returned.
"""
if callable(self.limit_choices_to):
return self.limit_choices_to()
return self.limit_choices_to
def __deepcopy__(self, memo):
result = super(ChoiceField, self).__deepcopy__(memo)
# Need to force a new ModelChoiceIterator to be created, bug #11183
result.queryset = result.queryset
return result
def _get_queryset(self):
return self._queryset
def _set_queryset(self, queryset):
self._queryset = queryset
self.widget.choices = self.choices
queryset = property(_get_queryset, _set_queryset)
# this method will be used to create object labels by the QuerySetIterator.
# Override it to customize the label.
def label_from_instance(self, obj):
"""
This method is used to convert objects into strings; it's used to
generate the labels for the choices presented by this object. Subclasses
can override this method to customize the display of the choices.
"""
return smart_text(obj)
def _get_choices(self):
# If self._choices is set, then somebody must have manually set
# the property self.choices. In this case, just return self._choices.
if hasattr(self, '_choices'):
return self._choices
# Otherwise, execute the QuerySet in self.queryset to determine the
# choices dynamically. Return a fresh ModelChoiceIterator that has not been
# consumed. Note that we're instantiating a new ModelChoiceIterator *each*
# time _get_choices() is called (and, thus, each time self.choices is
# accessed) so that we can ensure the QuerySet has not been consumed. This
# construct might look complicated but it allows for lazy evaluation of
# the queryset.
return ModelChoiceIterator(self)
choices = property(_get_choices, ChoiceField._set_choices)
def prepare_value(self, value):
if hasattr(value, '_meta'):
if self.to_field_name:
return value.serializable_value(self.to_field_name)
else:
return value.pk
return super(ModelChoiceField, self).prepare_value(value)
def to_python(self, value):
if value in self.empty_values:
return None
try:
key = self.to_field_name or 'pk'
value = self.queryset.get(**{key: value})
except (ValueError, TypeError, self.queryset.model.DoesNotExist):
raise ValidationError(self.error_messages['invalid_choice'], code='invalid_choice')
return value
def validate(self, value):
return Field.validate(self, value)
def has_changed(self, initial, data):
initial_value = initial if initial is not None else ''
data_value = data if data is not None else ''
return force_text(self.prepare_value(initial_value)) != force_text(data_value)
class ModelMultipleChoiceField(ModelChoiceField):
"""A MultipleChoiceField whose choices are a model QuerySet."""
widget = SelectMultiple
hidden_widget = MultipleHiddenInput
default_error_messages = {
'list': _('Enter a list of values.'),
'invalid_choice': _('Select a valid choice. %(value)s is not one of the'
' available choices.'),
'invalid_pk_value': _('"%(pk)s" is not a valid value for a primary key.')
}
def __init__(self, queryset, cache_choices=None, required=True,
widget=None, label=None, initial=None,
help_text='', *args, **kwargs):
super(ModelMultipleChoiceField, self).__init__(queryset, None,
cache_choices, required, widget, label, initial, help_text,
*args, **kwargs)
def to_python(self, value):
if not value:
return []
return list(self._check_values(value))
def clean(self, value):
if self.required and not value:
raise ValidationError(self.error_messages['required'], code='required')
elif not self.required and not value:
return self.queryset.none()
if not isinstance(value, (list, tuple)):
raise ValidationError(self.error_messages['list'], code='list')
qs = self._check_values(value)
# Since this overrides the inherited ModelChoiceField.clean
# we run custom validators here
self.run_validators(value)
return qs
def _check_values(self, value):
"""
Given a list of possible PK values, returns a QuerySet of the
corresponding objects. Raises a ValidationError if a given value is
invalid (not a valid PK, not in the queryset, etc.)
"""
key = self.to_field_name or 'pk'
# deduplicate given values to avoid creating many querysets or
# requiring the database backend deduplicate efficiently.
try:
value = frozenset(value)
except TypeError:
# list of lists isn't hashable, for example
raise ValidationError(
self.error_messages['list'],
code='list',
)
for pk in value:
try:
self.queryset.filter(**{key: pk})
except (ValueError, TypeError):
raise ValidationError(
self.error_messages['invalid_pk_value'],
code='invalid_pk_value',
params={'pk': pk},
)
qs = self.queryset.filter(**{'%s__in' % key: value})
pks = set(force_text(getattr(o, key)) for o in qs)
for val in value:
if force_text(val) not in pks:
raise ValidationError(
self.error_messages['invalid_choice'],
code='invalid_choice',
params={'value': val},
)
return qs
def prepare_value(self, value):
if (hasattr(value, '__iter__') and
not isinstance(value, six.text_type) and
not hasattr(value, '_meta')):
return [super(ModelMultipleChoiceField, self).prepare_value(v) for v in value]
return super(ModelMultipleChoiceField, self).prepare_value(value)
def has_changed(self, initial, data):
if initial is None:
initial = []
if data is None:
data = []
if len(initial) != len(data):
return True
initial_set = set(force_text(value) for value in self.prepare_value(initial))
data_set = set(force_text(value) for value in data)
return data_set != initial_set
def modelform_defines_fields(form_class):
return (form_class is not None and (
hasattr(form_class, '_meta') and
(form_class._meta.fields is not None or
form_class._meta.exclude is not None)
))
| runekaagaard/django-contrib-locking | django/forms/models.py | Python | bsd-3-clause | 54,740 |
#!/usr/bin/env python3
import sys, json
def crawl(obj):
    """Recursively sum every number in a parsed JSON structure.

    Numbers (int/float, including JSON booleans, which are int subclasses)
    contribute their own value; lists and dicts are crawled recursively
    (only dict *values* are counted); strings and anything else contribute 0.
    """
    if isinstance(obj, (int, float)):
        return obj
    if isinstance(obj, list):
        return sum(crawl(item) for item in obj)
    if isinstance(obj, dict):
        return sum(crawl(item) for item in obj.values())
    # Strings and any other JSON value carry no numeric weight.
    return 0
def main(args):
    """Read a JSON document from stdin and print the sum of all its numbers.

    Each input line is stripped before the lines are re-joined, matching the
    original behaviour.  ``args`` is accepted for the conventional main()
    signature but is unused.
    """
    text = "\n".join(line.strip() for line in sys.stdin)
    document = json.loads(text)
    print(crawl(document))

if __name__ == '__main__':
    sys.exit(main(sys.argv))
| msullivan/advent-of-code | 2015/A12a.py | Python | mit | 504 |
# Copyright (c) 2019 Sebastian Wojciechowski.
#
# This file is part of Bodhi.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
Remove legacy old_updateid.
Revision ID: f8a44498c806
Revises: 8e9dc57e082d
Create Date: 2019-01-11 18:34:04.277123
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'f8a44498c806'  # identifier of this migration
down_revision = '8e9dc57e082d'  # migration this one is applied on top of
def upgrade():
    """Remove database column old_updateid.

    Drops the legacy ``old_updateid`` column from the ``updates`` table;
    its contents are discarded.
    """
    op.drop_column('updates', 'old_updateid')
def downgrade():
    """Restore old_updateid field removed in the upgrade() function.

    Re-adds the column as nullable Unicode(32).  The upgrade's drop is
    lossy, so the original values are not recoverable.
    """
    op.add_column('updates', sa.Column('old_updateid', sa.Unicode(length=32), nullable=True))
| Conan-Kudo/bodhi | bodhi/server/migrations/versions/f8a44498c806_remove_legacy_old_updateid.py | Python | gpl-2.0 | 1,353 |
# -*- coding: utf-8 -*-
# Copyright 2016 Onestein (<http://www.onestein.eu>)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo import api, fields, models
class HrEmployee(models.Model):
    """hr.employee extension adding a unique, sequence-based employee number."""
    _inherit = 'hr.employee'
    # Sequence-generated identifier; copy=False so duplicating a record does
    # not duplicate the number (uniqueness is enforced by the SQL constraint).
    employee_number = fields.Char(copy=False)
    @api.model
    def create(self, vals):
        """Create the employee, assigning the next free sequence number."""
        def get_employee_sequence(Sequence):
            # Next value from the 'hr.employee' ir.sequence, or '/' when no
            # such sequence is configured.
            number = Sequence.next_by_code('hr.employee') or '/'
            return number
        # NOTE(review): a number is generated only when a *truthy*
        # employee_number is already present in vals, and it overwrites the
        # caller's value; nothing is assigned when the field is missing.  If
        # the intent was to fill in a missing number, this should read
        # `if not vals.get('employee_number')` -- confirm against the callers.
        if vals.get('employee_number'):
            searching = True
            # Keep drawing sequence values until one is not already in use
            # (guards against a reset sequence or manually entered numbers).
            while searching:
                number = get_employee_sequence(self.env['ir.sequence'])
                if not self.search([
                    ('employee_number', '=', number)
                ], limit=1):
                    vals['employee_number'] = number
                    searching = False
        return super(HrEmployee, self).create(vals)
    # Database-level guarantee that no two employees share a number.
    _sql_constraints = [
        ('employee_number',
         'unique (employee_number)',
         'Employee Number must be unique !')
    ]
| VitalPet/addons-onestein | hr_employee_number/models/hr_employee.py | Python | agpl-3.0 | 1,051 |
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
import lxml.etree
from turbogears.database import session
from bkr.server.bexceptions import BX
from bkr.inttest import data_setup, DatabaseTestCase
class TestTasks(DatabaseTestCase):
    """Checks that job XML processing honours the task's `valid` flag."""

    def setUp(self):
        session.begin()
        from bkr.server.jobs import Jobs
        self.controller = Jobs()
        self.user = data_setup.create_user()
        self.task = data_setup.create_task(name=u'/fake/task/here')
        tree = data_setup.create_distro_tree()
        # Job definition referencing the fake task on the created distro.
        self.xmljob = lxml.etree.fromstring('''
            <job>
                <whiteboard>job with fake task</whiteboard>
                <recipeSet>
                    <recipe>
                        <distroRequires>
                            <distro_name op="=" value="%s" />
                        </distroRequires>
                        <hostRequires/>
                        <task name="%s" role="STANDALONE">
                            <params/>
                        </task>
                    </recipe>
                </recipeSet>
            </job>
            ''' % (tree.distro.name, self.task.name))
        session.flush()

    def tearDown(self):
        session.rollback()

    def test_enable_task(self):
        # A valid task must be accepted without error.
        self.task.valid = True
        session.flush()
        self.controller.process_xmljob(self.xmljob, self.user)

    def test_disable_task(self):
        # A disabled task must be rejected with BX.
        self.task.valid = False
        session.flush()
        self.assertRaises(BX, self.controller.process_xmljob,
                          self.xmljob, self.user)
| jtoppins/beaker | IntegrationTests/src/bkr/inttest/server/test_tasks.py | Python | gpl-2.0 | 1,769 |
__source__ = 'https://leetcode.com/problems/min-stack/'
# https://github.com/kamyu104/LeetCode/blob/master/Python/min-stack.py
# Time: O(n)
# Space: O(1)
# Stack
#
# Description: Leetcode # 155. Min Stack
#
# Design a stack that supports push, pop, top, and retrieving the minimum element in constant time.
#
# push(x) -- Push element x onto stack.
# pop() -- Removes the element on top of the stack.
# top() -- Get the top element.
# getMin() -- Retrieve the minimum element in the stack.
# Example:
# MinStack minStack = new MinStack();
# minStack.push(-2);
# minStack.push(0);
# minStack.push(-3);
# minStack.getMin(); --> Returns -3.
# minStack.pop();
# minStack.top(); --> Returns 0.
# minStack.getMin(); --> Returns -2.
#
# Companies
# Google Uber Zenefits Amazon Snapchat Bloomberg
# Related Topics
# Stack Design
# Similar Questions
# Sliding Window Maximum
#
import unittest
class MinStack:
    """Stack supporting push/pop/top/getMin in O(1) time with one list.

    Instead of a second stack, each entry stores the *offset* of the pushed
    value from the minimum in effect before the push.  A negative stored
    offset marks the point where the minimum changed, which lets pop()
    restore the previous minimum.

    Bug fixed: the original push() appended the raw value for the first
    element and appended nothing at all afterwards (it only updated `min`),
    so top()/pop()/getMin() returned wrong results -- see the failing
    expected-vs-output transcript recorded further down in this file.
    """

    def __init__(self):
        # Current minimum; undefined (None) while the stack is empty.
        self.min = None
        self.stack = []

    # @param x, an integer
    # @return nothing
    def push(self, x):
        if not self.stack:
            # First element: offset from itself is 0; x becomes the minimum.
            self.stack.append(0)
            self.min = x
        else:
            # Store the offset from the current minimum; a negative offset
            # means x is the new minimum.
            self.stack.append(x - self.min)
            if x < self.min:
                self.min = x

    # @return nothing
    def pop(self):
        offset = self.stack.pop()
        if offset < 0:
            # The popped element was the minimum; undo its update.
            self.min = self.min - offset

    # @return an integer
    def top(self):
        offset = self.stack[-1]
        if offset > 0:
            # Element did not change the minimum: value = min + offset.
            return offset + self.min
        # Non-positive offset: the top element *is* the current minimum.
        return self.min

    # @return an integer
    def getMin(self):
        return self.min
class MinStackOther:
    """Min-stack built from two parallel lists.

    `_data` holds every pushed value; `_mins` holds the running minimum,
    pushed whenever a new value is <= the current minimum (ties included,
    so pop() can simply discard one matching entry).
    """

    def __init__(self):
        self._data = []
        self._mins = []

    # @param x, an integer
    # @return nothing
    def push(self, x):
        self._data.append(x)
        if not self._mins or x <= self._mins[-1]:
            self._mins.append(x)

    # @return nothing
    def pop(self):
        value = self._data.pop()
        if value == self._mins[-1]:
            self._mins.pop()

    # @return an integer
    def top(self):
        return self._data[-1]

    # @return an integer
    def getMin(self):
        return self._mins[-1]
class TestMethods(unittest.TestCase):
    def test_Local(self):
        """Smoke-test MinStack with a few pushes."""
        self.assertEqual(1, 1)
        stack = MinStack()
        for value in (1, 15, 3):
            stack.push(value)
        print([stack.top(), stack.getMin()])

if __name__ == '__main__':
    unittest.main()
Java = '''
# Thought:
Input:
["MinStack","push","push","push","getMin","pop","getMin"]
[[],[0],[1],[0],[],[],[]]
Output:
[null,null,null,null,0,null,1]
Expected:
[null,null,null,null,0,null,0]
# use only one stack:
# pop min twice
# 54ms 100%
class MinStack {
private Stack<Integer> stack;
private int min;
/** initialize your data structure here. */
public MinStack() {
stack = new Stack<>();
min = Integer.MAX_VALUE;
}
public void push(int x) {
if (x <= min) {
stack.push(min);
min = x;
}
stack.push(x);
}
public void pop() {
if (min == stack.pop()) {
min = stack.pop();
}
}
public int top() {
return stack.peek();
}
public int getMin() {
return min;
}
}
# 71ms 49.15%
class MinStack {
Stack<Integer> dataStack;
Stack<Integer> minStack;
/** initialize your data structure here. */
public MinStack() {
dataStack = new Stack<>();
minStack = new Stack<>();
}
public void push(int x) {
dataStack.push(x);
if (minStack.isEmpty() || minStack.peek() >= x) {
minStack.push(x);
}
}
public void pop() {
if (minStack.peek().equals(dataStack.pop())) {
minStack.pop();
}
}
public int top() {
return dataStack.peek();
}
public int getMin() {
return minStack.peek();
}
}
/**
* Your MinStack object will be instantiated and called as such:
* MinStack obj = new MinStack();
* obj.push(x);
* obj.pop();
* int param_3 = obj.top();
* int param_4 = obj.getMin();
*/
# 73ms 45.84%
class MinStack {
Stack<Integer> stack;
Stack<Integer> min;
/** initialize your data structure here. */
public MinStack() {
this.stack = new Stack<Integer>();
this.min = new Stack<Integer>();
}
public void push(int x) {
this.stack.push(x);
if (this.min.isEmpty() || this.min.peek() >= x) {
this.min.push(x);
}
}
public void pop() {
int val = this.stack.pop();
if (this.min.peek() == val) {
this.min.pop();
}
}
public int top() {
return this.stack.peek();
}
public int getMin() {
return this.min.peek();
}
}
'''
| JulyKikuAkita/PythonPrac | cs15211/MinStack.py | Python | apache-2.0 | 4,930 |
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""`LinearOperator` that wraps a [batch] matrix."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops.linalg import linear_operator
from tensorflow.python.ops.linalg import linear_operator_util
from tensorflow.python.util.tf_export import tf_export
__all__ = ["LinearOperatorFullMatrix"]
@tf_export("linalg.LinearOperatorFullMatrix")
class LinearOperatorFullMatrix(linear_operator.LinearOperator):
"""`LinearOperator` that wraps a [batch] matrix.
This operator wraps a [batch] matrix `A` (which is a `Tensor`) with shape
`[B1,...,Bb, M, N]` for some `b >= 0`. The first `b` indices index a
batch member. For every batch index `(i1,...,ib)`, `A[i1,...,ib, : :]` is
an `M x N` matrix.
```python
# Create a 2 x 2 linear operator.
matrix = [[1., 2.], [3., 4.]]
operator = LinearOperatorFullMatrix(matrix)
operator.to_dense()
==> [[1., 2.]
[3., 4.]]
operator.shape
==> [2, 2]
operator.log_abs_determinant()
==> scalar Tensor
x = ... Shape [2, 4] Tensor
operator.matmul(x)
==> Shape [2, 4] Tensor
# Create a [2, 3] batch of 4 x 4 linear operators.
matrix = tf.random_normal(shape=[2, 3, 4, 4])
operator = LinearOperatorFullMatrix(matrix)
```
#### Shape compatibility
This operator acts on [batch] matrix with compatible shape.
`x` is a batch matrix with compatible shape for `matmul` and `solve` if
```
operator.shape = [B1,...,Bb] + [M, N], with b >= 0
x.shape = [B1,...,Bb] + [N, R], with R >= 0.
```
#### Performance
`LinearOperatorFullMatrix` has exactly the same performance as would be
achieved by using standard `TensorFlow` matrix ops. Intelligent choices are
made based on the following initialization hints.
* If `dtype` is real, and `is_self_adjoint` and `is_positive_definite`, a
Cholesky factorization is used for the determinant and solve.
In all cases, suppose `operator` is a `LinearOperatorFullMatrix` of shape
`[M, N]`, and `x.shape = [N, R]`. Then
* `operator.matmul(x)` is `O(M * N * R)`.
* If `M=N`, `operator.solve(x)` is `O(N^3 * R)`.
* If `M=N`, `operator.determinant()` is `O(N^3)`.
If instead `operator` and `x` have shape `[B1,...,Bb, M, N]` and
`[B1,...,Bb, N, R]`, every operation increases in complexity by `B1*...*Bb`.
#### Matrix property hints
This `LinearOperator` is initialized with boolean flags of the form `is_X`,
for `X = non_singular, self_adjoint, positive_definite, square`.
These have the following meaning:
* If `is_X == True`, callers should expect the operator to have the
property `X`. This is a promise that should be fulfilled, but is *not* a
runtime assert. For example, finite floating point precision may result
in these promises being violated.
* If `is_X == False`, callers should expect the operator to not have `X`.
* If `is_X == None` (the default), callers should have no expectation either
way.
"""
def __init__(self,
matrix,
is_non_singular=None,
is_self_adjoint=None,
is_positive_definite=None,
is_square=None,
name="LinearOperatorFullMatrix"):
r"""Initialize a `LinearOperatorFullMatrix`.
Args:
matrix: Shape `[B1,...,Bb, M, N]` with `b >= 0`, `M, N >= 0`.
Allowed dtypes: `float16`, `float32`, `float64`, `complex64`,
`complex128`.
is_non_singular: Expect that this operator is non-singular.
is_self_adjoint: Expect that this operator is equal to its hermitian
transpose.
is_positive_definite: Expect that this operator is positive definite,
meaning the quadratic form `x^H A x` has positive real part for all
nonzero `x`. Note that we do not require the operator to be
self-adjoint to be positive-definite. See:
https://en.wikipedia.org/wiki/Positive-definite_matrix#Extension_for_non-symmetric_matrices
is_square: Expect that this operator acts like square [batch] matrices.
name: A name for this `LinearOperator`.
Raises:
TypeError: If `diag.dtype` is not an allowed type.
"""
with ops.name_scope(name, values=[matrix]):
self._matrix = ops.convert_to_tensor(matrix, name="matrix")
self._check_matrix(self._matrix)
super(LinearOperatorFullMatrix, self).__init__(
dtype=self._matrix.dtype,
graph_parents=[self._matrix],
is_non_singular=is_non_singular,
is_self_adjoint=is_self_adjoint,
is_positive_definite=is_positive_definite,
is_square=is_square,
name=name)
def _check_matrix(self, matrix):
"""Static check of the `matrix` argument."""
allowed_dtypes = [
dtypes.float16,
dtypes.float32,
dtypes.float64,
dtypes.complex64,
dtypes.complex128,
]
matrix = ops.convert_to_tensor(matrix, name="matrix")
dtype = matrix.dtype
if dtype not in allowed_dtypes:
raise TypeError(
"Argument matrix must have dtype in %s. Found: %s"
% (allowed_dtypes, dtype))
if matrix.get_shape().ndims is not None and matrix.get_shape().ndims < 2:
raise ValueError(
"Argument matrix must have at least 2 dimensions. Found: %s"
% matrix)
def _shape(self):
return self._matrix.get_shape()
def _shape_tensor(self):
return array_ops.shape(self._matrix)
def _matmul(self, x, adjoint=False, adjoint_arg=False):
return linear_operator_util.matmul_with_broadcast(
self._matrix, x, adjoint_a=adjoint, adjoint_b=adjoint_arg)
def _to_dense(self):
return self._matrix
| benoitsteiner/tensorflow-xsmm | tensorflow/python/ops/linalg/linear_operator_full_matrix.py | Python | apache-2.0 | 6,537 |
# -*- coding: utf-8 -*-
"""
***************************************************************************
AutoincrementalField.py
---------------------
Date : August 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
from PyQt4.QtCore import QVariant
from qgis.core import QgsField, QgsFeature, QgsGeometry
from processing.core.GeoAlgorithm import GeoAlgorithm
from processing.core.parameters import ParameterVector
from processing.core.outputs import OutputVector
from processing.tools import dataobjects, vector
class AutoincrementalField(GeoAlgorithm):

    INPUT = 'INPUT'
    OUTPUT = 'OUTPUT'

    def processAlgorithm(self, progress):
        """Copy each input feature, appending a 1-based 'AUTO' counter."""
        output = self.getOutputFromName(self.OUTPUT)
        layer = dataobjects.getObjectFromUri(self.getParameterValue(self.INPUT))
        provider = layer.dataProvider()
        fields = provider.fields()
        fields.append(QgsField('AUTO', QVariant.Int))
        writer = output.getVectorWriter(fields, provider.geometryType(),
                                        layer.crs())

        # One reusable output feature; the writer copies it on addFeature().
        outFeat = QgsFeature()
        features = vector.features(layer)
        total = len(features)
        for current, inFeat in enumerate(features):
            progress.setPercentage(int(100 * current / total))
            outFeat.setGeometry(inFeat.geometry())
            attrs = inFeat.attributes()
            attrs.append(current + 1)
            outFeat.setAttributes(attrs)
            writer.addFeature(outFeat)
        # Deleting the writer flushes and closes the output.
        del writer

    def defineCharacteristics(self):
        """Declare the algorithm's name, group, inputs and outputs."""
        self.name = 'Add autoincremental field'
        self.group = 'Vector table tools'
        self.addParameter(ParameterVector(
            self.INPUT, self.tr('Input layer'),
            [ParameterVector.VECTOR_TYPE_ANY]))
        self.addOutput(OutputVector(self.OUTPUT, self.tr('Incremented')))
| dracos/QGIS | python/plugins/processing/algs/qgis/AutoincrementalField.py | Python | gpl-2.0 | 2,809 |
from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
# Project URLconf: every route is still a commented-out example, so this
# currently resolves no URLs.
urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'GameTree.views.home', name='home'),
    # url(r'^GameTree/', include('GameTree.foo.urls')),

    # Uncomment the admin/doc line below to enable admin documentation:
    # url(r'^admin/doc/', include('django.contrib.admindocs.urls')),

    # Uncomment the next line to enable the admin:
    # url(r'^admin/', include(admin.site.urls)),
)
| yeaske/GameTree | GameTree/urls.py | Python | apache-2.0 | 562 |
from django.conf.urls import url
from fundraiser_app import views
# Routes for the fundraiser app: a list page, an about page, and per-item
# CRUD plus a publish action, keyed by primary key.
urlpatterns = [
    url(r'^$', views.FMItemListView.as_view(), name='fmitem_list'),
    url(r'^about/$', views.AboutView.as_view(), name='about'),
    # Detail / create / update / delete for a single FMItem.
    url(r'^fmitem/(?P<pk>\d+)$', views.FMItemDetailView.as_view(), name='fmitem_detail'),
    url(r'^fmitem/new$', views.FMItemCreateView.as_view(), name='fmitem_new'),
    url(r'^fmitem/(?P<pk>\d+)/edit$', views.FMItemUpdateView.as_view(), name='fmitem_edit'),
    url(r'^fmitem/(?P<pk>\d+)/remove$', views.FMItemDeleteView.as_view(), name='fmitem_remove'),
    # Function-based view -- presumably marks the item published; see
    # views.fmitem_publish.
    url(r'^fmitem/(?P<pk>\d+)/publish/$', views.fmitem_publish, name='fmitem_publish'),
]
| CarlGraff/fundraisermemorial | fundraiser_app/urls.py | Python | mit | 663 |
from OpenGLCffi.GL import params
# Stubs for the 64-bit integer uniform entry points from the NV_gpu_shader5
# extension family.  Every body is an empty `pass`: the @params decorator
# (imported from OpenGLCffi.GL) records the parameter names and appears to
# supply the real cffi-backed GL call -- confirm against OpenGLCffi's
# decorator implementation.

# glUniform{1..4}i64NV: set scalar/vector signed 64-bit uniforms on the
# currently bound program.
@params(api='gl', prms=['location', 'x'])
def glUniform1i64NV(location, x):
	pass
@params(api='gl', prms=['location', 'x', 'y'])
def glUniform2i64NV(location, x, y):
	pass
@params(api='gl', prms=['location', 'x', 'y', 'z'])
def glUniform3i64NV(location, x, y, z):
	pass
@params(api='gl', prms=['location', 'x', 'y', 'z', 'w'])
def glUniform4i64NV(location, x, y, z, w):
	pass
# Array ("v") variants: `count` elements starting at `value`.
@params(api='gl', prms=['location', 'count', 'value'])
def glUniform1i64vNV(location, count, value):
	pass
@params(api='gl', prms=['location', 'count', 'value'])
def glUniform2i64vNV(location, count, value):
	pass
@params(api='gl', prms=['location', 'count', 'value'])
def glUniform3i64vNV(location, count, value):
	pass
@params(api='gl', prms=['location', 'count', 'value'])
def glUniform4i64vNV(location, count, value):
	pass
# Unsigned 64-bit scalar/vector variants.
@params(api='gl', prms=['location', 'x'])
def glUniform1ui64NV(location, x):
	pass
@params(api='gl', prms=['location', 'x', 'y'])
def glUniform2ui64NV(location, x, y):
	pass
@params(api='gl', prms=['location', 'x', 'y', 'z'])
def glUniform3ui64NV(location, x, y, z):
	pass
@params(api='gl', prms=['location', 'x', 'y', 'z', 'w'])
def glUniform4ui64NV(location, x, y, z, w):
	pass
# Unsigned array variants.
@params(api='gl', prms=['location', 'count', 'value'])
def glUniform1ui64vNV(location, count, value):
	pass
@params(api='gl', prms=['location', 'count', 'value'])
def glUniform2ui64vNV(location, count, value):
	pass
@params(api='gl', prms=['location', 'count', 'value'])
def glUniform3ui64vNV(location, count, value):
	pass
@params(api='gl', prms=['location', 'count', 'value'])
def glUniform4ui64vNV(location, count, value):
	pass
# Readback of signed/unsigned 64-bit uniforms from an explicit program.
@params(api='gl', prms=['program', 'location', 'params'])
def glGetUniformi64vNV(program, location, params):
	pass
@params(api='gl', prms=['program', 'location', 'params'])
def glGetUniformui64vNV(program, location, params):
	pass
# Direct-state-access style setters taking an explicit program handle.
@params(api='gl', prms=['program', 'location', 'x'])
def glProgramUniform1i64NV(program, location, x):
	pass
@params(api='gl', prms=['program', 'location', 'x', 'y'])
def glProgramUniform2i64NV(program, location, x, y):
	pass
@params(api='gl', prms=['program', 'location', 'x', 'y', 'z'])
def glProgramUniform3i64NV(program, location, x, y, z):
	pass
@params(api='gl', prms=['program', 'location', 'x', 'y', 'z', 'w'])
def glProgramUniform4i64NV(program, location, x, y, z, w):
	pass
@params(api='gl', prms=['program', 'location', 'count', 'value'])
def glProgramUniform1i64vNV(program, location, count, value):
	pass
@params(api='gl', prms=['program', 'location', 'count', 'value'])
def glProgramUniform2i64vNV(program, location, count, value):
	pass
@params(api='gl', prms=['program', 'location', 'count', 'value'])
def glProgramUniform3i64vNV(program, location, count, value):
	pass
@params(api='gl', prms=['program', 'location', 'count', 'value'])
def glProgramUniform4i64vNV(program, location, count, value):
	pass
@params(api='gl', prms=['program', 'location', 'x'])
def glProgramUniform1ui64NV(program, location, x):
	pass
@params(api='gl', prms=['program', 'location', 'x', 'y'])
def glProgramUniform2ui64NV(program, location, x, y):
pass
@params(api='gl', prms=['program', 'location', 'x', 'y', 'z'])
def glProgramUniform3ui64NV(program, location, x, y, z):
pass
@params(api='gl', prms=['program', 'location', 'x', 'y', 'z', 'w'])
def glProgramUniform4ui64NV(program, location, x, y, z, w):
pass
@params(api='gl', prms=['program', 'location', 'count', 'value'])
def glProgramUniform1ui64vNV(program, location, count, value):
pass
@params(api='gl', prms=['program', 'location', 'count', 'value'])
def glProgramUniform2ui64vNV(program, location, count, value):
pass
@params(api='gl', prms=['program', 'location', 'count', 'value'])
def glProgramUniform3ui64vNV(program, location, count, value):
pass
@params(api='gl', prms=['program', 'location', 'count', 'value'])
def glProgramUniform4ui64vNV(program, location, count, value):
pass
| cydenix/OpenGLCffi | OpenGLCffi/GL/EXT/AMD/gpu_shader_int64.py | Python | mit | 3,996 |
#!/usr/bin/env python
import json
import os
import stat
from Pegasus.DAX3 import ADAG, Job, File, Link
class DockerApp(object):
    """Builds a Pegasus DAX job that runs a JSON-described app inside Docker.

    The descriptor file (``appfile``) declares a command line template plus
    its inputs and outputs; ``map`` maps descriptor parameter names to the
    concrete filenames/values to substitute.  A shell wrapper invoking
    ``docker run`` is written under ``wrappers/`` for each app.
    """

    def __init__(self, appfile, map, transfer=False):
        """Load the app descriptor and prepare the wrappers directory.

        Args:
            appfile: path to the JSON app descriptor.
            map: dict mapping descriptor input/output names to values.
                 (Name kept for API compatibility although it shadows the
                 builtin ``map``.)
            transfer: whether Pegasus should stage out the job's outputs.
        """
        # Use a context manager so the descriptor file handle is closed
        # (the previous code leaked it via json.load(open(...))).
        with open(appfile) as descriptor:
            self.data = json.load(descriptor)
        self.map = map
        self.transfer = transfer
        if not os.path.exists("wrappers"):
            os.mkdir("wrappers")

    def generate_job(self):
        """Return a Pegasus ``Job`` with command-line keys substituted.

        Side effects: mutates ``self.data["command-line"]`` in place and
        writes the docker wrapper script via ``create_wrapper()``.
        """
        job = Job(self.data["name"], node_label=self.data["name"])
        # Outputs: replace each command-line key with either its
        # value-template or the mapped value, and register File outputs.
        for out in self.data["outputs"]:
            if out["command-line-key"]:
                if "value-template" in out and out["value-template"]:
                    self.data["command-line"] = self.data["command-line"].replace(out["command-line-key"], out["value-template"])
                else:
                    self.data["command-line"] = self.data["command-line"].replace(out["command-line-key"], self.map[out["name"]])
                if out["type"] == "File":
                    job.uses(self.map[out["name"]], link=Link.OUTPUT, transfer=self.transfer)
        # Inputs: substitute mapped values and remember the key->value
        # mapping so output value-templates can be expanded below.
        inputs_map = {}
        for inp in self.data["inputs"]:
            if inp["command-line-key"]:
                self.data["command-line"] = self.data["command-line"].replace(inp["command-line-key"], self.map[inp["name"]])
                inputs_map[inp["command-line-key"]] = self.map[inp["name"]]
                if inp["type"] == "File":
                    job.uses(self.map[inp["name"]], link=Link.INPUT)
        # Outputs whose filename is derived from an input (value-template):
        # expand the first matching input key and register the result.
        for out in self.data["outputs"]:
            if "value-template" in out and out["value-template"]:
                for key in inputs_map:
                    if key in out["value-template"]:
                        value = out["value-template"].replace(key, inputs_map[key])
                        job.uses(value, link=Link.OUTPUT, transfer=self.transfer)
                        break
        self.create_wrapper()
        return job

    def create_wrapper(self):
        """Write an executable ``wrappers/<name>`` script that runs the app
        in Docker with the current directory mounted as /scratch."""
        wrapper_path = "wrappers/%s" % self.data["name"]
        # Context manager guarantees the script is flushed and closed
        # before we chmod it (old code used open/write/close by hand).
        with open(wrapper_path, "w") as wrapper:
            wrapper.write("#!/bin/bash\n")
            wrapper.write("PWD=`pwd`\n")
            wrapper.write("docker run -v $PWD:/scratch -w=/scratch -t %s %s\n" % (self.data["docker-image"], self.data["command-line"]))
        st = os.stat(wrapper_path)
        os.chmod(wrapper_path, st.st_mode | stat.S_IEXEC)
| boutiques/schema | tools/pegasus/dockerapp.py | Python | gpl-2.0 | 2,102 |
import lxml.html
from .bills import NHBillScraper
from .legislators import NHLegislatorScraper
from .committees import NHCommitteeScraper
# Billy scraper configuration for New Hampshire.  Consumed by the billy
# framework: chamber names/titles, term -> session groupings, and
# per-session details (the zip_url points at the bulk bill-status dump
# that NHBillScraper downloads).
metadata = {
    'abbreviation': 'nh',
    'name': 'New Hampshire',
    'capitol_timezone': 'America/New_York',
    'legislature_name': 'New Hampshire General Court',
    'legislature_url': 'http://www.gencourt.state.nh.us/',
    'chambers': {
        'upper': {'name': 'Senate', 'title': 'Senator'},
        'lower': {'name': 'House', 'title': 'Representative'},
    },
    # Two-year terms; each term lists the session names belonging to it.
    'terms': [
        {'name': '2011-2012', 'sessions': ['2011', '2012'],
         'start_year': 2011, 'end_year': 2012},
        {'name': '2013-2014', 'sessions': ['2013', '2014'],
         'start_year': 2013, 'end_year': 2014},
        {'name': '2015-2016', 'sessions': ['2015', '2016'],
         'start_year': 2015, 'end_year': 2016},
        {'name': '2017-2018', 'sessions': ['2017'],
         'start_year': 2017, 'end_year': 2018}
    ],
    # _scraped_name must match what session_list() extracts from the
    # downloads page, so validation can pair scraped and configured names.
    'session_details': {
        '2011': {'display_name': '2011 Regular Session',
                 'zip_url': 'http://gencourt.state.nh.us/downloads/2011%20Session%20Bill%20Status%20Tables.zip',
                 '_scraped_name': '2011 Session',
                 },
        '2012': {'display_name': '2012 Regular Session',
                 'zip_url': 'http://gencourt.state.nh.us/downloads/2012%20Session%20Bill%20Status%20Tables.zip',
                 '_scraped_name': '2012 Session',
                 },
        '2013': {'display_name': '2013 Regular Session',
                 'zip_url': 'http://gencourt.state.nh.us/downloads/2013%20Session%20Bill%20Status%20Tables.zip',
                 # Their dump filename changed, probably just a hiccup.
                 '_scraped_name': '2013',
                 # '_scraped_name': '2013 Session',
                 },
        '2014': {'display_name': '2014 Regular Session',
                 'zip_url': 'http://gencourt.state.nh.us/downloads/2014%20Session%20Bill%20Status%20Tables.zip',
                 '_scraped_name': '2014 Session',
                 },
        '2015': {'display_name': '2015 Regular Session',
                 'zip_url': 'http://gencourt.state.nh.us/downloads/2015%20Session%20Bill%20Status%20Tables.zip',
                 '_scraped_name': '2015 Session',
                 },
        '2016': {'display_name': '2016 Regular Session',
                 'zip_url': 'http://gencourt.state.nh.us/downloads/2016%20Session%20Bill%20Status%20Tables.zip',
                 '_scraped_name': '2016 Session',
                 },
        # 2017 has no bulk zip_url (yet) -- scraped live instead.
        '2017': {'display_name': '2017 Regular Session',
                 '_scraped_name': '2017 Session',
                 },
    },
    'feature_flags': ['subjects', 'influenceexplorer'],
    # Names seen on the site that deliberately have no session config.
    '_ignored_scraped_sessions': ['2013 Session','2017 Session Bill Status Tables Link.txt'],
}
def session_list():
    """Scrape the NH downloads page and return the available session names.

    Extracts the link text of every "... Bill Status Tables.zip" entry and
    strips the filename suffix, leaving names like '2015 Session'.
    Performs a network request; the billy import is kept local so merely
    importing this module does not require billy.
    """
    from billy.scrape.utils import url_xpath
    # Renamed from 'zip' to avoid shadowing the builtin.
    links = url_xpath('http://gencourt.state.nh.us/downloads/',
                      '//a[contains(@href, "Bill%20Status%20Tables")]/text()')
    return [link.replace(' Bill Status Tables.zip', '') for link in links]
def extract_text(doc, data):
    """Return the plain text of a bill document.

    ``doc`` (the billy document record) is accepted for interface
    compatibility but unused; ``data`` is the raw HTML payload.
    """
    html = lxml.html.fromstring(data)
    return html.xpath('//html')[0].text_content()
| cliftonmcintosh/openstates | openstates/nh/__init__.py | Python | gpl-3.0 | 3,210 |
#!/usr/bin/env python
###############################################################################
# #
# stackedBarGraph.py - code for creating purdy stacked bar graphs #
# #
###############################################################################
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
# #
###############################################################################
__author__ = "Michael Imelfort"
__copyright__ = "Copyright 2014"
__credits__ = ["Michael Imelfort"]
__license__ = "GPL3"
__version__ = "0.0.1"
__maintainer__ = "Michael Imelfort"
__email__ = "mike@mikeimelfort.com"
__status__ = "Development"
###############################################################################
import numpy as np
from matplotlib import pyplot as plt
###############################################################################
class StackedBarGrapher:
    """Helper for drawing stacked bar plots with matplotlib.

    Stateless container: ``stackedBarPlot`` renders one chart onto a
    supplied Axes, and ``demo`` shows the supported option combinations.
    """

    def __init__(self): pass

    def demo(self):
        """Render a 3x2 grid of example stacked bar charts and show it."""
        d = np.array([[101.,0.,0.,0.,0.,0.,0.],
                      [92.,3.,0.,4.,5.,6.,0.],
                      [56.,7.,8.,9.,23.,4.,5.],
                      [81.,2.,4.,5.,32.,33.,4.],
                      [0.,45.,2.,3.,45.,67.,8.],
                      [99.,5.,0.,0.,0.,43.,56.]])

        d_heights = [1.,2.,3.,4.,5.,6.]
        d_widths = [.5,1.,3.,2.,1.,2.]
        d_labels = ["fred","julie","sam","peter","rob","baz"]
        d_colors = ['#2166ac', '#fee090', '#fdbb84', '#fc8d59', '#e34a33', '#b30000', '#777777']
        gap = 0.05

        fig = plt.figure()

        ax1 = fig.add_subplot(321)
        self.stackedBarPlot(ax1,
                            d,
                            d_colors,
                            edgeCols=['#000000']*7,
                            xLabels=d_labels,
                            )
        plt.title("Straight up stacked bars")

        ax2 = fig.add_subplot(322)
        self.stackedBarPlot(ax2,
                            d,
                            d_colors,
                            edgeCols=['#000000']*7,
                            xLabels=d_labels,
                            scale=True
                            )
        plt.title("Scaled bars")

        ax3 = fig.add_subplot(323)
        self.stackedBarPlot(ax3,
                            d,
                            d_colors,
                            edgeCols=['#000000']*7,
                            xLabels=d_labels,
                            heights=d_heights,
                            yTicks=7,
                            )
        plt.title("Bars with set heights")

        ax4 = fig.add_subplot(324)
        self.stackedBarPlot(ax4,
                            d,
                            d_colors,
                            edgeCols=['#000000']*7,
                            xLabels=d_labels,
                            yTicks=7,
                            widths=d_widths,
                            scale=True
                            )
        plt.title("Scaled bars with set widths")

        ax5 = fig.add_subplot(325)
        self.stackedBarPlot(ax5,
                            d,
                            d_colors,
                            edgeCols=['#000000']*7,
                            xLabels=d_labels,
                            gap=gap
                            )
        plt.title("Straight up stacked bars + gaps")

        ax6 = fig.add_subplot(326)
        self.stackedBarPlot(ax6,
                            d,
                            d_colors,
                            edgeCols=['#000000']*7,
                            xLabels=d_labels,
                            scale=True,
                            gap=gap,
                            endGaps=True
                            )
        plt.title("Scaled bars + gaps + end gaps")

        fig.subplots_adjust(bottom=0.4)
        plt.tight_layout()
        plt.show()
        plt.close(fig)
        del fig

    def stackedBarPlot(self,
                       ax,                                 # axes to plot onto
                       data,                               # data to plot (one row per bar, one column per level)
                       cols,                               # colors for each level
                       xLabels = None,                     # bar specific labels
                       yTicks = 6.,                        # "none", a tick count, or [[positions],[labels]]
                       edgeCols=None,                      # colors for edges
                       showFirst=-1,                       # only plot the first <showFirst> bars
                       scale=False,                        # scale bars to same height
                       widths=None,                        # set widths for each bar
                       heights=None,                       # set heights for each bar
                       ylabel='',                          # label for y axis
                       xlabel='',                          # label for x axis
                       gap=0.,                             # gap between bars
                       endGaps=False,                      # allow gaps at end of bar chart (only used if gaps != 0.)
                       seriesLabels=None                   # optional legend label per level
                       ):
        """Draw one stacked bar chart onto ``ax``.

        Bug fixes vs. the original version:
        * ``seriesLabels=None`` no longer crashes (it was always indexed,
          which made ``demo()`` raise TypeError).
        * string comparisons against "none" use ``!=`` instead of ``is not``
          (identity comparison of string literals is unreliable).
        * ``set_ylim`` no longer indexes the string when ``yTicks="none"``.
        * ``print`` is called as a function so the module parses on Python 3.
        """
        #------------------------------------------------------------------------------
        # data fixeratering

        # make sure this makes sense
        if showFirst != -1:
            showFirst = np.min([showFirst, np.shape(data)[0]])
            data_copy = np.copy(data[:showFirst]).transpose().astype('float')
            data_shape = np.shape(data_copy)
            if heights is not None:
                heights = heights[:showFirst]
            if widths is not None:
                widths = widths[:showFirst]
            showFirst = -1
        else:
            # cast to float like the showFirst branch so in-place division
            # below also works for integer input arrays
            data_copy = np.copy(data).transpose().astype('float')
            data_shape = np.shape(data_copy)

        # determine the number of bars and corresponding levels from the shape of the data
        num_bars = data_shape[1]
        levels = data_shape[0]

        # BUGFIX: seriesLabels defaulted to None but was indexed below.
        if seriesLabels is None:
            seriesLabels = [None] * levels

        if widths is None:
            widths = np.array([1] * num_bars)
            x = np.arange(num_bars)
        else:
            x = [0]
            for i in range(1, len(widths)):
                x.append(x[i-1] + (widths[i-1] + widths[i])/2)

        # stack the data --
        # replace the value in each level by the cumulative sum of all preceding levels
        data_stack = np.reshape([float(i) for i in np.ravel(np.cumsum(data_copy, axis=0))], data_shape)

        # scale the data if needed
        if scale:
            data_copy /= data_stack[levels-1]
            data_stack /= data_stack[levels-1]
            if heights is not None:
                print("WARNING: setting scale and heights does not make sense.")
                heights = None
        elif heights is not None:
            data_copy /= data_stack[levels-1]
            data_stack /= data_stack[levels-1]
            for i in np.arange(num_bars):
                data_copy[:,i] *= heights[i]
                data_stack[:,i] *= heights[i]

        #------------------------------------------------------------------------------
        # ticks

        if yTicks != "none":
            # it is either a set of ticks or the number of auto ticks to make
            real_ticks = True
            try:
                k = len(yTicks[1])
            except (TypeError, IndexError, KeyError):
                real_ticks = False

            if not real_ticks:
                yTicks = float(yTicks)
                if scale:
                    # make the ticks line up to 100 %
                    y_ticks_at = np.arange(yTicks)/(yTicks-1)
                    y_tick_labels = np.array(["%0.2f"%(i * 100) for i in y_ticks_at])
                else:
                    # space the ticks along the y axis
                    y_ticks_at = np.arange(yTicks)/(yTicks-1)*np.max(data_stack)
                    y_tick_labels = np.array([str(i) for i in y_ticks_at])
                yTicks = (y_ticks_at, y_tick_labels)

        #------------------------------------------------------------------------------
        # plot

        if edgeCols is None:
            edgeCols = ["none"]*len(cols)

        # take care of gaps
        gapd_widths = [i - gap for i in widths]

        # bottom level of the stack, then the remaining levels on top of it
        ax.bar(x,
               data_stack[0],
               color=cols[0],
               edgecolor=edgeCols[0],
               width=gapd_widths,
               linewidth=0.5,
               align='center',
               label=seriesLabels[0]
               )

        for i in np.arange(1, levels):
            ax.bar(x,
                   data_copy[i],
                   bottom=data_stack[i-1],
                   color=cols[i],
                   edgecolor=edgeCols[i],
                   width=gapd_widths,
                   linewidth=0.5,
                   align='center',
                   label=seriesLabels[i]
                   )

        # borders
        ax.spines["top"].set_visible(False)
        ax.spines["right"].set_visible(False)
        ax.spines["bottom"].set_visible(False)
        ax.spines["left"].set_visible(False)

        # make ticks if necessary
        if yTicks != "none":
            ax.tick_params(axis='y', which='both', labelsize=8, direction="out")
            ax.yaxis.tick_left()
            plt.yticks(yTicks[0], yTicks[1])
        else:
            plt.yticks([], [])

        if xLabels is not None:
            ax.tick_params(axis='x', which='both', labelsize=8, direction="out")
            ax.xaxis.tick_bottom()
            plt.xticks(x, xLabels, rotation='vertical')
        else:
            plt.xticks([], [])

        # limits
        if endGaps:
            ax.set_xlim(-1.*widths[0]/2. - gap/2., np.sum(widths)-widths[0]/2. + gap/2.)
        else:
            ax.set_xlim(-1.*widths[0]/2. + gap/2., np.sum(widths)-widths[0]/2. - gap/2.)

        # BUGFIX: when yTicks == "none" it is still the string here; fall
        # back to the data maximum instead of indexing into "none".
        if yTicks != "none":
            ax.set_ylim(0, yTicks[0][-1])
        else:
            ax.set_ylim(0, np.max(data_stack))

        # labels
        if xlabel != '':
            plt.xlabel(xlabel)
        if ylabel != '':
            plt.ylabel(ylabel)
###############################################################################
###############################################################################
###############################################################################
###############################################################################
if __name__ == '__main__':
    # Script entry point: render the demo grid of stacked bar charts.
    SBG = StackedBarGrapher()
    SBG.demo()
###############################################################################
###############################################################################
###############################################################################
############################################################################### | lionelBytes/CSERF | stacked_bar_graph.py | Python | gpl-2.0 | 12,103 |
#!/usr/bin/python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import android_commands
import constants
import logging
import os
import run_tests_helper
import subprocess
import time
class FakeDns(object):
  """Wrapper class for the fake_dns tool.

  Redirects the device's DNS resolution so that every name resolves to
  127.0.0.1 (typically combined with a forwarder + replay server).
  SetUp()/TearDown() must be called in pairs; the shell commands in each
  are order-sensitive, so do not reorder them.
  """

  # On-device path the fake_dns binary is pushed to.
  _FAKE_DNS_PATH = constants.TEST_EXECUTABLE_DIR + '/fake_dns'

  def __init__(self, adb):
    """
    Args:
      adb: the AndroidCommands to use.
    """
    self._adb = adb
    self._fake_dns = None       # Popen handle for the device-side daemon.
    self._original_dns = None   # net.dns1 value saved by SetUp().

  def _PushAndStartFakeDns(self):
    """Starts the fake_dns server that replies all name queries 127.0.0.1.

    Returns:
      subprocess instance connected to the fake_dns process on the device.
    """
    self._adb.PushIfNeeded(
        os.path.join(run_tests_helper.CHROME_DIR, 'out', 'Release', 'fake_dns'),
        FakeDns._FAKE_DNS_PATH)
    # -D keeps fake_dns in the foreground so this Popen stays attached.
    return subprocess.Popen(
        ['adb', '-s', self._adb._adb.GetSerialNumber(),
         'shell', '%s -D' % FakeDns._FAKE_DNS_PATH])

  def SetUp(self):
    """Configures the system to point to a DNS server that replies 127.0.0.1.

    This can be used in combination with the forwarder to forward all web
    traffic to a replay server.

    The TearDown() method will perform all cleanup.
    """
    # Route the 8.8.8.x range (presumably Google public DNS -- TODO confirm)
    # through loopback so queries to it hit the local fake_dns instead.
    self._adb.RunShellCommand('ip route add 8.8.8.0/24 via 127.0.0.1 dev lo')
    self._fake_dns = self._PushAndStartFakeDns()
    # Save the current resolver before overwriting it, for TearDown().
    self._adb.RunShellCommand('setprop net.dns1 127.0.0.1')
    time.sleep(2) # Time for server to start and the setprop to take effect.

  def TearDown(self):
    """Shuts down the fake_dns."""
    if self._fake_dns:
      if not self._original_dns or self._original_dns == '127.0.0.1':
        logging.warning('Bad original DNS, falling back to Google DNS.')
        self._original_dns = '8.8.8.8'
      # Restore resolver first, then kill the daemon and remove the route.
      self._adb.RunShellCommand('setprop net.dns1 %s' % self._original_dns)
      self._fake_dns.kill()
      self._adb.RunShellCommand('ip route del 8.8.8.0/24 via 127.0.0.1 dev lo')
| paul99/clank | build/android/fake_dns.py | Python | bsd-3-clause | 2,149 |
# encoding: utf-8
# Copyright 2011 Tree.io Limited
# This file is part of Treeio.
# License www.tree.io/license
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration 0001 for the messaging app.

    Creates ``EmailBox``, ``MessageStream`` and ``Message`` (each extending
    the shared ``core.Object`` base via a one-to-one pointer) plus the
    ``Message.read_by`` many-to-many join table.  Do not edit the frozen
    ``models`` dict by hand -- South regenerates it.
    """

    def forwards(self, orm):
        """Apply the migration: create the messaging tables."""
        # Adding model 'EmailBox'
        db.create_table('messaging_emailbox', (
            ('object_ptr', self.gf('django.db.models.fields.related.OneToOneField')(
                to=orm['core.Object'], unique=True, primary_key=True)),
            ('email_name', self.gf(
                'django.db.models.fields.CharField')(max_length=255)),
            ('email_type', self.gf(
                'django.db.models.fields.CharField')(max_length=255)),
            ('server_name', self.gf(
                'django.db.models.fields.CharField')(max_length=255)),
            ('server_type', self.gf(
                'django.db.models.fields.CharField')(max_length=255)),
            ('server_username', self.gf(
                'django.db.models.fields.CharField')(max_length=255)),
            ('server_password', self.gf(
                'django.db.models.fields.CharField')(max_length=255)),
            ('last_checked', self.gf('django.db.models.fields.DateTimeField')
             (null=True, blank=True)),
        ))
        db.send_create_signal('messaging', ['EmailBox'])

        # Adding model 'MessageStream'
        db.create_table('messaging_messagestream', (
            ('object_ptr', self.gf('django.db.models.fields.related.OneToOneField')(
                to=orm['core.Object'], unique=True, primary_key=True)),
            ('name', self.gf('django.db.models.fields.CharField')
             (max_length=255)),
            ('email_incoming', self.gf('django.db.models.fields.related.ForeignKey')(
                blank=True, related_name='incoming', null=True, to=orm['messaging.EmailBox'])),
            ('email_outgoing', self.gf('django.db.models.fields.related.ForeignKey')(
                blank=True, related_name='outgoing', null=True, to=orm['messaging.EmailBox'])),
        ))
        db.send_create_signal('messaging', ['MessageStream'])

        # Adding model 'Message'
        db.create_table('messaging_message', (
            ('object_ptr', self.gf('django.db.models.fields.related.OneToOneField')(
                to=orm['core.Object'], unique=True, primary_key=True)),
            ('title', self.gf('django.db.models.fields.CharField')
             (max_length=255, null=True, blank=True)),
            ('body', self.gf('django.db.models.fields.TextField')()),
            ('author', self.gf('django.db.models.fields.related.ForeignKey')
             (to=orm['identities.Contact'])),
            ('stream', self.gf('django.db.models.fields.related.ForeignKey')
             (related_name='stream', to=orm['messaging.MessageStream'])),
            ('reply_to', self.gf('django.db.models.fields.related.ForeignKey')(
                blank=True, related_name='child_set', null=True, to=orm['messaging.Message'])),
        ))
        db.send_create_signal('messaging', ['Message'])

        # Adding M2M table for field read_by on 'Message'
        db.create_table('messaging_message_read_by', (
            ('id', models.AutoField(
                verbose_name='ID', primary_key=True, auto_created=True)),
            ('message', models.ForeignKey(
                orm['messaging.message'], null=False)),
            ('user', models.ForeignKey(orm['core.user'], null=False))
        ))
        db.create_unique(
            'messaging_message_read_by', ['message_id', 'user_id'])

    def backwards(self, orm):
        """Reverse the migration: drop all tables created by forwards()."""
        # Deleting model 'EmailBox'
        db.delete_table('messaging_emailbox')

        # Deleting model 'MessageStream'
        db.delete_table('messaging_messagestream')

        # Deleting model 'Message'
        db.delete_table('messaging_message')

        # Removing M2M table for field read_by on 'Message'
        db.delete_table('messaging_message_read_by')

    # Frozen ORM state captured when this migration was generated; South
    # uses these definitions (not the live models) while migrating.
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'core.group': {
            'Meta': {'ordering': "['name']", 'object_name': 'Group'},
            'details': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
            'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'child_set'", 'null': 'True', 'to': "orm['core.Group']"})
        },
        'core.object': {
            'Meta': {'object_name': 'Object'},
            'date_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'everybody_execute': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'everybody_read': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'everybody_write': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Group']"}),
            'group_execute': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'group_read': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'group_write': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'links': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'links_rel_+'", 'null': 'True', 'to': "orm['core.Object']"}),
            'nuvius_resource': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'object_name': ('django.db.models.fields.CharField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'}),
            'object_type': ('django.db.models.fields.CharField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'}),
            'subscribers': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'subscriptions'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['core.User']"}),
            'trash': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.User']"}),
            'user_execute': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'user_read': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'user_write': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
        },
        'core.user': {
            'Meta': {'ordering': "['name']", 'object_name': 'User'},
            'default_group': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'default_user_set'", 'null': 'True', 'to': "orm['core.Group']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
            'other_groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['core.Group']", 'null': 'True', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
        },
        'identities.contact': {
            'Meta': {'ordering': "['name']", 'object_name': 'Contact', '_ormbases': ['core.Object']},
            'contact_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['identities.ContactType']"}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
            'object_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['core.Object']", 'unique': 'True', 'primary_key': 'True'}),
            'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'child_set'", 'null': 'True', 'to': "orm['identities.Contact']"}),
            'related_group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Group']", 'null': 'True', 'blank': 'True'}),
            'related_user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.User']", 'null': 'True', 'blank': 'True'})
        },
        'identities.contactfield': {
            'Meta': {'ordering': "['name']", 'object_name': 'ContactField', '_ormbases': ['core.Object']},
            'allowed_values': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'details': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'field_type': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
            'label': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
            'object_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['core.Object']", 'unique': 'True', 'primary_key': 'True'}),
            'required': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
        },
        'identities.contacttype': {
            'Meta': {'ordering': "['name']", 'object_name': 'ContactType', '_ormbases': ['core.Object']},
            'details': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'fields': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['identities.ContactField']", 'null': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
            'object_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['core.Object']", 'unique': 'True', 'primary_key': 'True'}),
            'slug': ('django.db.models.fields.CharField', [], {'max_length': '256'})
        },
        'messaging.emailbox': {
            'Meta': {'ordering': "['last_updated']", 'object_name': 'EmailBox', '_ormbases': ['core.Object']},
            'email_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'email_type': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'last_checked': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'object_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['core.Object']", 'unique': 'True', 'primary_key': 'True'}),
            'server_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'server_password': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'server_type': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'server_username': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        },
        'messaging.message': {
            'Meta': {'ordering': "['-date_created']", 'object_name': 'Message', '_ormbases': ['core.Object']},
            'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['identities.Contact']"}),
            'body': ('django.db.models.fields.TextField', [], {}),
            'object_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['core.Object']", 'unique': 'True', 'primary_key': 'True'}),
            'read_by': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'read_by_user'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['core.User']"}),
            'reply_to': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'child_set'", 'null': 'True', 'to': "orm['messaging.Message']"}),
            'stream': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'stream'", 'to': "orm['messaging.MessageStream']"}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
        },
        'messaging.messagestream': {
            'Meta': {'ordering': "['name']", 'object_name': 'MessageStream', '_ormbases': ['core.Object']},
            'email_incoming': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'incoming'", 'null': 'True', 'to': "orm['messaging.EmailBox']"}),
            'email_outgoing': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'outgoing'", 'null': 'True', 'to': "orm['messaging.EmailBox']"}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'object_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['core.Object']", 'unique': 'True', 'primary_key': 'True'})
        }
    }

    complete_apps = ['messaging']
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""Contains the normalization layer classes and their functional aliases.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.keras import layers as keras_layers
from tensorflow.python.layers import base
from tensorflow.python.ops import init_ops
from tensorflow.python.util import deprecation
from tensorflow.python.util.tf_export import tf_export
@tf_export(v1=['layers.BatchNormalization'])
class BatchNormalization(keras_layers.BatchNormalization, base.Layer):
  """Batch Normalization layer from http://arxiv.org/abs/1502.03167.
  "Batch Normalization: Accelerating Deep Network Training by Reducing
  Internal Covariate Shift"
  Sergey Ioffe, Christian Szegedy
  Arguments:
    axis: An `int` or list of `int`, the axis or axes that should be
      normalized, typically the features axis/axes. For instance, after a
      `Conv2D` layer with `data_format="channels_first"`, set `axis=1`. If a
      list of axes is provided, each axis in `axis` will be normalized
      simultaneously. Default is `-1` which uses the last axis. Note: when
      using multi-axis batch norm, the `beta`, `gamma`, `moving_mean`, and
      `moving_variance` variables are the same rank as the input Tensor, with
      dimension size 1 in all reduced (non-axis) dimensions).
    momentum: Momentum for the moving average.
    epsilon: Small float added to variance to avoid dividing by zero.
    center: If True, add offset of `beta` to normalized tensor. If False, `beta`
      is ignored.
    scale: If True, multiply by `gamma`. If False, `gamma` is
      not used. When the next layer is linear (also e.g. `nn.relu`), this can be
      disabled since the scaling can be done by the next layer.
    beta_initializer: Initializer for the beta weight.
    gamma_initializer: Initializer for the gamma weight.
    moving_mean_initializer: Initializer for the moving mean.
    moving_variance_initializer: Initializer for the moving variance.
    beta_regularizer: Optional regularizer for the beta weight.
    gamma_regularizer: Optional regularizer for the gamma weight.
    beta_constraint: An optional projection function to be applied to the `beta`
      weight after being updated by an `Optimizer` (e.g. used to implement
      norm constraints or value constraints for layer weights). The function
      must take as input the unprojected variable and must return the
      projected variable (which must have the same shape). Constraints are
      not safe to use when doing asynchronous distributed training.
    gamma_constraint: An optional projection function to be applied to the
      `gamma` weight after being updated by an `Optimizer`.
    renorm: Whether to use Batch Renormalization
      (https://arxiv.org/abs/1702.03275). This adds extra variables during
      training. The inference is the same for either value of this parameter.
    renorm_clipping: A dictionary that may map keys 'rmax', 'rmin', 'dmax' to
      scalar `Tensors` used to clip the renorm correction. The correction
      `(r, d)` is used as `corrected_value = normalized_value * r + d`, with
      `r` clipped to [rmin, rmax], and `d` to [-dmax, dmax]. Missing rmax, rmin,
      dmax are set to inf, 0, inf, respectively.
    renorm_momentum: Momentum used to update the moving means and standard
      deviations with renorm. Unlike `momentum`, this affects training
      and should be neither too small (which would add noise) nor too large
      (which would give stale estimates). Note that `momentum` is still applied
      to get the means and variances for inference.
    fused: if `None` or `True`, use a faster, fused implementation if possible.
      If `False`, use the system recommended implementation.
    trainable: Boolean, if `True` also add variables to the graph collection
      `GraphKeys.TRAINABLE_VARIABLES` (see tf.Variable).
    virtual_batch_size: An `int`. By default, `virtual_batch_size` is `None`,
      which means batch normalization is performed across the whole batch. When
      `virtual_batch_size` is not `None`, instead perform "Ghost Batch
      Normalization", which creates virtual sub-batches which are each
      normalized separately (with shared gamma, beta, and moving statistics).
      Must divide the actual batch size during execution.
    adjustment: A function taking the `Tensor` containing the (dynamic) shape of
      the input tensor and returning a pair (scale, bias) to apply to the
      normalized values (before gamma and beta), only during training. For
      example, if axis==-1,
        `adjustment = lambda shape: (
          tf.random_uniform(shape[-1:], 0.93, 1.07),
          tf.random_uniform(shape[-1:], -0.1, 0.1))`
      will scale the normalized value by up to 7% up or down, then shift the
      result by up to 0.1 (with independent scaling and bias for each feature
      but shared across all examples), and finally apply gamma and/or beta. If
      `None`, no adjustment is applied. Cannot be specified if
      virtual_batch_size is specified.
    name: A string, the name of the layer.
  """

  def __init__(self,
               axis=-1,
               momentum=0.99,
               epsilon=1e-3,
               center=True,
               scale=True,
               beta_initializer=init_ops.zeros_initializer(),
               gamma_initializer=init_ops.ones_initializer(),
               moving_mean_initializer=init_ops.zeros_initializer(),
               moving_variance_initializer=init_ops.ones_initializer(),
               beta_regularizer=None,
               gamma_regularizer=None,
               beta_constraint=None,
               gamma_constraint=None,
               renorm=False,
               renorm_clipping=None,
               renorm_momentum=0.99,
               fused=None,
               trainable=True,
               virtual_batch_size=None,
               adjustment=None,
               name=None,
               **kwargs):
    # Thin graph-mode wrapper: every argument is forwarded unchanged to the
    # Keras implementation; base.Layer supplies the tf.layers bookkeeping.
    super(BatchNormalization, self).__init__(
        axis=axis,
        momentum=momentum,
        epsilon=epsilon,
        center=center,
        scale=scale,
        beta_initializer=beta_initializer,
        gamma_initializer=gamma_initializer,
        moving_mean_initializer=moving_mean_initializer,
        moving_variance_initializer=moving_variance_initializer,
        beta_regularizer=beta_regularizer,
        gamma_regularizer=gamma_regularizer,
        beta_constraint=beta_constraint,
        gamma_constraint=gamma_constraint,
        renorm=renorm,
        renorm_clipping=renorm_clipping,
        renorm_momentum=renorm_momentum,
        fused=fused,
        trainable=trainable,
        virtual_batch_size=virtual_batch_size,
        adjustment=adjustment,
        name=name,
        **kwargs)

  def call(self, inputs, training=False):
    # tf.layers defaults to inference mode (training=False), overriding the
    # Keras layer's default for this argument.
    return super(BatchNormalization, self).call(inputs, training=training)
@deprecation.deprecated(
    date=None,
    # Fixed: the replacement API is the Keras *class*; Keras has no
    # functional `batch_normalization`, so the old message pointed users at
    # a nonexistent symbol.
    instructions='Use keras.layers.BatchNormalization instead.')
@tf_export(v1=['layers.batch_normalization'])
def batch_normalization(inputs,
                        axis=-1,
                        momentum=0.99,
                        epsilon=1e-3,
                        center=True,
                        scale=True,
                        beta_initializer=init_ops.zeros_initializer(),
                        gamma_initializer=init_ops.ones_initializer(),
                        moving_mean_initializer=init_ops.zeros_initializer(),
                        moving_variance_initializer=init_ops.ones_initializer(),
                        beta_regularizer=None,
                        gamma_regularizer=None,
                        beta_constraint=None,
                        gamma_constraint=None,
                        training=False,
                        trainable=True,
                        name=None,
                        reuse=None,
                        renorm=False,
                        renorm_clipping=None,
                        renorm_momentum=0.99,
                        fused=None,
                        virtual_batch_size=None,
                        adjustment=None):
  """Functional interface for the batch normalization layer.
  Reference: http://arxiv.org/abs/1502.03167
  "Batch Normalization: Accelerating Deep Network Training by Reducing
  Internal Covariate Shift"
  Sergey Ioffe, Christian Szegedy
  Note: when training, the moving_mean and moving_variance need to be updated.
  By default the update ops are placed in `tf.GraphKeys.UPDATE_OPS`, so they
  need to be executed alongside the `train_op`. Also, be sure to add any
  batch_normalization ops before getting the update_ops collection. Otherwise,
  update_ops will be empty, and training/inference will not work properly. For
  example:
  ```python
    x_norm = tf.layers.batch_normalization(x, training=training)
    # ...
    update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
    train_op = optimizer.minimize(loss)
    train_op = tf.group([train_op, update_ops])
  ```
  Arguments:
    inputs: Tensor input.
    axis: An `int`, the axis that should be normalized (typically the features
      axis). For instance, after a `Convolution2D` layer with
      `data_format="channels_first"`, set `axis=1` in `BatchNormalization`.
    momentum: Momentum for the moving average.
    epsilon: Small float added to variance to avoid dividing by zero.
    center: If True, add offset of `beta` to normalized tensor. If False, `beta`
      is ignored.
    scale: If True, multiply by `gamma`. If False, `gamma` is
      not used. When the next layer is linear (also e.g. `nn.relu`), this can be
      disabled since the scaling can be done by the next layer.
    beta_initializer: Initializer for the beta weight.
    gamma_initializer: Initializer for the gamma weight.
    moving_mean_initializer: Initializer for the moving mean.
    moving_variance_initializer: Initializer for the moving variance.
    beta_regularizer: Optional regularizer for the beta weight.
    gamma_regularizer: Optional regularizer for the gamma weight.
    beta_constraint: An optional projection function to be applied to the `beta`
      weight after being updated by an `Optimizer` (e.g. used to implement
      norm constraints or value constraints for layer weights). The function
      must take as input the unprojected variable and must return the
      projected variable (which must have the same shape). Constraints are
      not safe to use when doing asynchronous distributed training.
    gamma_constraint: An optional projection function to be applied to the
      `gamma` weight after being updated by an `Optimizer`.
    training: Either a Python boolean, or a TensorFlow boolean scalar tensor
      (e.g. a placeholder). Whether to return the output in training mode
      (normalized with statistics of the current batch) or in inference mode
      (normalized with moving statistics). **NOTE**: make sure to set this
      parameter correctly, or else your training/inference will not work
      properly.
    trainable: Boolean, if `True` also add variables to the graph collection
      `GraphKeys.TRAINABLE_VARIABLES` (see tf.Variable).
    name: String, the name of the layer.
    reuse: Boolean, whether to reuse the weights of a previous layer
      by the same name.
    renorm: Whether to use Batch Renormalization
      (https://arxiv.org/abs/1702.03275). This adds extra variables during
      training. The inference is the same for either value of this parameter.
    renorm_clipping: A dictionary that may map keys 'rmax', 'rmin', 'dmax' to
      scalar `Tensors` used to clip the renorm correction. The correction
      `(r, d)` is used as `corrected_value = normalized_value * r + d`, with
      `r` clipped to [rmin, rmax], and `d` to [-dmax, dmax]. Missing rmax, rmin,
      dmax are set to inf, 0, inf, respectively.
    renorm_momentum: Momentum used to update the moving means and standard
      deviations with renorm. Unlike `momentum`, this affects training
      and should be neither too small (which would add noise) nor too large
      (which would give stale estimates). Note that `momentum` is still applied
      to get the means and variances for inference.
    fused: if `None` or `True`, use a faster, fused implementation if possible.
      If `False`, use the system recommended implementation.
    virtual_batch_size: An `int`. By default, `virtual_batch_size` is `None`,
      which means batch normalization is performed across the whole batch. When
      `virtual_batch_size` is not `None`, instead perform "Ghost Batch
      Normalization", which creates virtual sub-batches which are each
      normalized separately (with shared gamma, beta, and moving statistics).
      Must divide the actual batch size during execution.
    adjustment: A function taking the `Tensor` containing the (dynamic) shape of
      the input tensor and returning a pair (scale, bias) to apply to the
      normalized values (before gamma and beta), only during training. For
      example, if axis==-1,
        `adjustment = lambda shape: (
          tf.random_uniform(shape[-1:], 0.93, 1.07),
          tf.random_uniform(shape[-1:], -0.1, 0.1))`
      will scale the normalized value by up to 7% up or down, then shift the
      result by up to 0.1 (with independent scaling and bias for each feature
      but shared across all examples), and finally apply gamma and/or beta. If
      `None`, no adjustment is applied. Cannot be specified if
      virtual_batch_size is specified.
  Returns:
    Output tensor.
  Raises:
    ValueError: if eager execution is enabled.
  """
  # Build a layer instance and immediately apply it. `_reuse`/`_scope` are
  # passed through to the tf.layers base machinery so the layer's variables
  # are created in (or reused from) the surrounding variable scope.
  layer = BatchNormalization(
      axis=axis,
      momentum=momentum,
      epsilon=epsilon,
      center=center,
      scale=scale,
      beta_initializer=beta_initializer,
      gamma_initializer=gamma_initializer,
      moving_mean_initializer=moving_mean_initializer,
      moving_variance_initializer=moving_variance_initializer,
      beta_regularizer=beta_regularizer,
      gamma_regularizer=gamma_regularizer,
      beta_constraint=beta_constraint,
      gamma_constraint=gamma_constraint,
      renorm=renorm,
      renorm_clipping=renorm_clipping,
      renorm_momentum=renorm_momentum,
      fused=fused,
      trainable=trainable,
      virtual_batch_size=virtual_batch_size,
      adjustment=adjustment,
      name=name,
      _reuse=reuse,
      _scope=name)
  return layer.apply(inputs, training=training)
# Short aliases for the class-based and functional interfaces defined above.
BatchNorm = BatchNormalization
batch_norm = batch_normalization
| hfp/tensorflow-xsmm | tensorflow/python/layers/normalization.py | Python | apache-2.0 | 15,335 |
import tests.model_control.test_ozone_custom_models_enabled as testmod
testmod.build_model( ['Anscombe'] , ['LinearTrend'] , ['Seasonal_MonthOfYear'] , ['SVR'] ); | antoinecarme/pyaf | tests/model_control/detailed/transf_Anscombe/model_control_one_enabled_Anscombe_LinearTrend_Seasonal_MonthOfYear_SVR.py | Python | bsd-3-clause | 164 |
import string

t = string.Template('$var')
# ``Template.pattern`` is the compiled regex used to locate placeholders
# such as ``$var`` and ``${var}``; show its source text.
placeholder_regex = t.pattern.pattern
print(placeholder_regex)
| jasonwee/asus-rt-n14uhp-mrtg | src/lesson_text/string_template_defaultpattern.py | Python | apache-2.0 | 69 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Created on Fri Dec 17 11:08:30 2014
@author: Fabrizio Coccetti (fabrizio.coccetti@centrofermi.it) [www.fc8.net]
Query Run Db and extract Transfer and DQM data
"""
import os
import MySQLdb
from datetime import datetime
import ConfigParser
import pickle
import logging
import logging.config
from e3monitor.config.__files_server__ import (logConfigFile,
                                               dbConfigFile,
                                               pklDqmFile,
                                               pklTransferFile,
                                               pklTracksFile,
                                               pathWorkDir)
from e3monitor.db.E3Dqm import E3Dqm
from e3monitor.db.E3Transfer import E3Transfer
# List with the name of the Schools
schoolNames = []
# Class with methods with last run in DQM from the database
dqmData = E3Dqm()
transferData = E3Transfer()
# Define dates
startRun = datetime(2014, 9, 1)
startRunStr = startRun.strftime("%Y-%m-%d")
today = datetime.today()
todayStr = today.strftime("%Y-%m-%d")
# Set up logging
logging.config.fileConfig(logConfigFile)
logger = logging.getLogger('full')
logger.info('Started')
logger = logging.getLogger('plain')
# Reading db ini file
logger.info('Reading ' + dbConfigFile)
parser = ConfigParser.ConfigParser()
parser.read(dbConfigFile)
host = parser.get('General', 'host')
user = parser.get('General', 'user')
dbname = parser.get('General', 'dbname')
passwd = parser.get('General', 'passwd')
# Connecting to the database
logger.info('Connecting to %s on %s (as %s)' % (dbname, host, user))
db = MySQLdb.connect(host=host, user=user, passwd=passwd, db=dbname)
cur = db.cursor()
# Connect to the School's table and get the school name list
logger.info('Connect to the School\'s table and get the school name list')
query = "SELECT name FROM telescopes;"
logger.info('Get the School\'s name list: ' + query)
cur.execute(query)
schoolNames = [item[0] for item in cur.fetchall()]
# Sort in place: the previous ``sorted(schoolNames)`` built a sorted copy
# and discarded it, leaving the list in database order.
schoolNames.sort()
# Initialize Transfer Data
for _schoolName in schoolNames:
    transferData.init_School(_schoolName)
    print(transferData.schoolData(_schoolName))
# Query for the last transferred file at CNAF
logger.info('Query for the last run transferred at CNAF of each school')
query = ("SELECT station_name, run_date, run_id, bin_file_size, "
         "transfer_timestamp, last_update "
         "FROM runs2 WHERE station_name = %s "
         "ORDER BY transfer_timestamp DESC LIMIT 1;")
logger.info('About to query: ' + query)
for _schoolName in schoolNames:
    cur.execute(query, _schoolName)
    _entry = cur.fetchone()
    if _entry is None:
        continue
    # Assign parameter to the class
    transferData.add_entry(_schoolName, _entry)
    logger.info('Read School: ' + _schoolName)
    logger.info(transferData.schoolData(_schoolName))
# Query for the number of files transferred today
_beginTime = datetime.today().strftime("%Y-%m-%d") + " 00:00:00"
_endTime = datetime.today().strftime("%Y-%m-%d") + " 23:59:59"
logger.info('Query for the number of files transferred today')
query = ("SELECT COUNT(*) FROM runs2 "
         "WHERE station_name = %s "
         "AND (transfer_timestamp BETWEEN %s AND %s);")
logger.info('About to query: ' + query)
for _schoolName in schoolNames:
    queryParam = (_schoolName, _beginTime, _endTime)
    cur.execute(query, queryParam)
    _entry = cur.fetchone()
    if _entry is None:
        continue
    # Assign parameter to the class
    transferData.set_numFiles(_schoolName, _entry)
    # transferData.add_entry(_schoolName, _entry)
    logger.info('Read School: ' + _schoolName)
    logger.info(transferData.schoolData(_schoolName))
    logger.info(transferData.get_numFiles(_schoolName))
# Save the Transfer data extracted from the db
logger.info('Writing data to file...')
output = open(os.path.join(pathWorkDir, pklTransferFile), 'wb')
pickle.dump(transferData, output)
output.close()
logger = logging.getLogger('full')
logger.info('Written ' + os.path.join(pathWorkDir, pklTransferFile))
# Query for DQM: the last run data of each school
logger.info('Query for the last run in DQM of each school')
query = ("SELECT * FROM runs2 WHERE station_name = %s "
         "AND processing_status_code=0 "
         "ORDER BY last_update DESC LIMIT 1;")
logger.info('About to query: ' + query)
for _schoolName in schoolNames:
    cur.execute(query, _schoolName)
    _entry = cur.fetchone()
    if _entry is None:
        continue
    # Assign parameter to the class
    dqmData.add_entry(_schoolName, _entry)
    logger.info('Read School: ' + _schoolName)
    logger.info(dqmData.schoolData(_schoolName))
# Query for Statistics
logger.info('Query for statistics:')
logger.info('1. Query of the total number of Tracks')
query = ("SELECT SUM(num_track_events) from runs2 WHERE (run_date >= %s AND run_date <= %s);")
queryParam = (startRunStr, todayStr)
cur.execute(query, queryParam)
try:
    totalTracks = int(cur.fetchone()[0])
except (TypeError, ValueError):
    # fetchone() may return None (no rows) and SUM() may yield NULL, making
    # int() raise; the old bare ``except:`` also swallowed KeyboardInterrupt
    # and SystemExit.
    totalTracks = 0
logger.info('Total Tracks: ' + str(totalTracks))
# Save the DQM data extracted from the db
logger.info('Writing data to file...')
output = open(os.path.join(pathWorkDir, pklDqmFile), 'wb')
pickle.dump(dqmData, output)
output.close()
logger = logging.getLogger('full')
logger.info('Written ' + os.path.join(pathWorkDir, pklDqmFile))
# Save the total number of tracks
logger.info('Writing totalTracks number to file...')
output = open(os.path.join(pathWorkDir, pklTracksFile), 'wb')
pickle.dump(totalTracks, output)
output.close()
logger = logging.getLogger('full')
logger.info('Written ' + os.path.join(pathWorkDir, pklTracksFile))
cur.close()
db.close()
logger.info('Finished')
| centrofermi/e3monitor | apps/e3db.py | Python | gpl-3.0 | 5,727 |
#!/usr/bin/python
"""
Start IPython cluster.
"""
import logging
import os
import sys
from optparse import OptionParser
from ConfigParser import SafeConfigParser as ConfigParser
from lofarpipe.support.clusterhandler import ClusterHandler

# Command line interface: pipeline config file and engine count per node.
parser = OptionParser()
parser.add_option("--config", help="Pipeline configuration file", default="~/.pipeline.cfg")
parser.add_option("--num-engines", help="Number of engines per node", default=8)
options, args = parser.parse_args()

# Log everything to stdout with a timestamped, level-tagged format.
root_logger = logging.getLogger()
console = logging.StreamHandler(sys.stdout)
console.setFormatter(logging.Formatter(
    "%(asctime)s %(levelname)-7s: %(message)s",
    "%Y-%m-%d %H:%M:%S"
))
root_logger.addHandler(console)
root_logger.setLevel(logging.DEBUG)

# Read the pipeline configuration and bring the cluster up.
config = ConfigParser()
config.read(os.path.expanduser(options.config))
clusterhandler = ClusterHandler(config)
clusterhandler.start_cluster(options.num_engines)
| jjdmol/LOFAR | CEP/Pipeline/deploy/deprecated/start_cluster.py | Python | gpl-3.0 | 938 |
# Code copied from http://djangosnippets.org/snippets/19/
# License information from http://djangosnippets.org/about/tos/ :
# I hate legal-speak as much as anybody, but on a site which is geared
# toward sharing code there has to be at least a little bit of it, so
# here goes:
#
# By creating an account here you agree to three things:
#
# 1. That you will only post code which you wrote yourself and that
# you have the legal right to release under these terms.
#
# 2. That you grant any third party who sees the code you post a
# royalty-free, non-exclusive license to copy and distribute that code
# and to make and distribute derivative works based on that code. You
# may include license terms in snippets you post, if you wish to use a
# particular license (such as the BSD license or GNU GPL), but that
# license must permit royalty-free copying, distribution and
# modification of the code to which it is applied.
#
# 3. That if you post code of which you are not the author or for
# which you do not have the legal right to distribute according to
# these terms, you will indemnify and hold harmless the operators of
# this site and any third parties who are exposed to liability as a
# result of your actions. If you can't legally agree to these terms,
# or don't want to, you cannot create an account here.
import re
import cgi
re_string = re.compile(r'(?P<htmlchars>[<&>])|(?P<space>^[ \t]+)|(?P<lineend>\r\n|\r|\n)|(?P<protocal>(^|\s)((http|ftp)://.*?))(\s|$)', re.S|re.M|re.I)

# cgi.escape() was deprecated in Python 3.2 and removed in 3.8.  The regex
# only ever captures a single character from [<&>], so an explicit map is an
# exact, dependency-free replacement.
_HTML_ESCAPES = {'<': '&lt;', '>': '&gt;', '&': '&amp;'}


def plaintext2html(text, tabstop=4):
    """Convert plain text to HTML.

    Escapes <, > and &, turns line endings into <br>, renders leading
    whitespace as ``&nbsp;`` entities (tabs expand to *tabstop* spaces), and
    wraps http/ftp URLs in anchor tags.
    """
    def do_sub(m):
        c = m.groupdict()
        if c['htmlchars']:
            return _HTML_ESCAPES[c['htmlchars']]
        if c['lineend']:
            return '<br>'
        elif c['space']:
            # Leading whitespace: expand tabs, then protect every space so
            # the browser preserves the indentation.
            # NOTE(review): the entity literals below were mangled to raw
            # whitespace in an earlier HTML-decoding pass; restored to the
            # intended ``&nbsp;`` entities.
            t = m.group().replace('\t', '&nbsp;'*tabstop)
            t = t.replace(' ', '&nbsp;')
            return t
        # (The original ``elif c['space'] == '\t'`` branch was unreachable:
        # any truthy 'space' match is consumed by the branch above.)
        else:
            url = m.group('protocal')
            if url.startswith(' '):
                prefix = ' '
                url = url[1:]
            else:
                prefix = ''
            # The final alternation group captured the char after the URL;
            # keep it, converting a line ending into <br>.
            last = m.groups()[-1]
            if last in ['\n', '\r', '\r\n']:
                last = '<br>'
            return '%s<a href="%s">%s</a>%s' % (prefix, url, url, last)
    return re.sub(re_string, do_sub, text)
| RobFisher/mailshare | mailshareapp/plaintext.py | Python | bsd-3-clause | 2,330 |
#!/usr/bin/python
#
# Copyright (c) 2011 The Bitcoin developers
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
import time
import json
import pprint
import hashlib
import struct
import re
import base64
import httplib
import sys
from multiprocessing import Process
ERR_SLEEP = 15
MAX_NONCE = 1000000L
settings = {}
pp = pprint.PrettyPrinter(indent=4)
class BitcoinRPC:
OBJID = 1
def __init__(self, host, port, username, password):
authpair = "%s:%s" % (username, password)
self.authhdr = "Basic %s" % (base64.b64encode(authpair))
self.conn = httplib.HTTPConnection(host, port, False, 30)
def rpc(self, method, params=None):
self.OBJID += 1
obj = { 'version' : '1.1',
'method' : method,
'id' : self.OBJID }
if params is None:
obj['params'] = []
else:
obj['params'] = params
self.conn.request('POST', '/', json.dumps(obj),
{ 'Authorization' : self.authhdr,
'Content-type' : 'application/json' })
resp = self.conn.getresponse()
if resp is None:
print "JSON-RPC: no response"
return None
body = resp.read()
resp_obj = json.loads(body)
if resp_obj is None:
print "JSON-RPC: cannot JSON-decode body"
return None
if 'error' in resp_obj and resp_obj['error'] != None:
return resp_obj['error']
if 'result' not in resp_obj:
print "JSON-RPC: no result in object"
return None
return resp_obj['result']
def getblockcount(self):
return self.rpc('getblockcount')
def getwork(self, data=None):
return self.rpc('getwork', data)
def uint32(x):
	"""Truncate *x* to an unsigned 32-bit value.

	The old ``0xffffffffL`` literal was Python-2-only syntax; the plain
	literal is identical on Python 2 (ints auto-promote to long) and also
	parses on Python 3.
	"""
	return x & 0xffffffff

def bytereverse(x):
	"""Byte-swap a 32-bit word, e.g. 0x12345678 -> 0x78563412."""
	return uint32(( ((x) << 24) | (((x) << 8) & 0x00ff0000) |
			(((x) >> 8) & 0x0000ff00) | ((x) >> 24) ))

def bufreverse(in_buf):
	"""Byte-swap every aligned 32-bit word of the byte string *in_buf*."""
	out_words = []
	for i in range(0, len(in_buf), 4):
		word = struct.unpack('@I', in_buf[i:i+4])[0]
		out_words.append(struct.pack('@I', bytereverse(word)))
	# b''.join keeps this correct for byte strings on both Python 2
	# (where b'' == '') and Python 3 (where ''.join over bytes raises).
	return b''.join(out_words)

def wordreverse(in_buf):
	"""Reverse the order of the 32-bit words in the byte string *in_buf*."""
	out_words = []
	for i in range(0, len(in_buf), 4):
		out_words.append(in_buf[i:i+4])
	out_words.reverse()
	return b''.join(out_words)
class Miner:
	"""Single-threaded CPU miner: fetches work over RPC, scans nonces, and
	submits any nonce whose double-SHA256 hash beats the target."""
	def __init__(self, id):
		self.id = id
		self.max_nonce = MAX_NONCE

	def work(self, datastr, targetstr):
		"""Scan up to ``self.max_nonce`` nonces against one unit of work.

		Returns ``(hashes_done, nonce_bin)`` where ``nonce_bin`` is the
		packed winning nonce or None if no solution was found."""
		# decode work data hex string to binary
		static_data = datastr.decode('hex')
		static_data = bufreverse(static_data)

		# the first 76b of 80b do not change
		blk_hdr = static_data[:76]

		# decode 256-bit target value
		targetbin = targetstr.decode('hex')
		targetbin = targetbin[::-1]	# byte-swap and dword-swap
		targetbin_str = targetbin.encode('hex')
		target = long(targetbin_str, 16)

		# pre-hash first 76b of block header
		static_hash = hashlib.sha256()
		static_hash.update(blk_hdr)

		for nonce in xrange(self.max_nonce):

			# encode 32-bit nonce value
			nonce_bin = struct.pack("<I", nonce)

			# hash final 4b, the nonce value
			hash1_o = static_hash.copy()
			hash1_o.update(nonce_bin)
			hash1 = hash1_o.digest()

			# sha256 hash of sha256 hash
			hash_o = hashlib.sha256()
			hash_o.update(hash1)
			hash = hash_o.digest()

			# quick test for winning solution: high 32 bits zero?
			if hash[-4:] != '\0\0\0\0':
				continue

			# convert binary hash to 256-bit Python long
			hash = bufreverse(hash)
			hash = wordreverse(hash)

			hash_str = hash.encode('hex')
			l = long(hash_str, 16)

			# proof-of-work test:  hash < target
			if l < target:
				print time.asctime(), "PROOF-OF-WORK found: %064x" % (l,)
				return (nonce + 1, nonce_bin)
			else:
				print time.asctime(), "PROOF-OF-WORK false positive %064x" % (l,)
#				return (nonce + 1, nonce_bin)

		return (nonce + 1, None)

	def submit_work(self, rpc, original_data, nonce_bin):
		"""Splice the winning nonce into the work data and send it upstream."""
		nonce_bin = bufreverse(nonce_bin)
		nonce = nonce_bin.encode('hex')
		# The nonce occupies hex chars [152:160) of the 256-char data field.
		solution = original_data[:152] + nonce + original_data[160:256]
		param_arr = [ solution ]
		result = rpc.getwork(param_arr)
		print time.asctime(), "--> Upstream RPC result:", result

	def iterate(self, rpc):
		"""One getwork/scan cycle; retunes max_nonce to match 'scantime'."""
		work = rpc.getwork()
		if work is None:
			time.sleep(ERR_SLEEP)
			return
		if 'data' not in work or 'target' not in work:
			time.sleep(ERR_SLEEP)
			return

		time_start = time.time()

		(hashes_done, nonce_bin) = self.work(work['data'],
						     work['target'])

		time_end = time.time()
		time_diff = time_end - time_start

		# Scale the next scan so it takes roughly settings['scantime'] secs.
		self.max_nonce = long(
			(hashes_done * settings['scantime']) / time_diff)
		if self.max_nonce > 0xfffffffaL:
			self.max_nonce = 0xfffffffaL

		if settings['hashmeter']:
			print "HashMeter(%d): %d hashes, %.2f Khash/sec" % (
			      self.id, hashes_done,
			      (hashes_done / 1000.0) / time_diff)

		if nonce_bin is not None:
			self.submit_work(rpc, work['data'], nonce_bin)

	def loop(self):
		"""Connect to the daemon and iterate forever."""
		rpc = BitcoinRPC(settings['host'], settings['port'],
				 settings['rpcuser'], settings['rpcpass'])
		if rpc is None:
			return

		while True:
			self.iterate(rpc)
def miner_thread(id):
	# Process entry point: mine forever on a dedicated Miner instance.
	Miner(id).loop()
if __name__ == '__main__':
if len(sys.argv) != 2:
print "Usage: pyminer.py CONFIG-FILE"
sys.exit(1)
f = open(sys.argv[1])
for line in f:
# skip comment lines
m = re.search('^\s*#', line)
if m:
continue
# parse key=value lines
m = re.search('^(\w+)\s*=\s*(\S.*)$', line)
if m is None:
continue
settings[m.group(1)] = m.group(2)
f.close()
if 'host' not in settings:
settings['host'] = '127.0.0.1'
if 'port' not in settings:
settings['port'] = 22888
if 'threads' not in settings:
settings['threads'] = 1
if 'hashmeter' not in settings:
settings['hashmeter'] = 0
if 'scantime' not in settings:
settings['scantime'] = 30L
if 'rpcuser' not in settings or 'rpcpass' not in settings:
print "Missing username and/or password in cfg file"
sys.exit(1)
settings['port'] = int(settings['port'])
settings['threads'] = int(settings['threads'])
settings['hashmeter'] = int(settings['hashmeter'])
settings['scantime'] = long(settings['scantime'])
thr_list = []
for thr_id in range(settings['threads']):
p = Process(target=miner_thread, args=(thr_id,))
p.start()
thr_list.append(p)
time.sleep(1) # stagger threads
print settings['threads'], "mining threads started"
print time.asctime(), "Miner Starts - %s:%s" % (settings['host'], settings['port'])
try:
for thr_proc in thr_list:
thr_proc.join()
except KeyboardInterrupt:
pass
print time.asctime(), "Miner Stops - %s:%s" % (settings['host'], settings['port'])
| fedaykinofdune/octocoin | contrib/pyminer/pyminer.py | Python | mit | 6,435 |
import StringIO
import re
import codecs
import inspect
import os
import traceback
from collections import defaultdict
from jinja2 import Environment
from jinja2 import FileSystemLoader
from nose.exc import SkipTest
from nose.plugins import Plugin
import sys
from datetime import datetime
__version__ = '0.1.0'
# Matches ids whose last component carries a call-argument suffix, e.g.
# "pkg.mod.test_x(1, 2)" -> ("pkg.mod.test_x", "(1, 2)").
TEST_ID = re.compile(r'^(.*?)(\(.*\))$')


def id_split(idval):
    """Split a test id into ``[group prefix, test name (+ args)]``."""
    match = TEST_ID.match(idval)
    if match is None:
        return idval.rsplit(".", 1)
    name, fargs = match.groups()
    head, tail = name.rsplit(".", 1)
    return [head, tail + fargs]
def nice_classname(obj):
"""Returns a nice name for class object or class instance.
>>> nice_classname(Exception()) # doctest: +ELLIPSIS
'...Exception'
>>> nice_classname(Exception) # doctest: +ELLIPSIS
'...Exception'
"""
if inspect.isclass(obj):
cls_name = obj.__name__
else:
cls_name = obj.__class__.__name__
mod = inspect.getmodule(obj)
if mod:
name = mod.__name__
# jython
if name.startswith('org.python.core.'):
name = name[len('org.python.core.'):]
return "%s.%s" % (name, cls_name)
else:
return cls_name
def exc_message(exc_info):
    """Return the exception's message."""
    exc = exc_info[1]
    if exc is None:
        # str exception
        return exc_info[0]
    try:
        return str(exc)
    except UnicodeEncodeError:
        try:
            return unicode(exc)  # flake8: noqa
        except UnicodeError:
            # Fallback to args as neither str nor
            # unicode(Exception(u'\xe6')) work in Python < 2.6
            return exc.args[0]
class OutputRedirector(object):
    """ Wrapper to redirect stdout or stderr """

    def __init__(self, fp):
        # ``fp`` is reassigned externally to point at a capture buffer.
        self.fp = fp

    def write(self, s):
        """Forward one string to the wrapped stream."""
        self.fp.write(s)

    def writelines(self, lines):
        """Forward a sequence of strings to the wrapped stream."""
        self.fp.writelines(lines)

    def flush(self):
        """Flush the wrapped stream."""
        self.fp.flush()
# Module-level redirectors; HtmlReport.startTest repoints their ``fp`` at an
# in-memory buffer so each test's stdout/stderr can be captured.
stdout_redirector = OutputRedirector(sys.stdout)
stderr_redirector = OutputRedirector(sys.stderr)
class Group(object):
    """A bucket of test results sharing a common id prefix."""

    def __init__(self):
        self.tests = []
        self.stats = {'errors': 0, 'failures': 0, 'passes': 0, 'skipped': 0}
class HtmlReport(Plugin):
    """
    Output test results as pretty html.
    """
    name = 'html'  # nose plugin name (enables --with-html)
    score = 2000  # plugin ordering priority used by nose
    encoding = 'UTF-8'  # encoding of the generated report file
    report_file = None  # codecs file handle, opened in configure()
    # stdout0 = None
    # stderr0 = None
    # outputBuffer = None
    def __init__(self, verbosity=1):
        """Initialize output-capture state; buffers replace stdout/stderr per test."""
        super(HtmlReport, self).__init__()
        self.stdout0 = None  # saved sys.stdout while a test is running
        self.stderr0 = None  # saved sys.stderr while a test is running
        self.outputBuffer = StringIO.StringIO()
        self.verbosity = verbosity
    def startTest(self, test):
        """Begin capturing the test's stdout and stderr."""
        # just one buffer for both stdout and stderr
        self.outputBuffer = StringIO.StringIO()
        stdout_redirector.fp = self.outputBuffer
        stderr_redirector.fp = self.outputBuffer
        # Remember the real streams so complete_output() can restore them.
        self.stdout0 = sys.stdout
        self.stderr0 = sys.stderr
        sys.stdout = stdout_redirector
        sys.stderr = stderr_redirector
def complete_output(self):
"""
Disconnect output redirection and return buffer.
Safe to call multiple times.
"""
if self.stdout0:
sys.stdout = self.stdout0
sys.stderr = self.stderr0
self.stdout0 = None
self.stderr0 = None
return self.outputBuffer.getvalue()
    def stopTest(self, test):
        """Restore stdout/stderr once the test has finished."""
        # Usually one of addSuccess, addError or addFailure would have been called.
        # But there are some paths in unittest that would bypass this.
        # We must disconnect stdout in stopTest(), which is guaranteed to be called.
        self.complete_output()
    def options(self, parser, env):
        """Register the --html-report and --html-report-template options."""
        Plugin.options(self, parser, env)
        parser.add_option(
            '--html-report', action='store',
            dest='html_file', metavar="FILE",
            default=env.get('NOSE_HTML_FILE', 'nosetests.html'),
            help="Path to html file to store the report in. "
            "Default is nosetests.html in the working directory "
            "[NOSE_HTML_FILE]")
        parser.add_option(
            '--html-report-template', action='store',
            dest='html_template', metavar="FILE",
            default=env.get('NOSE_HTML_TEMPLATE_FILE',
                            os.path.join(os.path.dirname(__file__), "templates", "report.html")),
            help="Path to html template file in with jinja2 format."
            "Default is report.html in the lib sources"
            "[NOSE_HTML_TEMPLATE_FILE]")
    def configure(self, options, config):
        """Configures the html plugin: template env, counters, output file."""
        Plugin.configure(self, options, config)
        self.config = config
        if self.enabled:
            # The template search path is wherever the template file lives.
            self.jinja = Environment(
                loader=FileSystemLoader(os.path.dirname(options.html_template)),
                trim_blocks=True,
                lstrip_blocks=True
            )
            self.stats = {'errors': 0, 'failures': 0, 'passes': 0, 'skipped': 0}
            self.report_data = defaultdict(Group)
            self.report_file = codecs.open(options.html_file, 'w', self.encoding, 'replace')
            self.report_template_filename = options.html_template
    def report(self, stream):
        """Write the HTML report file.

        The file includes a report of test errors and failures, rendered
        through the configured jinja2 template.
        """
        # Derive totals from the per-category counters collected during
        # the run, both globally and for each test group.
        self.stats['total'] = sum(self.stats.values())
        for group in self.report_data.values():
            group.stats['total'] = sum(group.stats.values())
        self.report_file.write(self.jinja.get_template(os.path.basename(self.report_template_filename)).render(
            report=self.report_data,
            stats=self.stats,
            # Timestamp available in templates, to use it when report is generated
            timestamp=datetime.now().strftime("%Y/%m/%d %H:%M:%S"),
        ))
        self.report_file.close()
        if self.config.verbosity > 1:
            stream.writeln("-" * 70)
            stream.writeln("HTML: %s" % self.report_file.name)
    def addSuccess(self, test):
        """Record a passing test in the report data."""
        # id_split yields (group name, ..., test name); tests are grouped
        # by the first component.
        name = id_split(test.id())
        group = self.report_data[name[0]]
        self.stats['passes'] += 1
        group.stats['passes'] += 1
        group.tests.append({
            'name': name[-1],
            'failed': False,
            # complete_output() also restores stdout/stderr redirection.
            'output': self._format_output(self.complete_output()),
            'shortDescription': test.shortDescription(),
        })
def addError(self, test, err, capt=None):
"""Add error output to Xunit report.
"""
exc_type, exc_val, tb = err
tb = ''.join(traceback.format_exception(
exc_type,
exc_val if isinstance(exc_val, exc_type) else exc_type(exc_val),
tb
))
name = id_split(test.id())
group = self.report_data[name[0]]
if issubclass(err[0], SkipTest):
type = 'skipped'
self.stats['skipped'] += 1
group.stats['skipped'] += 1
else:
type = 'error'
self.stats['errors'] += 1
group.stats['errors'] += 1
group.tests.append({
'name': name[-1],
'failed': True,
'type': type,
'errtype': nice_classname(err[0]),
'message': exc_message(err),
'tb': tb,
'output': self._format_output(self.complete_output()),
'shortDescription': test.shortDescription(),
})
    def addFailure(self, test, err, capt=None):
        """Add failure output to the HTML report data.
        """
        exc_type, exc_val, tb = err
        # exc_val may arrive as a plain string; wrap it in the exception
        # type so format_exception accepts it.
        tb = ''.join(traceback.format_exception(
            exc_type,
            exc_val if isinstance(exc_val, exc_type) else exc_type(exc_val),
            tb
        ))
        name = id_split(test.id())
        group = self.report_data[name[0]]
        self.stats['failures'] += 1
        group.stats['failures'] += 1
        # NOTE(review): unlike addError, no 'type' key is added here —
        # presumably the template treats missing 'type' as a failure; confirm
        # against the template before relying on it.
        group.tests.append({
            'name': name[-1],
            'failed': True,
            'errtype': nice_classname(err[0]),
            'message': exc_message(err),
            'tb': tb,
            'output': self._format_output(self.complete_output()),
            'shortDescription': test.shortDescription(),
        })
def _format_output(self, o):
if isinstance(o, str):
return o.decode('latin-1')
else:
return o | telefonicaid/nose-html-reporting | src/nose_html_reporting/__init__.py | Python | bsd-2-clause | 8,592 |
# Copyright (c) 2013 VMware, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Implements operations on volumes residing on VMware datastores.
"""
from oslo_log import log as logging
from oslo_utils import units
from oslo_vmware import exceptions
from oslo_vmware import pbm
from oslo_vmware import vim_util
from six.moves import urllib
from cinder.i18n import _, _LE, _LI
from cinder.volume.drivers.vmware import exceptions as vmdk_exceptions
LOG = logging.getLogger(__name__)
LINKED_CLONE_TYPE = 'linked'
FULL_CLONE_TYPE = 'full'
def split_datastore_path(datastore_path):
    """Split the datastore path to components.

    Returns the datastore name, relative folder path and the file name.

    E.g. datastore_path = [datastore1] my_volume/my_volume.vmdk, returns
    (datastore1, my_volume/, my_volume.vmdk)

    :param datastore_path: Datastore path of a file, of the form
                           "[<datastore>] <folder>/<file>"
    :return: Parsed datastore name, relative folder path and file name.
             For a malformed path with no closing ']', the folder path and
             file name are returned as None.
    """
    # Drop everything up to and including the opening '[', then split on the
    # first ']' to separate the datastore name from the relative path.
    splits = datastore_path.split('[', 1)[1].split(']', 1)
    folder_path = None
    file_name = None
    if len(splits) == 1:
        # No closing ']': treat the whole remainder as the datastore name.
        # (The original crashed here with AttributeError on None.strip().)
        datastore_name = splits[0]
    else:
        datastore_name, path = splits
        # Path will be of form my_volume/my_volume.vmdk;
        # split it into my_volume/ and my_volume.vmdk.
        file_name = path.split('/')[-1]
        # Slice from the front so an empty file name (trailing '/') still
        # yields the full folder path; path[:-0] would wrongly yield ''.
        folder_path = path[:len(path) - len(file_name)]
    return (datastore_name.strip(),
            folder_path.strip() if folder_path is not None else folder_path,
            file_name.strip() if file_name is not None else file_name)
class VirtualDiskPath(object):
    """Paths of the files that together form a virtual disk."""

    def __init__(self, ds_name, folder_path, disk_name):
        """Build path information for the given disk.

        :param ds_name: name of the datastore where disk is stored
        :param folder_path: absolute path of the folder containing the disk
        :param disk_name: name of the virtual disk
        """
        descriptor = "%s%s.vmdk" % (folder_path, disk_name)
        self._descriptor_file_path = descriptor
        self._descriptor_ds_file_path = self.get_datastore_file_path(
            ds_name, descriptor)

    def get_datastore_file_path(self, ds_name, file_path):
        """Return the datastore path for the given absolute file path.

        :param ds_name: name of the datastore containing the file represented
                        by the given file path
        :param file_path: absolute path of the file
        :return: datastore file path
        """
        return "[%s] %s" % (ds_name, file_path)

    def get_descriptor_file_path(self):
        """Absolute file path of the virtual disk descriptor."""
        return self._descriptor_file_path

    def get_descriptor_ds_file_path(self):
        """Datastore file path of the virtual disk descriptor."""
        return self._descriptor_ds_file_path
class FlatExtentVirtualDiskPath(VirtualDiskPath):
    """Paths of files in a non-monolithic disk with a single flat extent."""

    def __init__(self, ds_name, folder_path, disk_name):
        """Build path information for the given flat-extent disk.

        :param ds_name: name of the datastore where disk is stored
        :param folder_path: absolute path of the folder containing the disk
        :param disk_name: name of the virtual disk
        """
        super(FlatExtentVirtualDiskPath, self).__init__(
            ds_name, folder_path, disk_name)
        flat_extent = "%s%s-flat.vmdk" % (folder_path, disk_name)
        self._flat_extent_file_path = flat_extent
        self._flat_extent_ds_file_path = self.get_datastore_file_path(
            ds_name, flat_extent)

    def get_flat_extent_file_path(self):
        """Absolute file path of the flat extent."""
        return self._flat_extent_file_path

    def get_flat_extent_ds_file_path(self):
        """Datastore file path of the flat extent."""
        return self._flat_extent_ds_file_path
class MonolithicSparseVirtualDiskPath(VirtualDiskPath):
    """Paths of file comprising a monolithic sparse disk."""
    # A monolithic sparse disk is a single .vmdk file, so the descriptor
    # paths inherited from VirtualDiskPath are sufficient.
    pass
class VirtualDiskType(object):
    """Supported virtual disk types."""

    EAGER_ZEROED_THICK = "eagerZeroedThick"
    PREALLOCATED = "preallocated"
    THIN = "thin"

    # thick in extra_spec means lazy-zeroed thick disk
    EXTRA_SPEC_DISK_TYPE_DICT = {'eagerZeroedThick': EAGER_ZEROED_THICK,
                                 'thick': PREALLOCATED,
                                 'thin': THIN
                                 }

    @staticmethod
    def is_valid(extra_spec_disk_type):
        """Check whether the given extra_spec disk type is supported.

        :param extra_spec_disk_type: disk type in extra_spec
        :return: True if valid
        """
        return extra_spec_disk_type in VirtualDiskType.EXTRA_SPEC_DISK_TYPE_DICT

    @staticmethod
    def validate(extra_spec_disk_type):
        """Validate the given disk type in extra_spec.

        Raises InvalidDiskTypeException when the disk type is not supported.

        :param extra_spec_disk_type: disk type in extra_spec
        :raises: InvalidDiskTypeException
        """
        if VirtualDiskType.is_valid(extra_spec_disk_type):
            return
        raise vmdk_exceptions.InvalidDiskTypeException(
            disk_type=extra_spec_disk_type)

    @staticmethod
    def get_virtual_disk_type(extra_spec_disk_type):
        """Map the extra_spec disk type to the vSphere disk type.

        :param extra_spec_disk_type: disk type in extra_spec
        :return: virtual disk type
        :raises: InvalidDiskTypeException
        """
        VirtualDiskType.validate(extra_spec_disk_type)
        return VirtualDiskType.EXTRA_SPEC_DISK_TYPE_DICT[extra_spec_disk_type]
class VirtualDiskAdapterType(object):
    """Supported virtual disk adapter types."""

    LSI_LOGIC = "lsiLogic"
    BUS_LOGIC = "busLogic"
    LSI_LOGIC_SAS = "lsiLogicsas"
    PARA_VIRTUAL = "paraVirtual"
    IDE = "ide"

    @staticmethod
    def is_valid(adapter_type):
        """Check if the given adapter type is supported.

        :param adapter_type: adapter type to check
        :return: True if valid
        """
        return adapter_type in (VirtualDiskAdapterType.LSI_LOGIC,
                                VirtualDiskAdapterType.BUS_LOGIC,
                                VirtualDiskAdapterType.LSI_LOGIC_SAS,
                                VirtualDiskAdapterType.PARA_VIRTUAL,
                                VirtualDiskAdapterType.IDE)

    @staticmethod
    def validate(extra_spec_adapter_type):
        """Validate the given adapter type in extra_spec.

        Raises InvalidAdapterTypeException when the adapter type is not
        supported.

        :param extra_spec_adapter_type: adapter type in extra_spec
        :raises: InvalidAdapterTypeException
        """
        if VirtualDiskAdapterType.is_valid(extra_spec_adapter_type):
            return
        raise vmdk_exceptions.InvalidAdapterTypeException(
            invalid_type=extra_spec_adapter_type)

    @staticmethod
    def get_adapter_type(extra_spec_adapter):
        """Get the adapter type to be used in VirtualDiskSpec.

        :param extra_spec_adapter: adapter type in the extra_spec
        :return: adapter type to be used in VirtualDiskSpec
        """
        VirtualDiskAdapterType.validate(extra_spec_adapter)
        # lsiLogicsas/paraVirtual are not supported by the VirtualDiskManager
        # APIs, so report them as lsiLogic. This is safe because the disk is
        # attached to the correct controller type and the disk adapter type
        # is always resolved using its controller key.
        if extra_spec_adapter in (VirtualDiskAdapterType.LSI_LOGIC_SAS,
                                  VirtualDiskAdapterType.PARA_VIRTUAL):
            return VirtualDiskAdapterType.LSI_LOGIC
        return extra_spec_adapter
class ControllerType(object):
    """Encapsulate various controller types."""

    LSI_LOGIC = 'VirtualLsiLogicController'
    BUS_LOGIC = 'VirtualBusLogicController'
    LSI_LOGIC_SAS = 'VirtualLsiLogicSASController'
    PARA_VIRTUAL = 'ParaVirtualSCSIController'
    IDE = 'VirtualIDEController'

    CONTROLLER_TYPE_DICT = {
        VirtualDiskAdapterType.LSI_LOGIC: LSI_LOGIC,
        VirtualDiskAdapterType.BUS_LOGIC: BUS_LOGIC,
        VirtualDiskAdapterType.LSI_LOGIC_SAS: LSI_LOGIC_SAS,
        VirtualDiskAdapterType.PARA_VIRTUAL: PARA_VIRTUAL,
        VirtualDiskAdapterType.IDE: IDE}

    @staticmethod
    def get_controller_type(adapter_type):
        """Look up the disk controller type for the given adapter type.

        :param adapter_type: disk adapter type
        :return: controller type corresponding to the given adapter type
        :raises: InvalidAdapterTypeException
        """
        try:
            return ControllerType.CONTROLLER_TYPE_DICT[adapter_type]
        except KeyError:
            raise vmdk_exceptions.InvalidAdapterTypeException(
                invalid_type=adapter_type)

    @staticmethod
    def is_scsi_controller(controller_type):
        """Check if the given controller is a SCSI controller.

        :param controller_type: controller type
        :return: True if the controller is a SCSI controller
        """
        return controller_type in (ControllerType.LSI_LOGIC,
                                   ControllerType.BUS_LOGIC,
                                   ControllerType.LSI_LOGIC_SAS,
                                   ControllerType.PARA_VIRTUAL)
class VMwareVolumeOps(object):
"""Manages volume operations."""
    def __init__(self, session, max_objects):
        """Create a volume-operations helper.

        :param session: vSphere API session used for all invocations
        :param max_objects: page size used when enumerating inventory objects
        """
        self._session = session
        self._max_objects = max_objects
        # Folder references keyed by datacenter-relative path; populated by
        # create_vm_inventory_folder.
        self._folder_cache = {}
    def get_backing(self, name):
        """Get the backing based on name.

        :param name: Name of the backing
        :return: Managed object reference to the backing, or None if no
                 backing with the given name exists
        """
        # Page through all VirtualMachine objects looking for a name match.
        retrieve_result = self._session.invoke_api(vim_util, 'get_objects',
                                                   self._session.vim,
                                                   'VirtualMachine',
                                                   self._max_objects)
        while retrieve_result:
            vms = retrieve_result.objects
            for vm in vms:
                if vm.propSet[0].val == name:
                    # We got the result, so cancel further retrieval.
                    self.cancel_retrieval(retrieve_result)
                    return vm.obj
            # Result not obtained, continue retrieving results.
            retrieve_result = self.continue_retrieval(retrieve_result)
        LOG.debug("Did not find any backing with name: %s", name)
    def delete_backing(self, backing):
        """Delete the backing.

        :param backing: Managed object reference to the backing
        """
        LOG.debug("Deleting the VM backing: %s.", backing)
        task = self._session.invoke_api(self._session.vim, 'Destroy_Task',
                                        backing)
        LOG.debug("Initiated deletion of VM backing: %s.", backing)
        # Destroy_Task is asynchronous; block until vCenter completes it.
        self._session.wait_for_task(task)
        LOG.info(_LI("Deleted the VM backing: %s."), backing)
# TODO(kartikaditya) Keep the methods not specific to volume in
# a different file
    def get_host(self, instance):
        """Get host under which instance is present.

        :param instance: Managed object reference of the instance VM
        :return: Host managing the instance VM
        """
        # 'runtime.host' is the VM property holding the owning host reference.
        return self._session.invoke_api(vim_util, 'get_object_property',
                                        self._session.vim, instance,
                                        'runtime.host')
    def is_host_usable(self, host):
        """Check if the given ESX host is usable.

        A host is usable if it is connected to vCenter server and not in
        maintenance mode.

        :param host: Managed object reference to the ESX host
        :return: True if host is usable, False otherwise
        """
        runtime_info = self._session.invoke_api(vim_util,
                                                'get_object_property',
                                                self._session.vim,
                                                host,
                                                'runtime')
        return (runtime_info.connectionState == 'connected' and
                not runtime_info.inMaintenanceMode)
    def get_hosts(self):
        """Get all host from the inventory.

        :return: All the hosts from the inventory (a paged retrieve result;
                 use continue_retrieval/cancel_retrieval to walk it)
        """
        return self._session.invoke_api(vim_util, 'get_objects',
                                        self._session.vim,
                                        'HostSystem', self._max_objects)
    def continue_retrieval(self, retrieve_result):
        """Continue retrieval of results if necessary.

        :param retrieve_result: Result from RetrievePropertiesEx
        :return: Next page of results, or None when exhausted
        """
        return self._session.invoke_api(vim_util, 'continue_retrieval',
                                        self._session.vim, retrieve_result)
    def cancel_retrieval(self, retrieve_result):
        """Cancel retrieval of results if necessary.

        Releases the server-side retrieval session early.

        :param retrieve_result: Result from RetrievePropertiesEx
        """
        self._session.invoke_api(vim_util, 'cancel_retrieval',
                                 self._session.vim, retrieve_result)
# TODO(vbala): move this method to datastore module
def _is_usable(self, mount_info):
"""Check if a datastore is usable as per the given mount info.
The datastore is considered to be usable for a host only if it is
writable, mounted and accessible.
:param mount_info: Host mount information
:return: True if datastore is usable
"""
writable = mount_info.accessMode == 'readWrite'
# If mounted attribute is not set, then default is True
mounted = getattr(mount_info, 'mounted', True)
# If accessible attribute is not set, then default is False
accessible = getattr(mount_info, 'accessible', False)
return writable and mounted and accessible
    def get_connected_hosts(self, datastore):
        """Get all the hosts to which the datastore is connected and usable.

        The datastore is considered to be usable for a host only if it is
        writable, mounted and accessible.

        :param datastore: Reference to the datastore entity
        :return: List of managed object references of all connected
                 hosts
        """
        # Inaccessible datastores cannot have usable hosts at all.
        summary = self.get_summary(datastore)
        if not summary.accessible:
            return []
        host_mounts = self._session.invoke_api(vim_util, 'get_object_property',
                                               self._session.vim, datastore,
                                               'host')
        if not hasattr(host_mounts, 'DatastoreHostMount'):
            return []
        connected_hosts = []
        for host_mount in host_mounts.DatastoreHostMount:
            if self._is_usable(host_mount.mountInfo):
                # Collect the moref value (string), not the reference object;
                # is_datastore_accessible compares against host.value.
                connected_hosts.append(host_mount.key.value)
        return connected_hosts
    def is_datastore_accessible(self, datastore, host):
        """Check if the datastore is accessible to the given host.

        :param datastore: datastore reference
        :param host: managed object reference of the host
        :return: True if the datastore is accessible
        """
        # get_connected_hosts returns moref values, hence host.value.
        hosts = self.get_connected_hosts(datastore)
        return host.value in hosts
# TODO(vbala): move this method to datastore module
def _in_maintenance(self, summary):
"""Check if a datastore is entering maintenance or in maintenance.
:param summary: Summary information about the datastore
:return: True if the datastore is entering maintenance or in
maintenance
"""
if hasattr(summary, 'maintenanceMode'):
return summary.maintenanceMode in ['enteringMaintenance',
'inMaintenance']
return False
    def _get_parent(self, child, parent_type):
        """Get immediate parent of given type via 'parent' property.

        Walks up the inventory tree recursively until an entity of the
        requested type (or the tree root, which has no parent) is reached.

        :param child: Child entity reference
        :param parent_type: Entity type of the parent
        :return: Immediate parent of specific type up the hierarchy via
                 'parent' property, or None if none exists
        """
        if not child:
            return None
        if child._type == parent_type:
            return child
        parent = self._session.invoke_api(vim_util, 'get_object_property',
                                          self._session.vim, child, 'parent')
        return self._get_parent(parent, parent_type)
    def get_dc(self, child):
        """Get parent datacenter up the hierarchy via 'parent' property.

        :param child: Reference of the child entity
        :return: Parent Datacenter of the param child entity
        """
        return self._get_parent(child, 'Datacenter')
    def get_vmfolder(self, datacenter):
        """Get the vmFolder.

        :param datacenter: Reference to the datacenter entity
        :return: vmFolder property of the datacenter (the root folder for
                 VMs in that datacenter)
        """
        return self._session.invoke_api(vim_util, 'get_object_property',
                                        self._session.vim, datacenter,
                                        'vmFolder')
    def _get_child_folder(self, parent_folder, child_folder_name):
        """Get the child folder with the given name under a parent folder.

        :param parent_folder: Reference to the parent folder entity
        :param child_folder_name: Name of the child folder
        :return: Reference to the child folder, or None if it doesn't exist
        """
        # Get list of child entities for the parent folder
        prop_val = self._session.invoke_api(vim_util, 'get_object_property',
                                            self._session.vim, parent_folder,
                                            'childEntity')
        if prop_val and hasattr(prop_val, 'ManagedObjectReference'):
            child_entities = prop_val.ManagedObjectReference
            # Return if the child folder with input name is already present
            for child_entity in child_entities:
                if child_entity._type != 'Folder':
                    continue
                child_entity_name = self.get_entity_name(child_entity)
                # Inventory names are percent-encoded; decode before
                # comparing with the requested folder name.
                if (child_entity_name
                        and (urllib.parse.unquote(child_entity_name)
                             == child_folder_name)):
                    LOG.debug("Child folder: %s exists.", child_folder_name)
                    return child_entity
    def create_folder(self, parent_folder, child_folder_name):
        """Creates child folder with given name under the given parent folder.

        The method first checks if a child folder already exists, if it does,
        then it returns a moref for the folder, else it creates one and then
        return the moref.

        :param parent_folder: Reference to the folder entity
        :param child_folder_name: Name of the child folder
        :return: Reference to the child folder with input name if it already
                 exists, else create one and return the reference
        """
        LOG.debug("Creating folder: %(child_folder_name)s under parent "
                  "folder: %(parent_folder)s.",
                  {'child_folder_name': child_folder_name,
                   'parent_folder': parent_folder})
        child_folder = self._get_child_folder(parent_folder, child_folder_name)
        if not child_folder:
            # Need to create the child folder.
            try:
                child_folder = self._session.invoke_api(self._session.vim,
                                                        'CreateFolder',
                                                        parent_folder,
                                                        name=child_folder_name)
                LOG.debug("Created child folder: %s.", child_folder)
            except exceptions.DuplicateName:
                # Another thread is trying to create the same folder, ignore
                # the exception and re-fetch the folder it created.
                child_folder = self._get_child_folder(parent_folder,
                                                      child_folder_name)
        return child_folder
    def create_vm_inventory_folder(self, datacenter, path_comp):
        """Create and return a VM inventory folder.

        This method caches references to inventory folders returned.

        :param datacenter: Reference to datacenter
        :param path_comp: Path components as a list
        :return: Reference to the inventory folder at the given path
        """
        LOG.debug("Creating inventory folder: %(path_comp)s under VM folder "
                  "of datacenter: %(datacenter)s.",
                  {'path_comp': path_comp,
                   'datacenter': datacenter})
        # Cache keys are "/<datacenter moref>/<comp1>/<comp2>/...".
        path = "/" + datacenter.value
        parent = self._folder_cache.get(path)
        if not parent:
            parent = self.get_vmfolder(datacenter)
            self._folder_cache[path] = parent
        # Walk down the requested components, creating (or fetching) each
        # folder level and caching it along the way.
        folder = None
        for folder_name in path_comp:
            path = "/".join([path, folder_name])
            folder = self._folder_cache.get(path)
            if not folder:
                folder = self.create_folder(parent, folder_name)
                self._folder_cache[path] = folder
            parent = folder
        LOG.debug("Inventory folder for path: %(path)s is %(folder)s.",
                  {'path': path,
                   'folder': folder})
        return folder
    def extend_virtual_disk(self, requested_size_in_gb, path, dc_ref,
                            eager_zero=False):
        """Extend the virtual disk to the requested size.

        :param requested_size_in_gb: Size of the volume in GB
        :param path: Datastore path of the virtual disk to extend
        :param dc_ref: Reference to datacenter
        :param eager_zero: Boolean determining if the free space
                           is zeroed out
        """
        LOG.debug("Extending virtual disk: %(path)s to %(size)s GB.",
                  {'path': path, 'size': requested_size_in_gb})
        diskMgr = self._session.vim.service_content.virtualDiskManager
        # VMWare API needs the capacity unit to be in KB, so convert the
        # capacity unit from GB to KB.
        size_in_kb = requested_size_in_gb * units.Mi
        task = self._session.invoke_api(self._session.vim,
                                        "ExtendVirtualDisk_Task",
                                        diskMgr,
                                        name=path,
                                        datacenter=dc_ref,
                                        newCapacityKb=size_in_kb,
                                        eagerZero=eager_zero)
        # Block until the asynchronous extend task completes.
        self._session.wait_for_task(task)
        LOG.info(_LI("Successfully extended virtual disk: %(path)s to "
                     "%(size)s GB."),
                 {'path': path, 'size': requested_size_in_gb})
    def _create_controller_config_spec(self, adapter_type):
        """Returns config spec for adding a disk controller.

        :param adapter_type: disk adapter type, used to pick the controller
                             device class
        """
        cf = self._session.vim.client.factory
        controller_type = ControllerType.get_controller_type(adapter_type)
        controller_device = cf.create('ns0:%s' % controller_type)
        # Negative device key marks a device that is new in this config
        # spec; the server assigns the real key on reconfiguration.
        controller_device.key = -100
        controller_device.busNumber = 0
        if ControllerType.is_scsi_controller(controller_type):
            controller_device.sharedBus = 'noSharing'
        controller_spec = cf.create('ns0:VirtualDeviceConfigSpec')
        controller_spec.operation = 'add'
        controller_spec.device = controller_device
        return controller_spec
    def _create_disk_backing(self, disk_type, vmdk_ds_file_path):
        """Creates file backing for virtual disk.

        :param disk_type: provisioning type (see VirtualDiskType)
        :param vmdk_ds_file_path: datastore file path of an existing disk,
                                  or None when a new file will be created
        """
        cf = self._session.vim.client.factory
        disk_device_bkng = cf.create('ns0:VirtualDiskFlatVer2BackingInfo')
        # Preallocated (lazy-zeroed thick) is the default when neither
        # flag below is set.
        if disk_type == VirtualDiskType.EAGER_ZEROED_THICK:
            disk_device_bkng.eagerlyScrub = True
        elif disk_type == VirtualDiskType.THIN:
            disk_device_bkng.thinProvisioned = True
        disk_device_bkng.fileName = vmdk_ds_file_path or ''
        disk_device_bkng.diskMode = 'persistent'
        return disk_device_bkng
    def _create_virtual_disk_config_spec(self, size_kb, disk_type,
                                         controller_key, profile_id,
                                         vmdk_ds_file_path):
        """Returns config spec for adding a virtual disk.

        :param size_kb: disk size in KB (clamped to a 1024 KB minimum)
        :param disk_type: disk provisioning type
        :param controller_key: device key of the controller to attach to
        :param profile_id: storage policy profile identification
        :param vmdk_ds_file_path: datastore file path of an existing disk,
                                  or None to create a new file backing
        """
        cf = self._session.vim.client.factory
        disk_device = cf.create('ns0:VirtualDisk')
        # disk size should be at least 1024KB
        disk_device.capacityInKB = max(units.Ki, int(size_kb))
        # New devices use negative temporary keys; pick one distinct from
        # the (possibly also new) controller's key.
        if controller_key < 0:
            disk_device.key = controller_key - 1
        else:
            disk_device.key = -101
        disk_device.unitNumber = 0
        disk_device.controllerKey = controller_key
        disk_device.backing = self._create_disk_backing(disk_type,
                                                        vmdk_ds_file_path)
        disk_spec = cf.create('ns0:VirtualDeviceConfigSpec')
        disk_spec.operation = 'add'
        # Only create a new backing file when no existing disk was given.
        if vmdk_ds_file_path is None:
            disk_spec.fileOperation = 'create'
        disk_spec.device = disk_device
        if profile_id:
            disk_profile = cf.create('ns0:VirtualMachineDefinedProfileSpec')
            disk_profile.profileId = profile_id
            disk_spec.profile = [disk_profile]
        return disk_spec
    def _create_specs_for_disk_add(self, size_kb, disk_type, adapter_type,
                                   profile_id, vmdk_ds_file_path=None):
        """Create controller and disk config specs for adding a new disk.

        :param size_kb: disk size in KB
        :param disk_type: disk provisioning type
        :param adapter_type: disk adapter type
        :param profile_id: storage policy profile identification
        :param vmdk_ds_file_path: Optional datastore file path of an existing
                                  virtual disk. If specified, file backing is
                                  not created for the virtual disk.
        :return: list containing controller and disk config specs
        """
        controller_spec = None
        if adapter_type == 'ide':
            # For IDE disks, use one of the default IDE controllers (with keys
            # 200 and 201) created as part of backing VM creation.
            controller_key = 200
        else:
            controller_spec = self._create_controller_config_spec(adapter_type)
            controller_key = controller_spec.device.key
        disk_spec = self._create_virtual_disk_config_spec(size_kb,
                                                          disk_type,
                                                          controller_key,
                                                          profile_id,
                                                          vmdk_ds_file_path)
        specs = [disk_spec]
        if controller_spec is not None:
            specs.append(controller_spec)
        return specs
    def _get_extra_config_option_values(self, extra_config):
        """Convert a dict to a list of vSphere OptionValue objects.

        :param extra_config: key-value pairs to convert
        :return: list of ns0:OptionValue objects
        """
        cf = self._session.vim.client.factory
        option_values = []
        for key, value in extra_config.items():
            opt = cf.create('ns0:OptionValue')
            opt.key = key
            opt.value = value
            option_values.append(opt)
        return option_values
    def _get_create_spec_disk_less(self, name, ds_name, profileId=None,
                                   extra_config=None):
        """Return spec for creating disk-less backing.

        :param name: Name of the backing
        :param ds_name: Datastore name where the disk is to be provisioned
        :param profileId: Storage profile ID for the backing
        :param extra_config: Key-value pairs to be written to backing's
                             extra-config
        :return: Spec for creation
        """
        cf = self._session.vim.client.factory
        vm_file_info = cf.create('ns0:VirtualMachineFileInfo')
        # '[<ds_name>]' (no file) makes vCenter place the VM files in a
        # newly created directory on that datastore.
        vm_file_info.vmPathName = '[%s]' % ds_name
        create_spec = cf.create('ns0:VirtualMachineConfigSpec')
        create_spec.name = name
        create_spec.guestId = 'otherGuest'
        # Minimal CPU/memory: the backing VM is never powered on; it only
        # exists to hold the volume's virtual disk.
        create_spec.numCPUs = 1
        create_spec.memoryMB = 128
        create_spec.files = vm_file_info
        # Set the hardware version to a compatible version supported by
        # vSphere 5.0. This will ensure that the backing VM can be migrated
        # without any incompatibility issues in a mixed cluster of ESX hosts
        # with versions 5.0 or above.
        create_spec.version = "vmx-08"
        if profileId:
            vmProfile = cf.create('ns0:VirtualMachineDefinedProfileSpec')
            vmProfile.profileId = profileId
            create_spec.vmProfile = [vmProfile]
        if extra_config:
            create_spec.extraConfig = self._get_extra_config_option_values(
                extra_config)
        return create_spec
    def get_create_spec(self, name, size_kb, disk_type, ds_name,
                        profile_id=None, adapter_type='lsiLogic',
                        extra_config=None):
        """Return spec for creating backing with a single disk.

        :param name: name of the backing
        :param size_kb: disk size in KB
        :param disk_type: disk provisioning type
        :param ds_name: datastore name where the disk is to be provisioned
        :param profile_id: storage policy profile identification
        :param adapter_type: disk adapter type
        :param extra_config: key-value pairs to be written to backing's
                             extra-config
        :return: spec for creation
        """
        # Start from a disk-less spec and attach a single disk (plus its
        # controller, when needed) via deviceChange.
        create_spec = self._get_create_spec_disk_less(
            name, ds_name, profileId=profile_id, extra_config=extra_config)
        create_spec.deviceChange = self._create_specs_for_disk_add(
            size_kb, disk_type, adapter_type, profile_id)
        return create_spec
    def _create_backing_int(self, folder, resource_pool, host, create_spec):
        """Helper for create backing methods.

        Invokes CreateVM_Task with the given spec and waits for completion.

        :return: Reference to the created backing VM
        """
        LOG.debug("Creating volume backing with spec: %s.", create_spec)
        task = self._session.invoke_api(self._session.vim, 'CreateVM_Task',
                                        folder, config=create_spec,
                                        pool=resource_pool, host=host)
        # The created VM reference is delivered in the task result.
        task_info = self._session.wait_for_task(task)
        backing = task_info.result
        LOG.info(_LI("Successfully created volume backing: %s."), backing)
        return backing
    def create_backing(self, name, size_kb, disk_type, folder, resource_pool,
                       host, ds_name, profileId=None, adapter_type='lsiLogic',
                       extra_config=None):
        """Create backing for the volume.

        Creates a VM with one VMDK based on the given inputs.

        :param name: Name of the backing
        :param size_kb: Size in KB of the backing
        :param disk_type: VMDK type for the disk
        :param folder: Folder, where to create the backing under
        :param resource_pool: Resource pool reference
        :param host: Host reference
        :param ds_name: Datastore name where the disk is to be provisioned
        :param profileId: Storage profile ID to be associated with backing
        :param adapter_type: Disk adapter type
        :param extra_config: Key-value pairs to be written to backing's
                             extra-config
        :return: Reference to the created backing entity
        """
        LOG.debug("Creating volume backing with name: %(name)s "
                  "disk_type: %(disk_type)s size_kb: %(size_kb)s "
                  "adapter_type: %(adapter_type)s profileId: %(profile)s at "
                  "folder: %(folder)s resource_pool: %(resource_pool)s "
                  "host: %(host)s datastore_name: %(ds_name)s.",
                  {'name': name, 'disk_type': disk_type, 'size_kb': size_kb,
                   'folder': folder, 'resource_pool': resource_pool,
                   'ds_name': ds_name, 'profile': profileId, 'host': host,
                   'adapter_type': adapter_type})
        # Build a single-disk VM config spec and create the VM from it.
        create_spec = self.get_create_spec(
            name, size_kb, disk_type, ds_name, profile_id=profileId,
            adapter_type=adapter_type, extra_config=extra_config)
        return self._create_backing_int(folder, resource_pool, host,
                                        create_spec)
    def create_backing_disk_less(self, name, folder, resource_pool,
                                 host, ds_name, profileId=None,
                                 extra_config=None):
        """Create disk-less volume backing.

        This type of backing is useful for creating volume from image. The
        downloaded image from the image service can be copied to a virtual
        disk of desired provisioning type and added to the backing VM.

        :param name: Name of the backing
        :param folder: Folder where the backing is created
        :param resource_pool: Resource pool reference
        :param host: Host reference
        :param ds_name: Name of the datastore used for VM storage
        :param profileId: Storage profile ID to be associated with backing
        :param extra_config: Key-value pairs to be written to backing's
                             extra-config
        :return: Reference to the created backing entity
        """
        LOG.debug("Creating disk-less volume backing with name: %(name)s "
                  "profileId: %(profile)s at folder: %(folder)s "
                  "resource pool: %(resource_pool)s host: %(host)s "
                  "datastore_name: %(ds_name)s.",
                  {'name': name, 'profile': profileId, 'folder': folder,
                   'resource_pool': resource_pool, 'host': host,
                   'ds_name': ds_name})
        create_spec = self._get_create_spec_disk_less(
            name, ds_name, profileId=profileId, extra_config=extra_config)
        return self._create_backing_int(folder, resource_pool, host,
                                        create_spec)
    def get_datastore(self, backing):
        """Get datastore where the backing resides.

        :param backing: Reference to the backing
        :return: Datastore reference to which the backing belongs
        """
        # A backing VM has a single disk, so the first datastore in the
        # 'datastore' property is the one holding its files.
        return self._session.invoke_api(vim_util, 'get_object_property',
                                        self._session.vim, backing,
                                        'datastore').ManagedObjectReference[0]
    def get_summary(self, datastore):
        """Get datastore summary.

        :param datastore: Reference to the datastore
        :return: 'summary' property of the datastore
        """
        return self._session.invoke_api(vim_util, 'get_object_property',
                                        self._session.vim, datastore,
                                        'summary')
    def _create_relocate_spec_disk_locator(self, datastore, disk_type,
                                           disk_device):
        """Creates spec for disk type conversion during relocate.

        :param datastore: Reference to the destination datastore
        :param disk_type: destination disk provisioning type
        :param disk_device: virtual device corresponding to the disk
        """
        cf = self._session.vim.client.factory
        disk_locator = cf.create("ns0:VirtualMachineRelocateSpecDiskLocator")
        disk_locator.datastore = datastore
        disk_locator.diskId = disk_device.key
        # A fresh backing with the target disk type drives the conversion.
        disk_locator.diskBackingInfo = self._create_disk_backing(disk_type,
                                                                 None)
        return disk_locator
    def _get_relocate_spec(self, datastore, resource_pool, host,
                           disk_move_type, disk_type=None, disk_device=None):
        """Return spec for relocating volume backing.

        :param datastore: Reference to the datastore
        :param resource_pool: Reference to the resource pool
        :param host: Reference to the host
        :param disk_move_type: Disk move type option
        :param disk_type: Destination disk type
        :param disk_device: Virtual device corresponding to the disk
        :return: Spec for relocation
        """
        cf = self._session.vim.client.factory
        relocate_spec = cf.create('ns0:VirtualMachineRelocateSpec')
        relocate_spec.datastore = datastore
        relocate_spec.pool = resource_pool
        relocate_spec.host = host
        relocate_spec.diskMoveType = disk_move_type
        # A disk locator is only needed when the disk type must be
        # converted as part of the relocation.
        if disk_type is not None and disk_device is not None:
            disk_locator = self._create_relocate_spec_disk_locator(datastore,
                                                                   disk_type,
                                                                   disk_device)
            relocate_spec.disk = [disk_locator]
        LOG.debug("Spec for relocating the backing: %s.", relocate_spec)
        return relocate_spec
    def relocate_backing(
            self, backing, datastore, resource_pool, host, disk_type=None):
        """Relocates backing to the input datastore and resource pool.

        The implementation uses moveAllDiskBackingsAndAllowSharing disk move
        type.

        :param backing: Reference to the backing
        :param datastore: Reference to the datastore
        :param resource_pool: Reference to the resource pool
        :param host: Reference to the host
        :param disk_type: destination disk type
        """
        LOG.debug("Relocating backing: %(backing)s to datastore: %(ds)s "
                  "and resource pool: %(rp)s with destination disk type: "
                  "%(disk_type)s.",
                  {'backing': backing,
                   'ds': datastore,
                   'rp': resource_pool,
                   'disk_type': disk_type})
        # Relocate the volume backing
        disk_move_type = 'moveAllDiskBackingsAndAllowSharing'
        # The disk device is only needed when a disk type conversion is
        # requested (it feeds the relocate spec's disk locator).
        disk_device = None
        if disk_type is not None:
            disk_device = self._get_disk_device(backing)
        relocate_spec = self._get_relocate_spec(datastore, resource_pool, host,
                                                disk_move_type, disk_type,
                                                disk_device)
        task = self._session.invoke_api(self._session.vim, 'RelocateVM_Task',
                                        backing, spec=relocate_spec)
        LOG.debug("Initiated relocation of volume backing: %s.", backing)
        self._session.wait_for_task(task)
        LOG.info(_LI("Successfully relocated volume backing: %(backing)s "
                     "to datastore: %(ds)s and resource pool: %(rp)s."),
                 {'backing': backing, 'ds': datastore, 'rp': resource_pool})
def move_backing_to_folder(self, backing, folder):
"""Move the volume backing to the folder.
:param backing: Reference to the backing
:param folder: Reference to the folder
"""
LOG.debug("Moving backing: %(backing)s to folder: %(fol)s.",
{'backing': backing, 'fol': folder})
task = self._session.invoke_api(self._session.vim,
'MoveIntoFolder_Task', folder,
list=[backing])
LOG.debug("Initiated move of volume backing: %(backing)s into the "
"folder: %(fol)s.", {'backing': backing, 'fol': folder})
self._session.wait_for_task(task)
LOG.info(_LI("Successfully moved volume "
"backing: %(backing)s into the "
"folder: %(fol)s."), {'backing': backing, 'fol': folder})
def create_snapshot(self, backing, name, description, quiesce=False):
"""Create snapshot of the backing with given name and description.
:param backing: Reference to the backing entity
:param name: Snapshot name
:param description: Snapshot description
:param quiesce: Whether to quiesce the backing when taking snapshot
:return: Created snapshot entity reference
"""
LOG.debug("Snapshoting backing: %(backing)s with name: %(name)s.",
{'backing': backing, 'name': name})
task = self._session.invoke_api(self._session.vim,
'CreateSnapshot_Task',
backing, name=name,
description=description,
memory=False, quiesce=quiesce)
LOG.debug("Initiated snapshot of volume backing: %(backing)s "
"named: %(name)s.", {'backing': backing, 'name': name})
task_info = self._session.wait_for_task(task)
snapshot = task_info.result
LOG.info(_LI("Successfully created snapshot: %(snap)s for volume "
"backing: %(backing)s."),
{'snap': snapshot, 'backing': backing})
return snapshot
@staticmethod
def _get_snapshot_from_tree(name, root):
"""Get snapshot by name from the snapshot tree root.
:param name: Snapshot name
:param root: Current root node in the snapshot tree
:return: None in the snapshot tree with given snapshot name
"""
if not root:
return None
if root.name == name:
return root.snapshot
if (not hasattr(root, 'childSnapshotList') or
not root.childSnapshotList):
# When root does not have children, the childSnapshotList attr
# is missing sometime. Adding an additional check.
return None
for node in root.childSnapshotList:
snapshot = VMwareVolumeOps._get_snapshot_from_tree(name, node)
if snapshot:
return snapshot
def get_snapshot(self, backing, name):
"""Get snapshot of the backing with given name.
:param backing: Reference to the backing entity
:param name: Snapshot name
:return: Snapshot entity of the backing with given name
"""
snapshot = self._session.invoke_api(vim_util, 'get_object_property',
self._session.vim, backing,
'snapshot')
if not snapshot or not snapshot.rootSnapshotList:
return None
for root in snapshot.rootSnapshotList:
return VMwareVolumeOps._get_snapshot_from_tree(name, root)
def snapshot_exists(self, backing):
"""Check if the given backing contains snapshots."""
snapshot = self._session.invoke_api(vim_util, 'get_object_property',
self._session.vim, backing,
'snapshot')
if snapshot is None or snapshot.rootSnapshotList is None:
return False
return len(snapshot.rootSnapshotList) != 0
def delete_snapshot(self, backing, name):
"""Delete a given snapshot from volume backing.
:param backing: Reference to the backing entity
:param name: Snapshot name
"""
LOG.debug("Deleting the snapshot: %(name)s from backing: "
"%(backing)s.",
{'name': name, 'backing': backing})
snapshot = self.get_snapshot(backing, name)
if not snapshot:
LOG.info(_LI("Did not find the snapshot: %(name)s for backing: "
"%(backing)s. Need not delete anything."),
{'name': name, 'backing': backing})
return
task = self._session.invoke_api(self._session.vim,
'RemoveSnapshot_Task',
snapshot, removeChildren=False)
LOG.debug("Initiated snapshot: %(name)s deletion for backing: "
"%(backing)s.",
{'name': name, 'backing': backing})
self._session.wait_for_task(task)
LOG.info(_LI("Successfully deleted snapshot: %(name)s of backing: "
"%(backing)s."), {'backing': backing, 'name': name})
def _get_folder(self, backing):
"""Get parent folder of the backing.
:param backing: Reference to the backing entity
:return: Reference to parent folder of the backing entity
"""
return self._get_parent(backing, 'Folder')
def _get_clone_spec(self, datastore, disk_move_type, snapshot, backing,
disk_type, host=None, resource_pool=None,
extra_config=None):
"""Get the clone spec.
:param datastore: Reference to datastore
:param disk_move_type: Disk move type
:param snapshot: Reference to snapshot
:param backing: Source backing VM
:param disk_type: Disk type of clone
:param host: Target host
:param resource_pool: Target resource pool
:param extra_config: Key-value pairs to be written to backing's
extra-config
:return: Clone spec
"""
if disk_type is not None:
disk_device = self._get_disk_device(backing)
else:
disk_device = None
relocate_spec = self._get_relocate_spec(datastore, resource_pool, host,
disk_move_type, disk_type,
disk_device)
cf = self._session.vim.client.factory
clone_spec = cf.create('ns0:VirtualMachineCloneSpec')
clone_spec.location = relocate_spec
clone_spec.powerOn = False
clone_spec.template = False
clone_spec.snapshot = snapshot
if extra_config:
config_spec = cf.create('ns0:VirtualMachineConfigSpec')
config_spec.extraConfig = self._get_extra_config_option_values(
extra_config)
clone_spec.config = config_spec
LOG.debug("Spec for cloning the backing: %s.", clone_spec)
return clone_spec
def clone_backing(self, name, backing, snapshot, clone_type, datastore,
disk_type=None, host=None, resource_pool=None,
extra_config=None, folder=None):
"""Clone backing.
If the clone_type is 'full', then a full clone of the source volume
backing will be created. Else, if it is 'linked', then a linked clone
of the source volume backing will be created.
:param name: Name for the clone
:param backing: Reference to the backing entity
:param snapshot: Snapshot point from which the clone should be done
:param clone_type: Whether a full clone or linked clone is to be made
:param datastore: Reference to the datastore entity
:param disk_type: Disk type of the clone
:param host: Target host
:param resource_pool: Target resource pool
:param extra_config: Key-value pairs to be written to backing's
extra-config
:param folder: The location of the clone
"""
LOG.debug("Creating a clone of backing: %(back)s, named: %(name)s, "
"clone type: %(type)s from snapshot: %(snap)s on "
"resource pool: %(resource_pool)s, host: %(host)s, "
"datastore: %(ds)s with disk type: %(disk_type)s.",
{'back': backing, 'name': name, 'type': clone_type,
'snap': snapshot, 'ds': datastore, 'disk_type': disk_type,
'host': host, 'resource_pool': resource_pool})
if folder is None:
# Use source folder as the location of the clone.
folder = self._get_folder(backing)
if clone_type == LINKED_CLONE_TYPE:
disk_move_type = 'createNewChildDiskBacking'
else:
disk_move_type = 'moveAllDiskBackingsAndDisallowSharing'
clone_spec = self._get_clone_spec(
datastore, disk_move_type, snapshot, backing, disk_type, host=host,
resource_pool=resource_pool, extra_config=extra_config)
task = self._session.invoke_api(self._session.vim, 'CloneVM_Task',
backing, folder=folder, name=name,
spec=clone_spec)
LOG.debug("Initiated clone of backing: %s.", name)
task_info = self._session.wait_for_task(task)
new_backing = task_info.result
LOG.info(_LI("Successfully created clone: %s."), new_backing)
return new_backing
def _reconfigure_backing(self, backing, reconfig_spec):
"""Reconfigure backing VM with the given spec."""
LOG.debug("Reconfiguring backing VM: %(backing)s with spec: %(spec)s.",
{'backing': backing,
'spec': reconfig_spec})
reconfig_task = self._session.invoke_api(self._session.vim,
"ReconfigVM_Task",
backing,
spec=reconfig_spec)
LOG.debug("Task: %s created for reconfiguring backing VM.",
reconfig_task)
self._session.wait_for_task(reconfig_task)
def attach_disk_to_backing(self, backing, size_in_kb, disk_type,
adapter_type, profile_id, vmdk_ds_file_path):
"""Attach an existing virtual disk to the backing VM.
:param backing: reference to the backing VM
:param size_in_kb: disk size in KB
:param disk_type: virtual disk type
:param adapter_type: disk adapter type
:param profile_id: storage policy profile identification
:param vmdk_ds_file_path: datastore file path of the virtual disk to
be attached
"""
LOG.debug("Reconfiguring backing VM: %(backing)s to add new disk: "
"%(path)s with size (KB): %(size)d and adapter type: "
"%(adapter_type)s.",
{'backing': backing,
'path': vmdk_ds_file_path,
'size': size_in_kb,
'adapter_type': adapter_type})
cf = self._session.vim.client.factory
reconfig_spec = cf.create('ns0:VirtualMachineConfigSpec')
specs = self._create_specs_for_disk_add(
size_in_kb,
disk_type,
adapter_type,
profile_id,
vmdk_ds_file_path=vmdk_ds_file_path)
reconfig_spec.deviceChange = specs
self._reconfigure_backing(backing, reconfig_spec)
LOG.debug("Backing VM: %s reconfigured with new disk.", backing)
def _create_spec_for_disk_remove(self, disk_device):
cf = self._session.vim.client.factory
disk_spec = cf.create('ns0:VirtualDeviceConfigSpec')
disk_spec.operation = 'remove'
disk_spec.device = disk_device
return disk_spec
def detach_disk_from_backing(self, backing, disk_device):
"""Detach the given disk from backing."""
LOG.debug("Reconfiguring backing VM: %(backing)s to remove disk: "
"%(disk_device)s.",
{'backing': backing, 'disk_device': disk_device})
cf = self._session.vim.client.factory
reconfig_spec = cf.create('ns0:VirtualMachineConfigSpec')
spec = self._create_spec_for_disk_remove(disk_device)
reconfig_spec.deviceChange = [spec]
self._reconfigure_backing(backing, reconfig_spec)
def rename_backing(self, backing, new_name):
"""Rename backing VM.
:param backing: VM to be renamed
:param new_name: new VM name
"""
LOG.info(_LI("Renaming backing VM: %(backing)s to %(new_name)s."),
{'backing': backing,
'new_name': new_name})
rename_task = self._session.invoke_api(self._session.vim,
"Rename_Task",
backing,
newName=new_name)
LOG.debug("Task: %s created for renaming VM.", rename_task)
self._session.wait_for_task(rename_task)
LOG.info(_LI("Backing VM: %(backing)s renamed to %(new_name)s."),
{'backing': backing,
'new_name': new_name})
def change_backing_profile(self, backing, profile_id):
"""Change storage profile of the backing VM.
The current profile is removed if the new profile is None.
"""
LOG.debug("Reconfiguring backing VM: %(backing)s to change profile to:"
" %(profile)s.",
{'backing': backing,
'profile': profile_id})
cf = self._session.vim.client.factory
reconfig_spec = cf.create('ns0:VirtualMachineConfigSpec')
if profile_id is None:
vm_profile = cf.create('ns0:VirtualMachineEmptyProfileSpec')
vm_profile.dynamicType = 'profile'
else:
vm_profile = cf.create('ns0:VirtualMachineDefinedProfileSpec')
vm_profile.profileId = profile_id.uniqueId
reconfig_spec.vmProfile = [vm_profile]
self._reconfigure_backing(backing, reconfig_spec)
LOG.debug("Backing VM: %(backing)s reconfigured with new profile: "
"%(profile)s.",
{'backing': backing,
'profile': profile_id})
def update_backing_disk_uuid(self, backing, disk_uuid):
"""Update backing VM's disk UUID.
:param backing: Reference to backing VM
:param disk_uuid: New disk UUID
"""
LOG.debug("Reconfiguring backing VM: %(backing)s to change disk UUID "
"to: %(disk_uuid)s.",
{'backing': backing,
'disk_uuid': disk_uuid})
disk_device = self._get_disk_device(backing)
disk_device.backing.uuid = disk_uuid
cf = self._session.vim.client.factory
disk_spec = cf.create('ns0:VirtualDeviceConfigSpec')
disk_spec.device = disk_device
disk_spec.operation = 'edit'
reconfig_spec = cf.create('ns0:VirtualMachineConfigSpec')
reconfig_spec.deviceChange = [disk_spec]
self._reconfigure_backing(backing, reconfig_spec)
LOG.debug("Backing VM: %(backing)s reconfigured with new disk UUID: "
"%(disk_uuid)s.",
{'backing': backing,
'disk_uuid': disk_uuid})
def update_backing_extra_config(self, backing, extra_config):
cf = self._session.vim.client.factory
reconfig_spec = cf.create('ns0:VirtualMachineConfigSpec')
reconfig_spec.extraConfig = self._get_extra_config_option_values(
extra_config)
self._reconfigure_backing(backing, reconfig_spec)
LOG.debug("Backing: %(backing)s reconfigured with extra config: "
"%(extra_config)s.",
{'backing': backing,
'extra_config': extra_config})
def delete_file(self, file_path, datacenter=None):
"""Delete file or folder on the datastore.
:param file_path: Datastore path of the file or folder
"""
LOG.debug("Deleting file: %(file)s under datacenter: %(dc)s.",
{'file': file_path, 'dc': datacenter})
fileManager = self._session.vim.service_content.fileManager
task = self._session.invoke_api(self._session.vim,
'DeleteDatastoreFile_Task',
fileManager,
name=file_path,
datacenter=datacenter)
LOG.debug("Initiated deletion via task: %s.", task)
self._session.wait_for_task(task)
LOG.info(_LI("Successfully deleted file: %s."), file_path)
def create_datastore_folder(self, ds_name, folder_path, datacenter):
"""Creates a datastore folder.
This method returns silently if the folder already exists.
:param ds_name: datastore name
:param folder_path: path of folder to create
:param datacenter: datacenter of target datastore
"""
fileManager = self._session.vim.service_content.fileManager
ds_folder_path = "[%s] %s" % (ds_name, folder_path)
LOG.debug("Creating datastore folder: %s.", ds_folder_path)
try:
self._session.invoke_api(self._session.vim,
'MakeDirectory',
fileManager,
name=ds_folder_path,
datacenter=datacenter)
LOG.info(_LI("Created datastore folder: %s."), folder_path)
except exceptions.FileAlreadyExistsException:
LOG.debug("Datastore folder: %s already exists.", folder_path)
def get_path_name(self, backing):
"""Get path name of the backing.
:param backing: Reference to the backing entity
:return: Path name of the backing
"""
return self._session.invoke_api(vim_util, 'get_object_property',
self._session.vim, backing,
'config.files').vmPathName
def get_entity_name(self, entity):
"""Get name of the managed entity.
:param entity: Reference to the entity
:return: Name of the managed entity
"""
return self._session.invoke_api(vim_util, 'get_object_property',
self._session.vim, entity, 'name')
def _get_disk_device(self, backing):
"""Get the virtual device corresponding to disk."""
hardware_devices = self._session.invoke_api(vim_util,
'get_object_property',
self._session.vim,
backing,
'config.hardware.device')
if hardware_devices.__class__.__name__ == "ArrayOfVirtualDevice":
hardware_devices = hardware_devices.VirtualDevice
for device in hardware_devices:
if device.__class__.__name__ == "VirtualDisk":
return device
LOG.error(_LE("Virtual disk device of "
"backing: %s not found."), backing)
raise vmdk_exceptions.VirtualDiskNotFoundException()
def get_vmdk_path(self, backing):
"""Get the vmdk file name of the backing.
The vmdk file path of the backing returned is of the form:
"[datastore1] my_folder/my_vm.vmdk"
:param backing: Reference to the backing
:return: VMDK file path of the backing
"""
disk_device = self._get_disk_device(backing)
backing = disk_device.backing
if backing.__class__.__name__ != "VirtualDiskFlatVer2BackingInfo":
msg = _("Invalid disk backing: %s.") % backing.__class__.__name__
LOG.error(msg)
raise AssertionError(msg)
return backing.fileName
def get_disk_size(self, backing):
"""Get disk size of the backing.
:param backing: backing VM reference
:return: disk size in bytes
"""
disk_device = self._get_disk_device(backing)
return disk_device.capacityInKB * units.Ki
def _get_virtual_disk_create_spec(self, size_in_kb, adapter_type,
disk_type):
"""Return spec for file-backed virtual disk creation."""
cf = self._session.vim.client.factory
spec = cf.create('ns0:FileBackedVirtualDiskSpec')
spec.capacityKb = size_in_kb
spec.adapterType = VirtualDiskAdapterType.get_adapter_type(
adapter_type)
spec.diskType = VirtualDiskType.get_virtual_disk_type(disk_type)
return spec
def create_virtual_disk(self, dc_ref, vmdk_ds_file_path, size_in_kb,
adapter_type='busLogic', disk_type='preallocated'):
"""Create virtual disk with the given settings.
:param dc_ref: datacenter reference
:param vmdk_ds_file_path: datastore file path of the virtual disk
:param size_in_kb: disk size in KB
:param adapter_type: disk adapter type
:param disk_type: vmdk type
"""
virtual_disk_spec = self._get_virtual_disk_create_spec(size_in_kb,
adapter_type,
disk_type)
LOG.debug("Creating virtual disk with spec: %s.", virtual_disk_spec)
disk_manager = self._session.vim.service_content.virtualDiskManager
task = self._session.invoke_api(self._session.vim,
'CreateVirtualDisk_Task',
disk_manager,
name=vmdk_ds_file_path,
datacenter=dc_ref,
spec=virtual_disk_spec)
LOG.debug("Task: %s created for virtual disk creation.", task)
self._session.wait_for_task(task)
LOG.debug("Created virtual disk with spec: %s.", virtual_disk_spec)
def create_flat_extent_virtual_disk_descriptor(
self, dc_ref, path, size_in_kb, adapter_type, disk_type):
"""Create descriptor for a single flat extent virtual disk.
To create the descriptor, we create a virtual disk and delete its flat
extent.
:param dc_ref: reference to the datacenter
:param path: descriptor datastore file path
:param size_in_kb: size of the virtual disk in KB
:param adapter_type: virtual disk adapter type
:param disk_type: type of the virtual disk
"""
LOG.debug("Creating descriptor: %(path)s with size (KB): %(size)s, "
"adapter_type: %(adapter_type)s and disk_type: "
"%(disk_type)s.",
{'path': path.get_descriptor_ds_file_path(),
'size': size_in_kb,
'adapter_type': adapter_type,
'disk_type': disk_type
})
self.create_virtual_disk(dc_ref, path.get_descriptor_ds_file_path(),
size_in_kb, adapter_type, disk_type)
self.delete_file(path.get_flat_extent_ds_file_path(), dc_ref)
LOG.debug("Created descriptor: %s.",
path.get_descriptor_ds_file_path())
def copy_vmdk_file(self, src_dc_ref, src_vmdk_file_path,
dest_vmdk_file_path, dest_dc_ref=None):
"""Copy contents of the src vmdk file to dest vmdk file.
:param src_dc_ref: Reference to datacenter containing src datastore
:param src_vmdk_file_path: Source vmdk file path
:param dest_vmdk_file_path: Destination vmdk file path
:param dest_dc_ref: Reference to datacenter of dest datastore.
If unspecified, source datacenter is used.
"""
LOG.debug('Copying disk: %(src)s to %(dest)s.',
{'src': src_vmdk_file_path,
'dest': dest_vmdk_file_path})
dest_dc_ref = dest_dc_ref or src_dc_ref
diskMgr = self._session.vim.service_content.virtualDiskManager
task = self._session.invoke_api(self._session.vim,
'CopyVirtualDisk_Task',
diskMgr,
sourceName=src_vmdk_file_path,
sourceDatacenter=src_dc_ref,
destName=dest_vmdk_file_path,
destDatacenter=dest_dc_ref,
force=True)
LOG.debug("Initiated copying disk data via task: %s.", task)
self._session.wait_for_task(task)
LOG.info(_LI("Successfully copied disk at: %(src)s to: %(dest)s."),
{'src': src_vmdk_file_path, 'dest': dest_vmdk_file_path})
def move_vmdk_file(self, src_dc_ref, src_vmdk_file_path,
dest_vmdk_file_path, dest_dc_ref=None):
"""Move the given vmdk file to another datastore location.
:param src_dc_ref: Reference to datacenter containing src datastore
:param src_vmdk_file_path: Source vmdk file path
:param dest_vmdk_file_path: Destination vmdk file path
:param dest_dc_ref: Reference to datacenter of dest datastore.
If unspecified, source datacenter is used.
"""
LOG.debug('Moving disk: %(src)s to %(dest)s.',
{'src': src_vmdk_file_path, 'dest': dest_vmdk_file_path})
dest_dc_ref = dest_dc_ref or src_dc_ref
diskMgr = self._session.vim.service_content.virtualDiskManager
task = self._session.invoke_api(self._session.vim,
'MoveVirtualDisk_Task',
diskMgr,
sourceName=src_vmdk_file_path,
sourceDatacenter=src_dc_ref,
destName=dest_vmdk_file_path,
destDatacenter=dest_dc_ref,
force=True)
self._session.wait_for_task(task)
def delete_vmdk_file(self, vmdk_file_path, dc_ref):
"""Delete given vmdk files.
:param vmdk_file_path: VMDK file path to be deleted
:param dc_ref: Reference to datacenter that contains this VMDK file
"""
LOG.debug("Deleting vmdk file: %s.", vmdk_file_path)
diskMgr = self._session.vim.service_content.virtualDiskManager
task = self._session.invoke_api(self._session.vim,
'DeleteVirtualDisk_Task',
diskMgr,
name=vmdk_file_path,
datacenter=dc_ref)
LOG.debug("Initiated deleting vmdk file via task: %s.", task)
self._session.wait_for_task(task)
LOG.info(_LI("Deleted vmdk file: %s."), vmdk_file_path)
def get_profile(self, backing):
"""Query storage profile associated with the given backing.
:param backing: backing reference
:return: profile name
"""
profile_ids = pbm.get_profiles(self._session, backing)
if profile_ids:
return pbm.get_profiles_by_ids(self._session, profile_ids)[0].name
def _get_all_clusters(self):
clusters = {}
retrieve_result = self._session.invoke_api(vim_util, 'get_objects',
self._session.vim,
'ClusterComputeResource',
self._max_objects)
while retrieve_result:
if retrieve_result.objects:
for cluster in retrieve_result.objects:
name = urllib.parse.unquote(cluster.propSet[0].val)
clusters[name] = cluster.obj
retrieve_result = self.continue_retrieval(retrieve_result)
return clusters
def get_cluster_refs(self, names):
"""Get references to given clusters.
:param names: list of cluster names
:return: Dictionary of cluster names to references
"""
clusters_ref = {}
clusters = self._get_all_clusters()
for name in names:
if name not in clusters:
LOG.error(_LE("Compute cluster: %s not found."), name)
raise vmdk_exceptions.ClusterNotFoundException(cluster=name)
clusters_ref[name] = clusters[name]
return clusters_ref
def get_cluster_hosts(self, cluster):
"""Get hosts in the given cluster.
:param cluster: cluster reference
:return: references to hosts in the cluster
"""
hosts = self._session.invoke_api(vim_util,
'get_object_property',
self._session.vim,
cluster,
'host')
host_refs = []
if hosts and hosts.ManagedObjectReference:
host_refs.extend(hosts.ManagedObjectReference)
return host_refs
def get_entity_by_inventory_path(self, path):
"""Returns the managed object identified by the given inventory path.
:param path: Inventory path
:return: Reference to the managed object
"""
return self._session.invoke_api(
self._session.vim,
"FindByInventoryPath",
self._session.vim.service_content.searchIndex,
inventoryPath=path)
def _get_disk_devices(self, vm):
disk_devices = []
hardware_devices = self._session.invoke_api(vim_util,
'get_object_property',
self._session.vim,
vm,
'config.hardware.device')
if hardware_devices.__class__.__name__ == "ArrayOfVirtualDevice":
hardware_devices = hardware_devices.VirtualDevice
for device in hardware_devices:
if device.__class__.__name__ == "VirtualDisk":
disk_devices.append(device)
return disk_devices
    def get_disk_device(self, vm, vmdk_path):
        """Get the disk device of the VM which corresponds to the given path.

        :param vm: VM reference
        :param vmdk_path: Datastore path of virtual disk
        :return: Matching disk device
        """
        disk_devices = self._get_disk_devices(vm)

        for disk_device in disk_devices:
            backing = disk_device.backing
            # Only flat (ver 2) disk backings carry a fileName to compare
            # against the requested datastore path.
            if (backing.__class__.__name__ == "VirtualDiskFlatVer2BackingInfo"
                    and backing.fileName == vmdk_path):
                return disk_device
| cloudbase/cinder | cinder/volume/drivers/vmware/volumeops.py | Python | apache-2.0 | 70,976 |
# This file is part of Indico.
# Copyright (C) 2002 - 2022 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from indico.core import signals
from indico.util.decorators import classproperty
from indico.util.signals import named_objects_from_signal
class Condition:
    """Base class for conditions.

    A `Condition` defines a criterion a rule can match on; evaluating the
    condition (as part of a rule) tells whether there is a match.
    """

    #: The name of the condition. Must be unique within the context
    #: where the condition is used
    name = None
    #: Whether the condition must always be present
    required = False
    #: A short description of the condition.
    description = None
    #: {value: condition_name} containing conditions that are allowed for each value type
    #: non-specified values are considered as compatible with all other conditions.
    compatible_with = None

    @classproperty
    @classmethod
    def friendly_name(cls):
        """Human-readable name; defaults to the condition name."""
        return cls.name

    @classmethod
    def is_used(cls, rule):
        """Check whether the condition is used in a rule."""
        return rule.get(cls.name) is not None

    @classmethod
    def is_none(cls, **kwargs):
        """Check whether the condition requires a null value.

        Inheriting methods should overload this.
        """
        raise NotImplementedError

    @classmethod
    def get_available_values(cls, **kwargs):
        """Get a dict of values that can be used for the condition.

        Subclasses are encouraged to explicitly specify the arguments
        they expect instead of using ``**kwargs``.

        The key of each item is the actual value that will be used
        in the check while the value is what is going to be displayed.

        :param kwargs: arguments specific to the condition's context
        """
        raise NotImplementedError

    @classmethod
    def _clean_values(cls, values, **kwargs):
        # Keep only values that are still available in this context.
        return list(set(values) & cls.get_available_values(**kwargs).keys())

    @classmethod
    def check(cls, values, **kwargs):
        """Check whether the condition is matched.

        Subclasses are encouraged to explicitly specify the arguments
        they expect instead of using ``**kwargs``.

        This method is only called if the rule is active and if there
        are valid values (as defined by `get_available_values`).

        :param values: a collection of values that are accepted for a
                       match. it never contains values which are not
                       available anymore.
        :param kwargs: arguments specific to the conditions's context
        """
        raise NotImplementedError
def get_conditions(context, **kwargs):
    """Get a dict of available conditions.

    :param context: the context where the conditions are used
    :param kwargs: arguments specific to the context
    """
    signal_rv = signals.core.get_conditions.send(context, **kwargs)
    return named_objects_from_signal(signal_rv)
def check_rule(context, rule, **kwargs):
    """Check whether a rule matches.

    :param context: the context where the conditions are used
    :param rule: the rule to check
    :param kwargs: arguments specific to the context
    """
    for name, condition in get_conditions(context, **kwargs).items():
        if not condition.is_used(rule):
            # A required condition the rule does not use can never match.
            if condition.required:
                return False
            continue
        values = condition._clean_values(rule[name], **kwargs)
        if not values and condition.is_none(**kwargs):
            # the property we're checking is null and the rule wants null
            return True
        if not condition.check(values, **kwargs):
            return False
    return True
def get_missing_conditions(context, rule, **kwargs):
    """Get the set of missing required conditions.

    :param context: the context where the conditions are used
    :param rule: the rule to check
    :param kwargs: arguments specific to the context
    """
    required = (cond for cond in get_conditions(context, **kwargs).values()
                if cond.required)
    return {cond.friendly_name for cond in required if not cond.is_used(rule)}
| indico/indico | indico/util/rules.py | Python | mit | 4,314 |
from scipy.linalg import eigh
from numpy.linalg import matrix_rank
from numpy import dot,conj,amax,argmin,zeros,eye,append,shape,diag,ones
from matplotlib.mlab import find
import copy
import warnings
import code
def argsort(seq):
    """Return the indices that would sort *seq* (stable)."""
    return sorted(range(len(seq)), key=lambda k: seq[k])
def bunch(instat, Nsamp, indices=False):
    """
    Distribute the integers from instat over Nsamp bunches while trying to
    even out the per-bunch sums. This is a heuristic first guess, not
    guaranteed to be optimal.

    :param instat: iterable of integers to distribute.
    :param Nsamp: number of bunches.
    :param indices: when True, also return the positions (in instat) of the
                    bunched integers.
    :return: the bunched integers, and optionally the matching index bunches.
    """
    stats = list(instat)
    # Sort values ascending, remembering where each one came from.
    order = sorted(range(len(stats)), key=lambda k: stats[k])
    stats = [stats[k] for k in order]
    args = list(order)

    bunched = [[] for _ in range(Nsamp)]
    argbunched = [[] for _ in range(Nsamp)]

    # Phase 1: repeatedly pair the smallest with the largest remaining value
    # and hand the pairs to the bunches in round-robin fashion.
    target = 0
    while len(stats) > 2 * Nsamp - 1:
        bunched[target] += [stats[0], stats[-1]]
        argbunched[target] += [args[0], args[-1]]
        del stats[-1], stats[0]
        del args[-1], args[0]
        target = (target + 1) % Nsamp

    # Phase 2: hand the leftovers, largest first, to the currently
    # lightest bunch.
    sums = [sum(b) for b in bunched]
    while stats:
        lightest = sums.index(min(sums))
        value = stats.pop()
        bunched[lightest].append(value)
        argbunched[lightest].append(args.pop())
        sums[lightest] += value

    if indices:
        return bunched, argbunched
    return bunched
def geneigh(A, B, tol=1e-12):
    """
    Solves the generalized eigenvalue problem A x = lambda B x, also in the
    case where A and B share a common null-space. The eigenvalues
    corresponding to the null-space are given a NaN value. The null-space is
    defined with the tolerance tol.

    :param A: Hermitian matrix (numpy array).
    :param B: Hermitian overlap matrix, possibly rank-deficient.
    :param tol: eigenvalues of B not above this threshold are treated as null.
    :return: (E, U) eigenvalues and eigenvectors; the leading entries of E
             are NaN for the shared null-space directions.
    :raises ValueError: if B has no eigenvalue above tol at all.
    """
    # No null-space issue: solve the plain generalized problem directly.
    if matrix_rank(B, tol) == shape(B)[0]:
        return eigh(A, B)
    # First diagonalize the overlap matrix B.
    Be, Bv = eigh(B)
    # Rewrite A in the eigenbasis of B.
    At = dot(conj(Bv.T), dot(A, Bv))
    Bt = diag(Be)
    # eigh returns eigenvalues in ascending order, so the null-space of B is
    # spanned by the first `idx` columns (eigenvalues <= tol). This replaces
    # the former matplotlib.mlab.find, which was removed from matplotlib.
    idx = int((Be <= tol).sum())
    if idx == shape(B)[0]:
        raise ValueError('B is (numerically) null: no eigenvalue above the '
                         'tolerance \'' + str(tol) + '\'.')
    # Check that the null-space of B is shared by A.
    m = amax(abs(At[0:idx, :].flatten()))
    if m > tol:
        warnings.warn('Maximum non-diagonal element in A written in B null-space is bigger than the tolerance \''+str(tol)+'\'.',UserWarning)
    # Diagonalize the regular (non-null-space) part of the problem.
    Et, Vt = eigh(At[idx:, idx:], Bt[idx:, idx:])
    # Ut: change of basis in the non-truncated space.
    Ut = zeros(shape(A), A.dtype)
    Ut[0:idx, 0:idx] = eye(idx)
    Ut[idx:, idx:] = Vt
    U = dot(Bv, Ut)
    E = append(float('NaN') * ones(idx), Et)
    return E, U
| bdallapi/gpvmc | tools/vmc_legacy_utils/vmc_utils.py | Python | mit | 2,702 |
# Demo script: build a small simulated city with one vertical test line and
# render it with the citySimulator toolkit.
import citySimulator as cs
# Relative composition weights per zone type: how strongly a region of one
# type is composed of / attracts each type during tessellation.
commercialComp = {'commercial':7, 'nonCommercial':1, 'industrial': 1}
nonCommercialComp = {'nonCommercial':8, 'commercial':1}
industrialComp = {'nonCommercial':1, 'industrial':8}
# Zone types: (name, display colour, composition weights).
# NOTE(review): commercial and industrial share the colour '#ff0000' --
# presumably intentional, but confirm if they should be distinguishable.
commercial = cs.regionType('commercial', '#ff0000', commercialComp)
nonCommercial = cs.regionType('nonCommercial', '#0000ff', nonCommercialComp)
industrial = cs.regionType('industrial', '#ff0000', industrialComp)
#print(nonCommercial.compBuffer)
#assigning relation factors
commercial.addRel(nonCommercial, 0.1)
commercial.addRel(industrial, 1)
industrial.addRel(nonCommercial, 0.1)
# Root region: 600x600, seeded as non-commercial at the origin.
city = cs.region(600, nonCommercial, [0,0], False)
# A vertical line through the middle of the city; positive relation values
# attract the given zone type towards the line, negative values repel it.
testLineV = cs.line([[300,0],[300,600]])
testLineV.relation['commercial'] = 100
testLineV.relation['nonCommercial'] = -100
testLineV.relation['industrial'] = 100
'''
NH9 = cs.line([[0,60],[200,150],[400,450],[600,540]])
NH9.relation['commercial'] = 100
NH9.relation['nonCommercial'] = -100
NH9.relation['industrial'] = -20
musi = cs.line([[0,350],[600,250]])
musi.relation['commercial'] = 100
musi.relation['nonCommercial'] = -100
musi.relation['industrial'] = -20
campus = cs.fence([[100,200],[200,100],[300,200],[300,400],[200,300],[100,300]])
'''
# Subdivide the city 4 levels deep, then draw everything.
city.tessellate(4)
city.render()
testLineV.render()
'''
NH9.render()
musi.render()
'''
#campus.render()
# Enter the Tk event loop (blocks until the window is closed).
cs.root.mainloop()
#print(campus.area())
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 Nicira, Inc.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Salvatore Orlando, VMware
import netaddr
from oslo.config import cfg
from quantum.api.rpc.agentnotifiers import dhcp_rpc_agent_api
from quantum.api.v2 import attributes
from quantum.common import constants
from quantum.common import exceptions as q_exc
from quantum.db import l3_db
from quantum.openstack.common import log as logging
from quantum.openstack.common.notifier import api as notifier_api
from quantum.plugins.nicira.nicira_nvp_plugin.common import (exceptions
as nvp_exc)
from quantum.plugins.nicira.nicira_nvp_plugin import NvpApiClient
LOG = logging.getLogger(__name__)
# Link-local /30 used for the dedicated metadata-access subnet; the .253
# address serves as the gateway through which instances reach the
# metadata service.
METADATA_DEFAULT_PREFIX = 30
METADATA_SUBNET_CIDR = '169.254.169.252/%d' % METADATA_DEFAULT_PREFIX
METADATA_GATEWAY_IP = '169.254.169.253'
class NvpMetadataAccess(object):
    """Mixin managing an internal 'metadata access network' for NVP routers.

    A dedicated network/subnet (169.254.169.252/30) is attached to a router
    so instances can reach the metadata service through the DHCP agent's
    metadata proxy.  NOTE(review): this mixin assumes it is combined with a
    plugin class providing get_subnet, create_network, create_subnet,
    add_router_interface, remove_router_interface, delete_network,
    _make_port_dict and _get_ports_query — confirm against the plugin class.
    """

    def _find_metadata_port(self, context, ports):
        # Return the first port with a fixed IP inside METADATA_SUBNET_CIDR,
        # or None (implicit) when no metadata port exists.
        for port in ports:
            for fixed_ip in port['fixed_ips']:
                cidr = netaddr.IPNetwork(
                    self.get_subnet(context, fixed_ip['subnet_id'])['cidr'])
                if cidr in netaddr.IPNetwork(METADATA_SUBNET_CIDR):
                    return port

    def _create_metadata_access_network(self, context, router_id):
        # Create the metadata network + subnet and plug them into the router.
        # This will still ensure atomicity on Quantum DB
        with context.session.begin(subtransactions=True):
            # Add network
            # Network name is likely to be truncated on NVP
            net_data = {'name': 'meta-%s' % router_id,
                        'tenant_id': '',  # intentionally not set
                        'admin_state_up': True,
                        'port_security_enabled': False,
                        'shared': False,
                        'status': constants.NET_STATUS_ACTIVE}
            meta_net = self.create_network(context,
                                           {'network': net_data})
            # Add subnet
            subnet_data = {'network_id': meta_net['id'],
                           'tenant_id': '',  # intentionally not set
                           'name': 'meta-%s' % router_id,
                           'ip_version': 4,
                           'shared': False,
                           'cidr': METADATA_SUBNET_CIDR,
                           'enable_dhcp': True,
                           # Ensure default allocation pool is generated
                           'allocation_pools': attributes.ATTR_NOT_SPECIFIED,
                           'gateway_ip': METADATA_GATEWAY_IP,
                           'dns_nameservers': [],
                           'host_routes': []}
            meta_sub = self.create_subnet(context,
                                          {'subnet': subnet_data})
            self.add_router_interface(context, router_id,
                                      {'subnet_id': meta_sub['id']})
            if cfg.CONF.dhcp_agent_notification:
                # We need to send a notification to the dhcp agent in
                # order to start the metadata agent proxy
                dhcp_notifier = dhcp_rpc_agent_api.DhcpAgentNotifyAPI()
                dhcp_notifier.notify(context, {'network': meta_net},
                                     'network.create.end')

    def _destroy_metadata_access_network(self, context, router_id, ports):
        # Tear down the metadata network; no-op when no metadata port exists.
        # This will still ensure atomicity on Quantum DB
        with context.session.begin(subtransactions=True):
            if ports:
                meta_port = self._find_metadata_port(context, ports)
                if not meta_port:
                    return
                meta_net_id = meta_port['network_id']
                self.remove_router_interface(
                    context, router_id, {'port_id': meta_port['id']})
                # Remove network (this will remove the subnet too)
                self.delete_network(context, meta_net_id)
                if cfg.CONF.dhcp_agent_notification:
                    # We need to send a notification to the dhcp agent in
                    # order to stop the metadata agent proxy
                    dhcp_notifier = dhcp_rpc_agent_api.DhcpAgentNotifyAPI()
                    dhcp_notifier.notify(context,
                                         {'network': {'id': meta_net_id}},
                                         'network.delete.end')

    def _handle_metadata_access_network(self, context, router_id,
                                        do_create=True):
        # Entry point: create or destroy the metadata network depending on
        # the router's remaining 'real' interfaces and do_create.
        if not cfg.CONF.NVP.enable_metadata_access_network:
            LOG.debug(_("Metadata access network is disabled"))
            return
        if not cfg.CONF.allow_overlapping_ips:
            LOG.warn(_("Overlapping IPs must be enabled in order to setup "
                       "the metadata access network"))
            return
        # As we'll use a different device_owner for metadata interface
        # this query will return only 'real' router interfaces
        ctx_elevated = context.elevated()
        device_filter = {'device_id': [router_id],
                         'device_owner': [l3_db.DEVICE_OWNER_ROUTER_INTF]}
        with ctx_elevated.session.begin(subtransactions=True):
            # Retrieve ports without going to plugin
            ports = [self._make_port_dict(port)
                     for port in self._get_ports_query(
                         ctx_elevated, filters=device_filter)
                     if port['fixed_ips']]
            try:
                if ports:
                    if (do_create and
                        not self._find_metadata_port(ctx_elevated, ports)):
                        self._create_metadata_access_network(context,
                                                             router_id)
                    elif len(ports) == 1:
                        # The only port left is the metadata port
                        self._destroy_metadata_access_network(context,
                                                              router_id,
                                                              ports)
                else:
                    LOG.debug(_("No router interface found for router '%s'. "
                                "No metadata access network should be "
                                "created or destroyed"), router_id)
            # TODO(salvatore-orlando): A better exception handling in the
            # NVP plugin would allow us to improve error handling here
            except (q_exc.QuantumException, nvp_exc.NvpPluginException,
                    NvpApiClient.NvpApiException):
                # Any exception here should be regarded as non-fatal
                LOG.exception(_("An error occurred while operating on the "
                                "metadata access network for router:'%s'"),
                              router_id)
| wallnerryan/quantum_migrate | quantum/plugins/nicira/nicira_nvp_plugin/common/metadata_access.py | Python | apache-2.0 | 7,472 |
#!/usr/bin/env python3
# Copyright (c) 2017-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test that we don't leak txs to inbound peers that we haven't yet announced to"""
from test_framework.messages import msg_getdata, CInv, MSG_TX
from test_framework.mininode import P2PDataStore
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
)
class P2PNode(P2PDataStore):
    """Inbound test peer that ignores inv messages, so the test framework
    does not automatically fetch announced transactions on its behalf."""
    def on_inv(self, msg):
        # Deliberately swallow announcements; the test sends its own getdata.
        pass
class P2PLeakTxTest(BitcoinTestFramework):
    """Verify a node never leaks a wallet tx to an inbound peer before the
    tx has been announced (inv'ed) to that peer: a premature getdata must
    be answered with notfound."""
    def set_test_params(self):
        self.num_nodes = 1

    def skip_test_if_missing_module(self):
        # sendtoaddress below requires wallet support.
        self.skip_if_no_wallet()

    def run_test(self):
        gen_node = self.nodes[0]  # The block and tx generating node
        gen_node.generate(1)
        inbound_peer = self.nodes[0].add_p2p_connection(P2PNode())  # An "attacking" inbound peer
        MAX_REPEATS = 100
        self.log.info("Running test up to {} times.".format(MAX_REPEATS))
        for i in range(MAX_REPEATS):
            self.log.info('Run repeat {}'.format(i + 1))
            txid = gen_node.sendtoaddress(gen_node.getnewaddress(), 0.01)
            # Request the fresh tx immediately, racing the node's announcement.
            want_tx = msg_getdata()
            want_tx.inv.append(CInv(t=MSG_TX, h=int(txid, 16)))
            inbound_peer.last_message.pop('notfound', None)
            inbound_peer.send_and_ping(want_tx)
            if inbound_peer.last_message.get('notfound'):
                self.log.debug('tx {} was not yet announced to us.'.format(txid))
                self.log.debug("node has responded with a notfound message. End test.")
                assert_equal(inbound_peer.last_message['notfound'].vec[0].hash, int(txid, 16))
                inbound_peer.last_message.pop('notfound')
                break
            else:
                # Announcement won the race; the tx must be in the inv we got.
                self.log.debug('tx {} was already announced to us. Try test again.'.format(txid))
                assert int(txid, 16) in [inv.hash for inv in inbound_peer.last_message['inv'].inv]
# Script entry point: run the functional test when executed directly.
if __name__ == '__main__':
    P2PLeakTxTest().main()
| midnightmagic/bitcoin | test/functional/p2p_leak_tx.py | Python | mit | 2,153 |
#!/usr/bin/env python3
#
# Copyright 2016 Sarah Sharp <sharp@otter.technology>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This python script scrapes github issue information, including comments.
# The script dumps the json strings into files in a tree structure:
#
# .
# |-- github owner
# |-- repository name
# |-- issue-<id>
# | |-- issue-<id>.json
# | |-- comment-<id>.json
# | |-- pr-<id>.json
# | |-- pr-comment-<id>.json
#
# Github API rate limiting
# ------------------------
#
# Github limits API requests to 5,000 requests per hour.
# This means it can take hours or even days to scrape the data.
#
# There are projects that archive the public Github events stream:
# http://githubarchive.org/ and http://ghtorrent.org/
#
# However, they only go back to 2011 or 2012, and I'm unsure whether they
# include the body text as markdown or html (which is important for removing
# code snippets from the text passed to the sentiment analysis library.
#
# How we fetch the data
# =====================
#
# If the main method is called, data is fetched in an iterative fashion,
# in order to cut down on API calls as much as possible:
#
# 1. Fetch the list of all issues (up to 100 issues at a time),
# write issue-<id>.json
#
# 2. Iterate over the issues json files, finding issues with comments.
# For each issue with comments, fetch up to 100 comments at a time,
# write comment-<id>.json
#
# 3. Iterate over the issues json files, finding which issues are pull requests.
# For each pull request, fetch the pull request data,
# write pr-<id>.json
#
# 4. Look at the pull request json data, and if the PR has review comments,
# fetch up to 100 review comments at a time,
# write pr-comment-<id>.json
#
# How long will this take?
# ========================
#
# Assuming no comments on any issue or pull request,
# (and note that a pull request can be an issue, but an issue may not be a PR),
# the minimum number of API calls that the library will make is:
#
# (num issues + PRs)/100 + 0 + (num PRs) + 0
#
# Assuming one comment on each issue and pull request
# the minimum number of API calls that the library will make is:
#
# (num issues + PRs)/100 + (num issues + PRs)*2 + (num PRs) + (num PRs)*2
#
# Divide the best or worst cases total number of API requests by 5,000
# to see how many hours this will take you.
from github3 import GitHub, login, issues
import os
import argparse
import time
import datetime
import json
import glob
def writeJson(path, prefix, obj):
    """Serialize obj.as_json() to <path>/<prefix><obj.id>.json.

    Existing files are left untouched so previously scraped data is
    never re-fetched or overwritten.
    """
    target = os.path.join(path, '%s%s.json' % (prefix, obj.id))
    if os.path.exists(target):
        return
    with open(target, 'w') as out:
        out.write(obj.as_json())
# rate limiting resets after 1 hour (60 minutes)
GITHUB_RATELIMIT_INTERVAL = 60
# Note that the etag is absolutely useless for our initial fetching.
# An etag to fetch a particular issue doesn't change unless a new issue is
# added or an issue is updated.
def scrapeIssues(repo, repoPath, processedIssueDate):
    """Given a github repo object, create a directory structure for issues
    created after processedIssueDate.

    Retries forever on API errors (assumed to be rate limiting), sleeping
    one rate-limit interval between attempts; processedIssueDate advances
    as issues are written, so retries resume where the last attempt stopped.
    """
    # Bug fix: bind numIssues before the try block.  Previously it was only
    # assigned inside the try, so if the very first repo.issues() call raised,
    # the except handler itself crashed with NameError.
    numIssues = 0
    while True:
        try:
            # We have to ask for issues in created order, because the update
            # time could change in between waiting for our rate limit to renew.
            issues = repo.issues(sort='created', direction='asc', state='all',
                                 since=processedIssueDate)
            numIssues = 0
            for i in issues:
                issuePath = os.path.join(repoPath, 'issue-' + str(i.id))
                if not os.path.exists(issuePath):
                    os.makedirs(issuePath)
                writeJson(issuePath, 'issue-', i)
                processedIssueDate = i.as_dict()['created_at']
                numIssues += 1
            # FIXME: write issues etag to file to update the repo later
            break
        except Exception:
            # Narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit
            # can still abort the scrape instead of looping forever.
            print('Processed', str(numIssues), 'issues')
            print('Github rate limit at',
                  str(repo.ratelimit_remaining) + ', sleeping until',
                  datetime.datetime.now() + datetime.timedelta(minutes = GITHUB_RATELIMIT_INTERVAL))
            time.sleep(60*GITHUB_RATELIMIT_INTERVAL)
def scrapeIssueComments(repo, repoPath):
    """Given a github repo object, scrape comments, ignoring issues with fetched comments.

    Pass 1 scans the on-disk issue directories and selects issues whose json
    reports comments but which have no comment-*.json files yet; pass 2
    fetches those comments, sleeping out rate-limit errors.
    """
    # Get the list of all directories in the repoPath, looking for issue directories.
    skippedIssues = 0
    noCommentIssues = 0
    issueList = []
    # FIXME This will work if we're only using the API calls, however,
    # if this script is used in conjunction with something that uses
    # archives of github public events, we might only have some of
    # the comments on an issue.
    for f in os.listdir(repoPath):
        if not f.startswith('issue-'):
            continue
        issuePath = os.path.join(repoPath, f)
        if any(issueFile.startswith('comment-') for issueFile in os.listdir(issuePath)):
            skippedIssues += 1
        else:
            # Check the json to see if this issue has any comments.
            with open(os.path.join(issuePath, f + '.json'), 'r') as issueFile:
                numComments = int(json.load(issueFile)['comments'])
                if numComments > 0:
                    print(issuePath)
                    issueList.append(f)
                else:
                    noCommentIssues += 1
    print('Skipping', str(skippedIssues), 'issues with already fetched comments')
    print('Skipping', str(noCommentIssues), 'issues with no comments')
    print('Fetching comments for', str(len(issueList)), 'issues')
    numComments = 0
    for f in issueList:
        # NOTE(review): issueId is unused; the 'number' field from the json
        # (realId) is what the API lookup uses.
        issueId = f.split('-')[1]
        while True:
            try:
                issuePath = os.path.join(repoPath, f)
                with open(os.path.join(issuePath, f + '.json'), 'r') as issueFile:
                    realId = json.load(issueFile)['number']
                i = repo.issue(realId)
                print(issuePath)
                # FIXME: we could hit our rate limit in between the call to grab
                # the issue and the request to get a comment.
                # We could also lose comments if there are more than 100 comments
                # (the pagination unit github3.py uses) and we hit the rate limit
                # in the middle of the for loop. The second case should be rare,
                # and the first we can catch by re-running the pass again.
                for c in i.comments():
                    print(c.id)
                    writeJson(issuePath, 'comment-', c)
                    numComments += 1
                break
            except:
                # NOTE(review): bare except — presumably meant for rate-limit
                # errors, but it also swallows KeyboardInterrupt; confirm
                # before narrowing.
                print('Processed', str(numComments), 'comments')
                print('Github rate limit at',
                      str(repo.ratelimit_remaining) + ', sleeping until',
                      datetime.datetime.now() + datetime.timedelta(minutes = GITHUB_RATELIMIT_INTERVAL))
                time.sleep(60*GITHUB_RATELIMIT_INTERVAL)
# Oddities of the github API
#
# Lesson 1:
#
# At some point, there was no API difference between issues and pull requests.
# Now, an issue can be a normal issue, or it can reference a pull request object.
#
# Any comments made on the issue are issue comments, and require two API calls
# (one to fetch the issue and the second to fetch a page of issue comments).
#
# Any comments made on the pull request commit or code are known as "review comments".
# The only way to get review comments referenced from an issue is to
# first, fetch the pull request object, and second, request the review comments.
#
# So, even through both issue comments and review comments appear on the same webpage,
# they are completely different beasts.
def scrapePullRequestComments(repo, repoPath):
    """Given a github repo object, scrape pull requests and review comments.

    For each issue whose json carries a 'pull_request' key and which has no
    pr-comment-*.json files yet, fetch the PR object and its review comments.
    """
    # Get the list of all directories in the repoPath, looking for issue directories.
    issueList = []
    skippedIssues = 0
    # Find all issues that are a pull request
    for f in os.listdir(repoPath):
        if not f.startswith('issue-'):
            continue
        issuePath = os.path.join(repoPath, f)
        if any(issueFile.startswith('pr-comment-') for issueFile in os.listdir(issuePath)):
            skippedIssues += 1
        else:
            # Check the json to see if this issue is a pull request
            with open(os.path.join(issuePath, f + '.json'), 'r') as issueFile:
                soup = json.load(issueFile)
                realId = soup['number']
                try:
                    pullRequest = soup['pull_request']
                except:
                    # Missing 'pull_request' key: a plain issue, not a PR.
                    continue
                issueList.append((f, realId))
    print('Skipping', str(skippedIssues), 'pull requests with fetched review comments')
    print('Processing', str(len(issueList)), 'pull requests')
    numComments = 0
    for f in issueList:
        realId = f[1]
        while True:
            try:
                issuePath = os.path.join(repoPath, f[0])
                print(issuePath)
                pr = repo.pull_request(realId)
                writeJson(issuePath, 'pr-', pr)
                # Find out if this PR has any comments
                prj = json.loads(pr.as_json())
                if int(prj['comments']) == 0:
                    break
                for c in pr.review_comments():
                    print(c.id)
                    writeJson(issuePath, 'pr-comment-', c)
                    numComments += 1
                break
            except:
                # NOTE(review): bare except — treats any failure as a rate
                # limit and sleeps; confirm before narrowing.
                print('Processed', str(numComments), 'review comments')
                print('Github rate limit at',
                      str(repo.ratelimit_remaining) + ', sleeping until',
                      datetime.datetime.now() + datetime.timedelta(minutes = GITHUB_RATELIMIT_INTERVAL))
                time.sleep(60*GITHUB_RATELIMIT_INTERVAL)
def __login(args):
    """Authenticate with github3.

    ``args.credentials_file_or_token`` either names a two-line file
    (username, then password) or is itself an OAuth token; if the path
    cannot be opened, the string is used as a token.
    """
    credentials = args.credentials_file_or_token
    try:
        with open(credentials, 'r') as cred_file:
            user = cred_file.readline().rstrip()
            secret = cred_file.readline().rstrip()
            return login(username=user, password=secret)
    except IOError:
        return login(token=credentials)
def main():
    """Parse CLI args, log in, and run the three scraping passes
    (issues, issue comments, pull-request review comments)."""
    parser = argparse.ArgumentParser(description='Scrape issues and comments from a github repository, by authenticating as a github user.')
    parser.add_argument('repository', help='github repository name')
    parser.add_argument('owner', help='github username of repository owner')
    parser.add_argument('credentials_file_or_token', help='OAuth token or path to file storing github username and password to use for authentication (two lines)')
    args = parser.parse_args()
    repoPath = os.path.join(args.owner, args.repository)
    # NOTE(review): lastIssue is computed but never used below.
    lastIssue = os.path.join(repoPath, 'last-processed-issue'+ '.txt')
    g = __login(args)
    repo = g.repository(args.owner, args.repository)
    if not repo:
        print('No such repo.')
        quit()
    # Too bad makedirs exist_ok was removed in 3.4.1
    if not os.path.exists(repoPath):
        os.makedirs(repoPath)
    if repo.ratelimit_remaining != 0:
        print('Github rate limit at', str(repo.ratelimit_remaining))
    scrapeIssues(repo, repoPath, None)
    scrapeIssueComments(repo, repoPath)
    scrapePullRequestComments(repo, repoPath)
# Run the scraper only when executed as a script (not on import).
if __name__ == "__main__":
    main()
| sarahsharp/foss-heartbeat | ghscraper.py | Python | gpl-3.0 | 12,052 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Pierre Haessig — March 2014
"""
Qt adaptation of Gael Varoquaux's tutorial to integrate Matplotlib
http://docs.enthought.com/traitsui/tutorials/traits_ui_scientific_app.html#extending-traitsui-adding-a-matplotlib-figure-to-our-application
based on Qt-based code shared by Didrik Pinte, May 2012
http://markmail.org/message/z3hnoqruk56g2bje
adapted and tested to work with PySide from Anaconda in March 2014
"""
from pyface.qt import QtGui, QtCore
import matplotlib
# We want matplotlib to use a QT backend
matplotlib.use('Qt4Agg')
from matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg as FigureCanvas
from matplotlib.backends.backend_qt4agg import NavigationToolbar2QT
from matplotlib.figure import Figure
from traits.api import Any, Instance
from traitsui.qt4.editor import Editor
from traitsui.qt4.basic_editor_factory import BasicEditorFactory
class _MPLFigureEditor(Editor):
    """TraitsUI editor that embeds a matplotlib Figure trait in a Qt widget
    (canvas plus navigation toolbar)."""
    # Allow the editor to be placed inside a scrollable view.
    scrollable = True

    def init(self, parent):
        self.control = self._create_canvas(parent)
        self.set_tooltip()

    def update_editor(self):
        # The canvas redraws itself; nothing to sync from the trait here.
        pass

    def _create_canvas(self, parent):
        """ Create the MPL canvas. """
        # matplotlib commands to create a canvas
        frame = QtGui.QWidget()
        mpl_canvas = FigureCanvas(self.value)
        mpl_canvas.setParent(frame)
        mpl_toolbar = NavigationToolbar2QT(mpl_canvas,frame)
        vbox = QtGui.QVBoxLayout()
        vbox.addWidget(mpl_canvas)
        vbox.addWidget(mpl_toolbar)
        frame.setLayout(vbox)
        return frame
class MPLFigureEditor(BasicEditorFactory):
    """Editor factory exposing _MPLFigureEditor for use in TraitsUI views."""
    klass = _MPLFigureEditor
if __name__ == "__main__":
    # Create a window to demo the editor: a parametric curve whose shape
    # is controlled by the 'n' and 'a' traits.
    from traits.api import HasTraits, Int, Float, on_trait_change
    from traitsui.api import View, Item
    from numpy import sin, cos, linspace, pi

    class Test(HasTraits):
        figure = Instance(Figure, ())
        n = Int(11)
        a = Float(0.5)
        view = View(Item('figure', editor=MPLFigureEditor(),
                         show_label=False),
                    Item('n'),
                    Item('a'),
                    width=400,
                    height=300,
                    resizable=True)

        def __init__(self):
            super(Test, self).__init__()
            axes = self.figure.add_subplot(111)
            self._t = linspace(0, 2*pi, 200)
            self.plot()

        @on_trait_change('n,a')
        def plot(self):
            # Redraw the curve whenever n or a changes; reuse the existing
            # line after the first draw instead of re-plotting.
            t = self._t
            a = self.a
            n = self.n
            axes = self.figure.axes[0]
            if not axes.lines:
                axes.plot(sin(t)*(1+a*cos(n*t)), cos(t)*(1+a*cos(n*t)))
            else:
                l = axes.lines[0]
                l.set_xdata(sin(t)*(1+a*cos(n*t)))
                l.set_ydata(cos(t)*(1+a*cos(n*t)))
            canvas = self.figure.canvas
            if canvas is not None:
                canvas.draw()

    t = Test()
    t.configure_traits()
| marshallmcdonnell/interactive_plotting | TraitsUI/matplotlib/traitsui_mpl_qt_test.py | Python | mit | 2,981 |
__version__ = "1.0b2.dev11"
| pyohio/symposion | symposion/__init__.py | Python | bsd-3-clause | 28 |
import django
from django.db import models
from django.db.models.sql.query import LOOKUP_SEP
from django.db.models.deletion import Collector
from django.db.models.fields.related import ManyToOneRel
from django.contrib.auth import get_permission_codename
from django.forms.forms import pretty_name
from django.utils import formats
from django.utils.html import escape
from django.utils.safestring import mark_safe
from django.utils.text import capfirst
from django.utils.encoding import force_unicode, smart_unicode, smart_str
from django.utils.translation import ungettext
from django.core.urlresolvers import reverse
from django.conf import settings
from django.forms import Media
from django.utils.translation import get_language
import datetime
import decimal
if 'django.contrib.staticfiles' in settings.INSTALLED_APPS:
from django.contrib.staticfiles.templatetags.staticfiles import static
else:
from django.templatetags.static import static
try:
import json
except ImportError:
from django.utils import simplejson as json
try:
from django.utils.timezone import template_localtime as tz_localtime
except ImportError:
from django.utils.timezone import localtime as tz_localtime
try:
from django.contrib.auth import get_user_model
User = get_user_model()
username_field = User.USERNAME_FIELD
except Exception:
from django.contrib.auth.models import User
username_field = 'username'
def xstatic(*tags):
    """Resolve vendor resource tags (e.g. 'jquery.js') to static URLs.

    Each tag is looked up dotted-path-style in the nested ``vendors``
    mapping; unresolved tags starting with 'xadmin' fall back to the
    conventional 'xadmin/<css|js>/<tag>' path.  Dict nodes select a
    'dev' / 'production' / 'cdn' variant from DEBUG / STATIC_USE_CDN.
    Absolute http:// entries are returned as-is, others via static().
    """
    from vendors import vendors
    node = vendors
    fs = []
    # NOTE(review): lang is computed but never used.
    lang = get_language()
    for tag in tags:
        try:
            for p in tag.split('.'):
                node = node[p]
        except Exception, e:
            if tag.startswith('xadmin'):
                file_type = tag.split('.')[-1]
                if file_type in ('css', 'js'):
                    node = "xadmin/%s/%s" % (file_type, tag)
                else:
                    raise e
            else:
                raise e
        if type(node) in (str, unicode):
            files = node
        else:
            mode = 'dev'
            if not settings.DEBUG:
                mode = getattr(settings, 'STATIC_USE_CDN',
                               False) and 'cdn' or 'production'
            # Fall back through cdn -> production -> dev if a variant
            # is missing from the vendors entry.
            if mode == 'cdn' and mode not in node:
                mode = 'production'
            if mode == 'production' and mode not in node:
                mode = 'dev'
            files = node[mode]
        files = type(files) in (list, tuple) and files or [files, ]
        fs.extend(files)
    return [f.startswith('http://') and f or static(f) for f in fs]
def vendor(*tags):
    """Build a django Media object from vendor resource tags.

    Tags ending in '.js' are added as scripts, tags ending in '.css'
    as 'screen' stylesheets; anything else is ignored.
    """
    media = Media()
    for tag in tags:
        suffix = tag.split('.')[-1]
        resources = xstatic(tag)
        if suffix == 'js':
            media.add_js(resources)
        elif suffix == 'css':
            media.add_css({'screen': resources})
    return media
def lookup_needs_distinct(opts, lookup_path):
    """
    Returns True if 'distinct()' should be used to query the given lookup path.
    """
    # A many-to-many relation, or a reverse FK to a non-unique field, joins
    # multiple rows per object and can therefore produce duplicates.
    field_name = lookup_path.split('__', 1)[0]
    field = opts.get_field_by_name(field_name)[0]
    if ((hasattr(field, 'rel') and
         isinstance(field.rel, models.ManyToManyRel)) or
        (isinstance(field, models.fields.related.ManyToOneRel) and
         not field.field.unique)):
        return True
    return False
def prepare_lookup_value(key, value):
    """
    Returns a lookup value prepared to be used in queryset filtering.

    '__in' keys get their comma-separated value split into a list;
    '__isnull' keys map '' / 'false' (any case) to False, anything else
    to True.  Other values pass through unchanged.
    """
    if key.endswith('__in'):
        return value.split(',')
    if key.endswith('__isnull') and type(value) == str:
        return value.lower() not in ('', 'false')
    return value
def quote(s):
    """
    Ensure that primary key values do not confuse the admin URLs by escaping
    any '/', '_' and ':' characters. Similar to urllib.quote, except that the
    quoting is slightly different so that it doesn't get automatically
    unquoted by the Web browser.
    """
    if not isinstance(s, basestring):
        return s
    # Characters that would break URL routing are replaced by '_HH' where
    # HH is the uppercase hex code of the character.
    unsafe = """:/_#?;@&=+$,"<>%\\"""
    return ''.join(
        '_%02X' % ord(ch) if ch in unsafe else ch
        for ch in s)
def unquote(s):
    """
    Undo the effects of quote(). Based heavily on urllib.unquote().
    """
    if not isinstance(s, basestring):
        return s
    pieces = s.split('_')
    out = [pieces[0]]
    for piece in pieces[1:]:
        # A valid escape has at least two hex digits after the underscore;
        # anything else is restored as a literal '_' plus the remainder.
        if piece[1:2]:
            try:
                out.append(chr(int(piece[:2], 16)) + piece[2:])
            except ValueError:
                out.append('_' + piece)
        else:
            out.append('_' + piece)
    return "".join(out)
def flatten_fieldsets(fieldsets):
    """Returns a list of field names from an admin fieldsets structure.

    Each fieldset is a (name, options) pair whose options['fields'] may mix
    plain names with tuples of names; tuples are flattened in order.
    """
    field_names = []
    for _name, opts in fieldsets:
        for entry in opts['fields']:
            # exact-type check mirrors the admin convention: only genuine
            # tuples group several fields on one line
            if type(entry) is tuple:
                field_names.extend(entry)
            else:
                field_names.append(entry)
    return field_names
def get_deleted_objects(objs, opts, user, admin_site, using):
    """
    Find all objects related to ``objs`` that should also be deleted. ``objs``
    must be a homogenous iterable of objects (e.g. a QuerySet).

    Returns a nested list of strings suitable for display in the
    template with the ``unordered_list`` filter.

    Also returns the set of verbose names the user lacks delete permission
    for, and the list of PROTECT'ed objects that block deletion.
    """
    collector = NestedObjects(using=using)
    collector.collect(objs)
    perms_needed = set()

    def format_callback(obj):
        # Render one object as HTML, linking to its admin change page when
        # its model is registered with this admin site.
        has_admin = obj.__class__ in admin_site._registry
        opts = obj._meta
        if has_admin:
            admin_url = reverse('%s:%s_%s_change'
                                % (admin_site.name,
                                   opts.app_label,
                                   opts.object_name.lower()),
                                None, (quote(obj._get_pk_val()),))
            p = '%s.%s' % (opts.app_label, get_permission_codename('delete', opts))
            if not user.has_perm(p):
                perms_needed.add(opts.verbose_name)
            # Display a link to the admin page.
            return mark_safe(u'<span class="label label-info">%s:</span> <a href="%s">%s</a>' %
                             (escape(capfirst(opts.verbose_name)),
                              admin_url,
                              escape(obj)))
        else:
            # Don't display link to edit, because it either has no
            # admin or is edited inline.
            return mark_safe(u'<span class="label label-info">%s:</span> %s' %
                             (escape(capfirst(opts.verbose_name)),
                              escape(obj)))

    to_delete = collector.nested(format_callback)
    protected = [format_callback(obj) for obj in collector.protected]
    return to_delete, perms_needed, protected
class NestedObjects(Collector):
    """Deletion collector that records parent->child edges so the cascade
    can be rendered as a nested tree, and collects PROTECT'ed objects
    instead of raising."""
    def __init__(self, *args, **kwargs):
        super(NestedObjects, self).__init__(*args, **kwargs)
        self.edges = {}  # {from_instance: [to_instances]}
        self.protected = set()

    def add_edge(self, source, target):
        self.edges.setdefault(source, []).append(target)

    def collect(self, objs, source_attr=None, **kwargs):
        # Record an edge for each object (root objects hang off None),
        # then delegate the actual collection to the base Collector.
        for obj in objs:
            if source_attr:
                self.add_edge(getattr(obj, source_attr), obj)
            else:
                self.add_edge(None, obj)
        try:
            return super(NestedObjects, self).collect(objs, source_attr=source_attr, **kwargs)
        except models.ProtectedError, e:
            self.protected.update(e.protected_objects)

    def related_objects(self, related, objs):
        qs = super(NestedObjects, self).related_objects(related, objs)
        return qs.select_related(related.field.name)

    def _nested(self, obj, seen, format_callback):
        # Depth-first walk of self.edges; 'seen' guards against revisiting.
        if obj in seen:
            return []
        seen.add(obj)
        children = []
        for child in self.edges.get(obj, ()):
            children.extend(self._nested(child, seen, format_callback))
        if format_callback:
            ret = [format_callback(obj)]
        else:
            ret = [obj]
        if children:
            ret.append(children)
        return ret

    def nested(self, format_callback=None):
        """
        Return the graph as a nested list.
        """
        seen = set()
        roots = []
        for root in self.edges.get(None, ()):
            roots.extend(self._nested(root, seen, format_callback))
        return roots
def model_format_dict(obj):
    """
    Return a `dict` with keys 'verbose_name' and 'verbose_name_plural',
    typically for use with string formatting.

    `obj` may be a `Model` instance, `Model` subclass, or `QuerySet` instance.
    """
    if isinstance(obj, (models.Model, models.base.ModelBase)):
        opts = obj._meta
    elif isinstance(obj, models.query.QuerySet):
        opts = obj.model._meta
    else:
        # Anything else is assumed to already be an options-like object.
        opts = obj
    return {
        'verbose_name': force_unicode(opts.verbose_name),
        'verbose_name_plural': force_unicode(opts.verbose_name_plural)
    }
def model_ngettext(obj, n=None):
    """
    Return the appropriate `verbose_name` or `verbose_name_plural` value for
    `obj` depending on the count `n`.

    `obj` may be a `Model` instance, `Model` subclass, or `QuerySet` instance.
    If `obj` is a `QuerySet` instance, `n` is optional and the length of the
    `QuerySet` is used.
    """
    if isinstance(obj, models.query.QuerySet):
        if n is None:
            n = obj.count()
        obj = obj.model
    d = model_format_dict(obj)
    singular, plural = d["verbose_name"], d["verbose_name_plural"]
    return ungettext(singular, plural, n or 0)
def is_rel_field(name, model):
    """Return True if ``name`` looks like a related-field lookup
    (e.g. 'user__name') whose first segment is a field on ``model``."""
    if not hasattr(name, 'split'):
        return False
    if name.find("__") <= 0:
        return False
    first_part = name.split("__")[0]
    return first_part in model._meta.get_all_field_names()
def lookup_field(name, obj, model_admin=None):
    """Resolve ``name`` against ``obj`` (or ``model_admin``).

    Returns a (field, attr, value) triple: ``field`` is the model field or
    None for non-field lookups; ``attr`` is the callable/attribute that was
    used (None for real fields); ``value`` is the resolved value.  Related
    lookups using 'rel__subfield' syntax recurse into the related object.
    """
    opts = obj._meta
    try:
        f = opts.get_field(name)
    except models.FieldDoesNotExist:
        # For non-field values, the value is either a method, property or
        # returned via a callable.
        if callable(name):
            attr = name
            value = attr(obj)
        elif (model_admin is not None and hasattr(model_admin, name) and
              not name == '__str__' and not name == '__unicode__'):
            attr = getattr(model_admin, name)
            value = attr(obj)
        else:
            if is_rel_field(name,obj):
                # Follow the first relation segment and resolve the rest
                # against the related object.
                parts = name.split("__")
                rel_name,sub_rel_name = parts[0],"__".join(parts[1:])
                rel_obj = getattr(obj,rel_name)
                if rel_obj is not None:
                    return lookup_field(sub_rel_name,rel_obj,model_admin)
            attr = getattr(obj, name)
            if callable(attr):
                value = attr()
            else:
                value = attr
        f = None
    else:
        attr = None
        value = getattr(obj, name)
    return f, attr, value
def label_for_field(name, model, model_admin=None, return_attr=False):
    """
    Returns a sensible label for a field name. The name can be a callable or the
    name of an object attributes, as well as a genuine fields. If return_attr is
    True, the resolved attribute (which could be a callable) is also returned.
    This will be None if (and only if) the name refers to a field.
    """
    attr = None
    try:
        field = model._meta.get_field_by_name(name)[0]
        if isinstance(field, ManyToOneRel):
            label = field.opts.verbose_name
        else:
            label = field.verbose_name
    except models.FieldDoesNotExist:
        if name == "__unicode__":
            label = force_unicode(model._meta.verbose_name)
            attr = unicode
        elif name == "__str__":
            label = smart_str(model._meta.verbose_name)
            attr = str
        else:
            if callable(name):
                attr = name
            elif model_admin is not None and hasattr(model_admin, name):
                attr = getattr(model_admin, name)
            elif hasattr(model, name):
                attr = getattr(model, name)
            elif is_rel_field(name,model):
                # 'rel__subfield' lookup: combine this relation's label with
                # the label resolved recursively on the related model.
                parts = name.split("__")
                rel_name,name = parts[0],"__".join(parts[1:])
                field = model._meta.get_field_by_name(rel_name)[0]
                if isinstance(field, ManyToOneRel):
                    label = field.opts.verbose_name
                else:
                    label = field.verbose_name
                rel_model = field.rel.to
                rel_label = label_for_field(name, rel_model, model_admin=model_admin, return_attr=return_attr)
                if return_attr:
                    rel_label,attr = rel_label
                    return ("%s %s"%(label,rel_label), attr)
                else:
                    return "%s %s"%(label,rel_label)
            else:
                message = "Unable to lookup '%s' on %s" % (
                    name, model._meta.object_name)
                if model_admin:
                    message += " or %s" % (model_admin.__class__.__name__,)
                raise AttributeError(message)

            if hasattr(attr, "short_description"):
                label = attr.short_description
            elif callable(attr):
                if attr.__name__ == "<lambda>":
                    label = "--"
                else:
                    label = pretty_name(attr.__name__)
            else:
                label = pretty_name(name)
    if return_attr:
        return (label, attr)
    else:
        return label
def help_text_for_field(name, model):
    """Return the help_text declared for model field ``name``; '' when the
    name does not refer to a real field."""
    try:
        help_text = model._meta.get_field_by_name(name)[0].help_text
    except models.FieldDoesNotExist:
        help_text = ""
    return smart_unicode(help_text)
def admin_urlname(value, arg):
    """Build the namespaced URL name for an xadmin view of a model,
    e.g. admin_urlname(opts, 'change') -> 'xadmin:app_model_change'."""
    return 'xadmin:{0}_{1}_{2}'.format(value.app_label, value.module_name, arg)
def boolean_icon(field_val):
    """Render a font-awesome icon for a boolean value (True/False/None)."""
    icon_classes = {
        True: 'fa fa-check-circle text-success',
        False: 'fa fa-times-circle text-error',
        None: 'fa fa-question-circle muted',
    }
    return mark_safe(u'<i class="%s" alt="%s"></i>' % (icon_classes[field_val], field_val))
def display_for_field(value, field):
    """Format ``value`` for changelist display according to its model
    ``field`` type (choices, booleans, dates, numbers, m2m, text)."""
    from xadmin.views.list import EMPTY_CHANGELIST_VALUE
    if field.flatchoices:
        return dict(field.flatchoices).get(value, EMPTY_CHANGELIST_VALUE)
    # NullBooleanField needs special-case null-handling, so it comes
    # before the general null test.
    elif isinstance(field, models.BooleanField) or isinstance(field, models.NullBooleanField):
        return boolean_icon(value)
    elif value is None:
        return EMPTY_CHANGELIST_VALUE
    elif isinstance(field, models.DateTimeField):
        return formats.localize(tz_localtime(value))
    elif isinstance(field, (models.DateField, models.TimeField)):
        return formats.localize(value)
    elif isinstance(field, models.DecimalField):
        return formats.number_format(value, field.decimal_places)
    elif isinstance(field, models.FloatField):
        return formats.number_format(value)
    elif isinstance(field.rel, models.ManyToManyRel):
        return ', '.join([smart_unicode(obj) for obj in value.all()])
    else:
        return smart_unicode(value)
def display_for_value(value, boolean=False):
    """Format a non-field ``value`` for changelist display; with
    ``boolean=True`` the value is rendered as a boolean icon."""
    from xadmin.views.list import EMPTY_CHANGELIST_VALUE
    if boolean:
        return boolean_icon(value)
    elif value is None:
        return EMPTY_CHANGELIST_VALUE
    elif isinstance(value, datetime.datetime):
        return formats.localize(tz_localtime(value))
    elif isinstance(value, (datetime.date, datetime.time)):
        return formats.localize(value)
    elif isinstance(value, (decimal.Decimal, float)):
        return formats.number_format(value)
    else:
        return smart_unicode(value)
class NotRelationField(Exception):
    """Raised when a field expected to be a relation is a plain data field."""
    pass
def get_model_from_relation(field):
    """Return the model reached through a relation field; raise
    NotRelationField for ordinary data fields."""
    if isinstance(field, ManyToOneRel):
        return field.related_model
    elif getattr(field, 'rel'): # or isinstance?
        return field.rel.to
    else:
        raise NotRelationField
def reverse_field_path(model, path):
    """ Create a reversed field path.
    E.g. Given (Order, "user__groups"),
    return (Group, "user__order").
    Final field must be a related model, not a data field.
    """
    reversed_path = []
    parent = model
    pieces = path.split(LOOKUP_SEP)
    for piece in pieces:
        field, model, direct, m2m = parent._meta.get_field_by_name(piece)
        # skip trailing data field if extant:
        if len(reversed_path) == len(pieces) - 1: # final iteration
            try:
                get_model_from_relation(field)
            except NotRelationField:
                break
        if direct:
            # Forward relation: follow it to the related model.
            related_name = field.related_query_name()
            parent = field.rel.to
        else:
            # Reverse relation: the owning model/field come from the accessor.
            related_name = field.field.name
            parent = field.model
        # Build the reversed lookup from the far end towards the root.
        reversed_path.insert(0, related_name)
    return (parent, LOOKUP_SEP.join(reversed_path))
def get_fields_from_path(model, path):
    """Return the list of model Fields named by *path*, relative to *model*.

    e.g. (ModelX, "user__groups__name") -> [
        <django.db.models.fields.related.ForeignKey object at 0x...>,
        <django.db.models.fields.related.ManyToManyField object at 0x...>,
        <django.db.models.fields.CharField object at 0x...>,
    ]
    """
    fields = []
    for piece in path.split(LOOKUP_SEP):
        # The model owning this piece: the root model for the first piece,
        # afterwards the model the previously resolved field relates to.
        owner = get_model_from_relation(fields[-1]) if fields else model
        fields.append(owner._meta.get_field_by_name(piece)[0])
    return fields
def remove_trailing_data_field(fields):
    """ Discard trailing non-relation field if extant.

    Returns *fields* unchanged when empty (the original raised IndexError).
    """
    if not fields:
        # Nothing to trim on an empty field path.
        return fields
    try:
        get_model_from_relation(fields[-1])
    except NotRelationField:
        fields = fields[:-1]
    return fields
def get_limit_choices_to_from_path(model, path):
    """ Return Q object for limiting choices if applicable.
    If final model in path is linked via a ForeignKey or ManyToManyField which
    has a `limit_choices_to` attribute, return it as a Q object.
    """
    fields = get_fields_from_path(model, path)
    fields = remove_trailing_data_field(fields)
    # limit_choices_to may be absent/None, a plain dict, or already a Q object.
    limit_choices_to = (
        fields and hasattr(fields[-1], 'rel') and
        getattr(fields[-1].rel, 'limit_choices_to', None))
    if not limit_choices_to:
        return models.Q() # empty Q
    elif isinstance(limit_choices_to, models.Q):
        return limit_choices_to # already a Q
    else:
        return models.Q(**limit_choices_to) # convert dict to Q
def sortkeypicker(keynames):
    """Return a sort-key function over dict records for the given *keynames*.

    A leading '-' on a name requests descending order for that key (the value
    is negated).  Note: *keynames* is normalised in place -- leading '-'
    prefixes are stripped, matching the original behaviour.
    """
    descending = set()
    for idx, name in enumerate(keynames):
        if name.startswith('-'):
            stripped = name[1:]
            keynames[idx] = stripped
            descending.add(stripped)

    def keyfunc(record):
        key = []
        for name in keynames:
            val = record[name]
            key.append(-val if name in descending else val)
        return key

    return keyfunc
| iedparis8/django-xadmin | util.py | Python | bsd-3-clause | 19,511 |
```
Write an efficient algorithm that searches for a value in an m x n matrix. This matrix has the following properties:
Integers in each row are sorted from left to right.
The first integer of each row is greater than the last integer of the previous row.
For example,
Consider the following matrix:
[
[1, 3, 5, 7],
[10, 11, 16, 20],
[23, 30, 34, 50]
]
Given target = 3, return true
```
# Good answer !
class Solution:
    # @param matrix, a list of lists of integers
    # @param target, an integer
    # @return a boolean
    def searchMatrix(self, matrix, target):
        """Binary-search a row-major sorted m x n matrix for *target*.

        Treats the matrix as one sorted list of m*n values, so runs in
        O(log(m*n)).  Returns True iff *target* is present.
        """
        # Guard empty input: the original crashed on [] / [[]].
        if not matrix or not matrix[0]:
            return False
        m, n = len(matrix), len(matrix[0])
        left, right = 0, m * n - 1
        while left <= right:
            # '//' keeps integer division under Python 3 (plain '/' produced
            # a float and broke list indexing).
            mid = (left + right) // 2
            value = matrix[mid // n][mid % n]
            if value == target:
                return True
            elif value > target:
                right = mid - 1
            else:
                left = mid + 1
        return False
| UmassJin/Leetcode | Array/Search_in_2D_matrix.py | Python | mit | 999 |
from PyQt5.QtCore import pyqtSlot, pyqtSignal, Qt
from PyQt5.QtWidgets import QDialog
from urh.ui.ui_advanced_modulation_settings import Ui_DialogAdvancedModSettings
class AdvancedModulationOptionsController(QDialog):
    """Modal dialog exposing advanced modulation settings.

    Emits ``pause_threshold_edited`` / ``message_length_divisor_edited``
    (with the new spin-box value) when OK is pressed and the corresponding
    value actually changed.
    """
    pause_threshold_edited = pyqtSignal(int)
    message_length_divisor_edited = pyqtSignal(int)

    def __init__(self, pause_threshold: int, message_length_divisor: int, parent=None):
        super().__init__(parent)
        self.ui = Ui_DialogAdvancedModSettings()
        self.ui.setupUi(self)
        # Free the dialog's resources as soon as it is closed.
        self.setAttribute(Qt.WA_DeleteOnClose)
        # Remember the initial values so signals fire only on real changes.
        self.pause_threshold = pause_threshold
        self.message_length_divisor = message_length_divisor
        self.ui.spinBoxPauseThreshold.setValue(pause_threshold)
        self.ui.spinBoxMessageLengthDivisor.setValue(message_length_divisor)
        self.create_connects()

    def create_connects(self):
        """Wire the dialog button box to its accept/reject handlers."""
        self.ui.buttonBox.accepted.connect(self.on_accept_clicked)
        self.ui.buttonBox.rejected.connect(self.reject)

    @pyqtSlot()
    def on_accept_clicked(self):
        """Emit change signals for edited values, then close with accept()."""
        if self.pause_threshold != self.ui.spinBoxPauseThreshold.value():
            self.pause_threshold_edited.emit(self.ui.spinBoxPauseThreshold.value())
        if self.message_length_divisor != self.ui.spinBoxMessageLengthDivisor.value():
            self.message_length_divisor_edited.emit(self.ui.spinBoxMessageLengthDivisor.value())
        self.accept()
| splotz90/urh | src/urh/controller/AdvancedModulationOptionsController.py | Python | gpl-3.0 | 1,420 |
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Gradients for operators defined in tensor_array_ops.py."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import ops
from tensorflow.python.ops import tensor_array_ops
# TODO(b/31222613): These ops may be differentiable, and there may be
# latent bugs here.
# Each TensorArray op family (V1 plus the V2/V3 rewrites) is registered as
# not differentiable; gradients flow through the read/write/gather/scatter/
# concat/split ops registered below instead.
ops.NotDifferentiable("TensorArray")
ops.NotDifferentiable("TensorArrayGrad")
ops.NotDifferentiable("TensorArraySize")
ops.NotDifferentiable("TensorArrayClose")
ops.NotDifferentiable("TensorArrayV2")
ops.NotDifferentiable("TensorArrayGradV2")
ops.NotDifferentiable("TensorArraySizeV2")
ops.NotDifferentiable("TensorArrayCloseV2")
ops.NotDifferentiable("TensorArrayV3")
ops.NotDifferentiable("TensorArrayGradV3")
ops.NotDifferentiable("TensorArraySizeV3")
ops.NotDifferentiable("TensorArrayCloseV3")
def _GetGradSource(op_or_tensor):
"""Identify which call to tf.gradients created this gradient op or tensor.
TensorArray gradient calls use an accumulator TensorArray object. If
multiple gradients are calculated and run in the same session, the multiple
gradient nodes may accidentally flow throuth the same accumulator TensorArray.
This double counting breaks the TensorArray gradient flow.
The solution is to identify which gradient call this particular
TensorArray*Grad is being called in, by looking at the input gradient
tensor's name, and create or lookup an accumulator gradient TensorArray
associated with this specific call. This solves any confusion and ensures
different gradients from the same forward graph get their own accumulators.
This function creates the unique label associated with the tf.gradients call
that is used to create the gradient TensorArray.
Args:
op_or_tensor: `Tensor` or `Operation` which is an input to a
TensorArray*Grad call.
Returns:
A python string, the unique label associated with this particular
gradients calculation.
Raises:
ValueError: If not called within a gradients calculation.
"""
name_tokens = op_or_tensor.name.split("/")
grad_pos = [i for i, x in enumerate(name_tokens) if x.startswith("gradients")]
if not grad_pos:
raise ValueError(
"Expected op/tensor name to start with gradients (excluding scope)"
", got: %s" % op_or_tensor.name)
return "/".join(name_tokens[:grad_pos[-1] + 1])
@ops.RegisterGradient("TensorArrayRead")
@ops.RegisterGradient("TensorArrayReadV2")
@ops.RegisterGradient("TensorArrayReadV3")
def _TensorArrayReadGrad(op, grad):
  """Gradient for TensorArrayRead.
  Args:
    op: Forward TensorArrayRead op.
    grad: Gradient `Tensor` to TensorArrayRead.
  Returns:
    A flow `Tensor`, which can be used in control dependencies to
    force the write of `grad` to the gradient `TensorArray`.
  """
  # Note: the forward flow dependency in the call to grad() is necessary for
  # the case of dynamic sized TensorArrays. When creating the gradient
  # TensorArray, the final size of the forward array must be known.
  # For this we need to wait until it has been created by depending on
  # the input flow of the original op.
  handle = op.inputs[0]
  index = op.inputs[1]
  flow = op.inputs[2]
  dtype = op.get_attr("dtype")
  # The grad-source label keys the accumulator so distinct tf.gradients()
  # calls do not share (and double-count) the same gradient TensorArray.
  grad_source = _GetGradSource(grad)
  g = (tensor_array_ops.TensorArray(dtype=dtype, handle=handle, flow=flow,
                                    colocate_with_first_write_call=False)
       .grad(source=grad_source, flow=flow))
  w_g = g.write(index, grad)
  # Inputs were (handle, index, flow); only the flow carries gradient.
  return [None, None, w_g.flow]
@ops.RegisterGradient("TensorArrayWrite")
@ops.RegisterGradient("TensorArrayWriteV2")
@ops.RegisterGradient("TensorArrayWriteV3")
def _TensorArrayWriteGrad(op, flow):
  """Gradient for TensorArrayWrite.
  Args:
    op: Forward TensorArrayWrite op.
    flow: Gradient `Tensor` flow to TensorArrayWrite.
  Returns:
    A grad `Tensor`, the gradient created in an upstream ReadGrad or PackGrad.
  """
  # handle is the output store_handle of TensorArrayReadGrad or
  # the handle output of TensorArrayWriteGrad. we must use this one.
  handle = op.inputs[0]
  index = op.inputs[1]
  dtype = op.get_attr("T")
  grad_source = _GetGradSource(flow)
  g = (tensor_array_ops.TensorArray(dtype=dtype, handle=handle, flow=flow,
                                    colocate_with_first_write_call=False)
       .grad(source=grad_source, flow=flow))
  grad = g.read(index)
  # Inputs were (handle, index, value, flow); only `value` gets a gradient.
  return [None, None, grad, flow]
@ops.RegisterGradient("TensorArrayGather")
@ops.RegisterGradient("TensorArrayGatherV2")
@ops.RegisterGradient("TensorArrayGatherV3")
def _TensorArrayGatherGrad(op, grad):
  """Gradient for TensorArrayGather.
  Args:
    op: Forward TensorArrayGather op.
    grad: Gradient `Tensor` to TensorArrayGather.
  Returns:
    A flow `Tensor`, which can be used in control dependencies to
    force the write of `grad` to the gradient `TensorArray`.
  """
  # Note: the forward flow dependency in the call to grad() is necessary for
  # the case of dynamic sized TensorArrays. When creating the gradient
  # TensorArray, the final size of the forward array must be known.
  # For this we need to wait until it has been created by depending on
  # the input flow of the original op.
  handle = op.inputs[0]
  indices = op.inputs[1]
  flow = op.inputs[2]
  dtype = op.get_attr("dtype")
  grad_source = _GetGradSource(grad)
  g = (tensor_array_ops.TensorArray(dtype=dtype, handle=handle, flow=flow,
                                    colocate_with_first_write_call=False)
       .grad(source=grad_source, flow=flow))
  # Gather's gradient scatters `grad` back to the gathered indices.
  u_g = g.scatter(indices, grad)
  return [None, None, u_g.flow]
@ops.RegisterGradient("TensorArrayScatter")
@ops.RegisterGradient("TensorArrayScatterV2")
@ops.RegisterGradient("TensorArrayScatterV3")
def _TensorArrayScatterGrad(op, flow):
  """Gradient for TensorArrayScatter.
  Args:
    op: Forward TensorArrayScatter op.
    flow: Gradient `Tensor` flow to TensorArrayScatter.
  Returns:
    A grad `Tensor`, the gradient created in upstream ReadGrads or PackGrad.
  """
  handle = op.inputs[0]
  indices = op.inputs[1]
  dtype = op.get_attr("T")
  grad_source = _GetGradSource(flow)
  g = (tensor_array_ops.TensorArray(dtype=dtype, handle=handle, flow=flow,
                                    colocate_with_first_write_call=False)
       .grad(source=grad_source, flow=flow))
  # Scatter's gradient gathers from the scattered indices.
  grad = g.gather(indices)
  # Inputs were (handle, indices, value, flow); only `value` gets a gradient.
  return [None, None, grad, flow]
@ops.RegisterGradient("TensorArrayConcat")
@ops.RegisterGradient("TensorArrayConcatV2")
@ops.RegisterGradient("TensorArrayConcatV3")
def _TensorArrayConcatGrad(op, grad, unused_lengths_grad):
  """Gradient for TensorArrayConcat.
  Args:
    op: Forward TensorArrayConcat op.
    grad: Gradient `Tensor` to TensorArrayConcat.
    unused_lengths_grad: Gradient for the `lengths` output; never used.
  Returns:
    A flow `Tensor`, which can be used in control dependencies to
    force the write of `grad` to the gradient `TensorArray`.
  """
  # Note: the forward flow dependency in the call to grad() is necessary for
  # the case of dynamic sized TensorArrays. When creating the gradient
  # TensorArray, the final size of the forward array must be known.
  # For this we need to wait until it has been created by depending on
  # the input flow of the original op.
  handle = op.inputs[0]
  flow = op.inputs[1]
  # The forward op's `lengths` output tells us how to split `grad` back
  # into per-element pieces.
  lengths = op.outputs[1]
  dtype = op.get_attr("dtype")
  grad_source = _GetGradSource(grad)
  g = (tensor_array_ops.TensorArray(dtype=dtype, handle=handle, flow=flow,
                                    colocate_with_first_write_call=False)
       .grad(source=grad_source, flow=flow))
  u_g = g.split(grad, lengths=lengths)
  # handle, flow_in
  return [None, u_g.flow]
@ops.RegisterGradient("TensorArraySplit")
@ops.RegisterGradient("TensorArraySplitV2")
@ops.RegisterGradient("TensorArraySplitV3")
def _TensorArraySplitGrad(op, flow):
  """Gradient for TensorArraySplit.
  Args:
    op: Forward TensorArraySplit op.
    flow: Gradient `Tensor` flow to TensorArraySplit.
  Returns:
    A grad `Tensor`, the gradient created in upstream ReadGrads or PackGrad.
  """
  handle = op.inputs[0]
  dtype = op.get_attr("T")
  grad_source = _GetGradSource(flow)
  g = (tensor_array_ops.TensorArray(dtype=dtype, handle=handle, flow=flow,
                                    colocate_with_first_write_call=False)
       .grad(source=grad_source, flow=flow))
  # Split's gradient is the concatenation of the per-element gradients.
  grad = g.concat()
  # handle, value, lengths, flow_in
  return [None, grad, None, flow]
| unnikrishnankgs/va | venv/lib/python3.5/site-packages/tensorflow/python/ops/tensor_array_grad.py | Python | bsd-2-clause | 9,083 |
from numpy import array, arctan2, arcsin
from bullet.bullet import Vector3, Quaternion, Transform
from cycgkit.cgtypes import quat, mat4, vec3
# Degrees-to-radians factor (pi / 180).
radian = 0.0174532925
# 180 / pi; presumably used for FOV angle conversion (name) -- confirm at call sites.
FOVconst = 57.295780490442972
# Defaults for the physics layer; NOTE(review): usage not visible here -- verify.
defaultMass = 1.0
defaultGravity = 9.8
class eVec3(vec3):
    """vec3 subclass with numpy and Bullet interoperability helpers."""

    def __init__(self, *args):
        # BUG FIX: the previous constructor reset x/y/z to 0 *after* calling
        # the base constructor, silently discarding the caller's components.
        # Simply delegate to vec3.
        super(eVec3, self).__init__(*args)

    def toNumpy(self):
        """Return the vector as a numpy float32 array.

        @rtype : array
        """
        return array(self, 'f')

    @staticmethod
    def frombtVector3(btVec3):
        """Build an eVec3 from a Bullet Vector3."""
        return eVec3(btVec3.x, btVec3.y, btVec3.z)

    def tobtVector3(self):
        """Return the vector as a Bullet Vector3."""
        return Vector3(self.x, self.y, self.z)

    def toList(self):
        """Return the components as a plain [x, y, z] list."""
        return list(self)

    def setAll(self, value):
        """Set all three components to *value*."""
        self.x = value
        self.y = value
        self.z = value

    def cross(self, other):
        """Cross product, returned as an eVec3 instead of a plain vec3."""
        res = super(eVec3, self).cross(other)
        return eVec3(res)
def bulletQuatToRotList(btquat):
    """Convert a Bullet quaternion to a list of Euler angles in degrees.

    NOTE(review): the rotation order is baked into the _threeAxisRot argument
    pattern (see the links inside that helper) and the z angle is mirrored --
    confirm both against the engine's coordinate conventions before reuse.
    """
    q = quat(btquat.getW(), btquat.getX(), btquat.getY(), btquat.getZ())
    x, y, z = _threeAxisRot(2 * (q.x * q.y + q.w * q.z),
                            q.w * q.w + q.x * q.x - q.y * q.y - q.z * q.z,
                            -2 * (q.x * q.z - q.w * q.y),
                            2 * (q.y * q.z + q.w * q.x),
                            q.w * q.w - q.x * q.x - q.y * q.y + q.z * q.z)
    # ``radian`` is the degrees->radians factor, so dividing converts radians
    # back to degrees; z is negated.
    lvec = [x / radian, y / radian, -1.0 * (z / radian)]
    # todo: add 'getOpenGLMatrix(m)' to bullet transform
    '''
    btTransform trans2;
    pinRigidBody->getMotionState()->getWorldTransform(trans2);
    btScalar m[16];
    trans.getOpenGLMatrix(m);
    '''
    return lvec
def bulletVectorToList(vector):
    """Return [x, y, z] extracted from a Bullet vector-like object."""
    return [getattr(vector, axis) for axis in ('x', 'y', 'z')]
def listToBulletVector(oList):
    """Build a Bullet Vector3 from the first three items of *oList*."""
    x, y, z = oList[0], oList[1], oList[2]
    return Vector3(x, y, z)
def listToBulletQuat(oList):
    """Build a Bullet Quaternion from [x, y, z, w] scalars."""
    # Distinct local name: the original shadowed the cgtypes ``quat`` class.
    result = Quaternion.fromScalars(oList[0], oList[1], oList[2], oList[3])
    return result
def cgQuatToBulletQuat(cgQuat):
    """Convert a cgkit quat into a Bullet Quaternion (x, y, z, w order)."""
    return Quaternion.fromScalars(cgQuat.x, cgQuat.y, cgQuat.z, cgQuat.w)
def btTransformFromPosRotMat(pos, rotMat):
    """Build a Bullet Transform from a position list and a 4x4 rotation matrix.

    ``rotMat`` may be ``None``, meaning identity rotation.
    """
    if rotMat is None:
        rotMat = mat4.identity()
    btpos = listToBulletVector(pos)
    # mat4 -> cgkit quaternion -> Bullet quaternion
    btrot = cgQuatToBulletQuat(quat().fromMat(mat4(rotMat)))
    newtrans = Transform()
    newtrans.setOrigin(btpos)
    newtrans.setRotation(btrot)
    return newtrans
def scaleNumber(val, src, dst):
    """
    http://stackoverflow.com/a/4155197
    Linearly map *val* from the range *src* to the range *dst*.
    If *src* has zero width, the midpoint-ish fallback ``dst[1] / 2.0``
    is returned.
    @rtype : int
    @type dst: list
    @type src: list
    @type val: int
    Examples:
        >> scaleNumber(0, (0.0, 99.0), (-1.0, 1.0))
            -1.0
        >> scaleNumber(99, (0.0, 99.0), (-1.0, 1.0))
            1
        >> scaleNumber(1, (0.0, 2.0), (0.0, 1.0))
            0.5
    """
    src_lo, src_hi = src
    dst_lo, dst_hi = dst
    try:
        ratio = (val - src_lo) / (src_hi - src_lo)
    except ZeroDivisionError:
        return dst_hi / 2.0
    return ratio * (dst_hi - dst_lo) + dst_lo
def _threeAxisRot(r11, r12, r21, r31, r32):
# http://bediyap.com/programming/convert-quaternion-to-euler-rotations/
# http://stackoverflow.com/a/27496984
res = [0, 0, 0]
res[0] = arctan2(r31, r32)
try:
res[1] = arcsin(r21)
except RuntimeWarning:
pass
res[2] = arctan2(r11, r12)
return res
def ewMul(a, b):
    """Element-wise (Hadamard) product of two vec3-like values."""
    products = (a.x * b.x, a.y * b.y, a.z * b.z)
    return vec3(*products)
def ewDiv(a, b):
    """Element-wise quotient of two vec3-like values."""
    quotients = (a.x / b.x, a.y / b.y, a.z / b.z)
    return vec3(*quotients)
import sys, serial
import HotRod
def processCmdline(args):
    """Split *args* into ``(flags, files, port)``.

    '-p' consumes the following token as the serial port name; any other
    '-x' token is collected as a flag, everything else as a file name.
    """
    flags = []
    files = []
    port = ''
    tokens = iter(args)
    for token in tokens:
        if token[0] == '-':
            if token[1] == 'p':
                port = next(tokens)
            else:
                flags.append(token)
        else:
            files.append(token)
    return (flags, files, port)
if len(sys.argv) == 0:
print "Usage:"
print " SetupHotRod.py [-p portname] [-s] [-d] [-r] [cmdFile] ..."
print " cmdFile Name of one or more serial commands file(s). Not required."
print " -p specify serial port, e.g. COM3 for Windows, /dev/ttyS2 for linux and Mac"
print " if not specified port will be autodetected"
print " -s save mapping to EEPROM after processing file"
print " -d dump current mapping after processing file"
print " -r reset mapping to default before processing file"
print " -l reinitialize mapping from EEPROM before processing file"
flags, files, HRPort = processCmdline(sys.argv[1:])
if len(HRPort) == 0:
HRPort = HotRod.findHotRod()
if not HRPort:
print "No HotRod found."
exit(1)
else:
print "HotRod found on port %s." % HRPort
lines = []
for inputFile in files:
with open(inputFile, 'r') as f:
lines.extend([l.strip()for l in f.readlines() if len(l) > 1])
commands = []
for l in lines:
a = l.split()
if len(a) == 0 or a[0][0] == '#':
continue
a[0] = a[0].upper()
a[1] = a[1].upper()
if a[0] == 'M':
if a[1] in HotRod.HRCodes.keys():
a[1] = "%02X" % HotRod.HRCodes[a[1]]
elif len(a[1]) != 2:
print "Bad code %s" % a[1]
break
if a[2] in HotRod.KBKeys.keys():
a[2] = "%02X" % HotRod.KBKeys[a[2]]
elif len(a[2]) == 1:
a[2] = "%02X" % (ord(a[2]))
elif len(a[2]) != 2:
print "Bad code %s" % a[2]
break
cmd = ' '.join(a)
else:
cmd = a[0]
commands.append(cmd)
if '-l' in flags:
commands.insert(0, 'L')
if '-r' in flags:
commands.insert(0, 'R')
if '-d' in flags:
commands.append('D')
if '-s' in flags:
commands.append('S')
sp = serial.Serial(port = HRPort, baudrate = 9600, bytesize = serial.EIGHTBITS, parity= serial.PARITY_NONE, stopbits = serial.STOPBITS_ONE, timeout = 0.2, write_timeout = 1)
for cmd in commands:
response = ''
sp.write(cmd + '\n')
c = sp.read(1)
while c != '$' and c != 0:
response += c
c = sp.read()
print response.strip()
if response.rfind('OK') < 0:
print "Error processing commands"
break
| fizzymagic/HotRod | SetupHotRod/SetupHotRod.py | Python | gpl-3.0 | 2,773 |
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
import sys
from setuptools import setup, find_packages
PACKAGE_NAME = 'mozrunner'
PACKAGE_VERSION = '6.12'
desc = """Reliable start/stop/configuration of Mozilla Applications (Firefox, Thunderbird, etc.)"""
deps = ['mozdevice >= 0.37',
'mozfile >= 1.0',
'mozinfo >= 0.7',
'mozlog >= 3.0',
'mozprocess >= 0.23',
'mozprofile >= 0.18',
]
EXTRAS_REQUIRE = {'crash': ['mozcrash >= 0.14']}
# we only support python 2 right now
assert sys.version_info[0] == 2
setup(name=PACKAGE_NAME,
version=PACKAGE_VERSION,
description=desc,
long_description="see http://mozbase.readthedocs.org/",
classifiers=['Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
keywords='mozilla',
author='Mozilla Automation and Tools team',
author_email='tools@lists.mozilla.org',
url='https://wiki.mozilla.org/Auto-tools/Projects/Mozbase',
license='MPL 2.0',
packages=find_packages(),
package_data={'mozrunner': [
'resources/metrotestharness.exe'
]},
zip_safe=False,
install_requires=deps,
extras_require=EXTRAS_REQUIRE,
entry_points="""
# -*- Entry points: -*-
[console_scripts]
mozrunner = mozrunner:cli
""",
)
| cstipkovic/spidermonkey-research | testing/mozbase/mozrunner/setup.py | Python | mpl-2.0 | 1,848 |
from django.conf import settings
from django.core.paginator import Paginator, EmptyPage
def paginate(request, items, per_page=settings.DEFAULT_PER_PAGE,
             page_key='page'):
    """Paginate *items* and return ``(paginator, page)``.

    The requested page number is read from ``request.GET[page_key]``;
    a missing, non-numeric, or out-of-range value falls back to page 1.
    """
    paginator = Paginator(items, per_page)
    try:
        requested = int(request.GET[page_key])
    except (ValueError, KeyError):
        requested = 1
    try:
        page = paginator.page(requested)
    except EmptyPage:
        page = paginator.page(1)
    return paginator, page
| frague59/wagtailpolls | wagtailpolls/pagination.py | Python | bsd-3-clause | 438 |
import plotly.tools as tls
import pandas as pd
from sqlalchemy import create_engine # database connection
import datetime as dt
from IPython.display import display
import plotly.plotly as py # interactive graphing
import plotly.graph_objs as go
from plotly.graph_objs import Bar, Scatter, Marker, Layout, Histogram, Box
from plotly.offline import download_plotlyjs, init_notebook_mode, plot, iplot
import numpy as np
import matplotlib.mlab as mlab
import matplotlib.pyplot as plt
import psycopg2 as pg
#load python script that batch loads pandas df to sql
import cStringIO
# Input locations and PostgreSQL connection setup.
data_dir = '/home/botty/Documents/CCFD/data/'
evt_name = 'Featurespace_events_output.csv'
auth_name = 'Featurespace_auths_output.csv'
db_name = 'c1_agg.db'
db_name = 'ccfd.db'  # NOTE(review): rebinding shadows the previous value; both sqlite names look unused below.
address = 'postgresql://script@localhost:5432/ccfd'
engine = create_engine(address)
connection = engine.raw_connection()
# Decode incoming text leniently (ignore undecodable bytes).
connection.text_factory = lambda x: unicode(x, 'utf-8', 'ignore')
cursor = connection.cursor()
# (Re)create the target table: drop any existing copy, then create the full
# authorization-record schema matching the CSV columns loaded below.
drop_qry = '''DROP TABLE IF EXISTS {table}; '''
create_qry = '''
CREATE TABLE {table}
(
index bigint,
acct_id text,
AUTHZN_RQST_PROC_TM timestamp without time zone,
AUTHZN_APPRL_CD text,
AUTHZN_AMT double precision,
MRCH_NM text,
MRCH_CITY_NM text,
MRCH_PSTL_CD text,
MRCH_CNTRY_CD text,
MRCH_ID text,
TRMNL_ID text,
MRCH_CATG_CD text,
POS_ENTRY_MTHD_CD bigint,
POS_COND_CD bigint,
TRMNL_CLASFN_CD bigint,
TRMNL_CAPBLT_CD bigint,
TRMNL_PIN_CAPBLT_CD bigint,
TSYS_DCLN_REAS_CD bigint,
MRCH_TMP_PRTPN_IND text,
AUTHZN_MSG_TYPE_MODR_CD text,
AUTHZN_ACCT_STAT_CD text,
AUTHZN_MSG_TYPE_CD bigint,
AUTHZN_RQST_TYPE_CD bigint,
AUTHZN_RESPNS_CD bigint,
ACCT_STAT_REAS_NUM bigint,
RQST_CARD_SEQ_NUM text,
PIN_OFST_IND bigint,
PIN_VLDTN_IND text,
CARD_VFCN_REJ_CD text,
CARD_VFCN_RESPNS_CD text,
CARD_VFCN2_RESPNS_CD text,
CAVV_CD text,
ECMRC_SCURT_CD text,
ACQR_BIN_NUM text,
ACQR_CRCY_CD bigint,
CRCY_CNVRSN_RT bigint,
AUTHZN_APPRD_AMT double precision,
PRIOR_MONEY_AVL_AMT double precision,
PRIOR_CASH_AVL_AMT double precision,
ACCT_CL_AMT double precision,
ACCT_CURR_BAL double precision,
PREV_ADR_CHNG_DT timestamp without time zone,
PREV_PMT_DT timestamp without time zone,
PREV_PMT_AMT double precision,
PREV_CARD_RQST_DT timestamp without time zone,
FRD_IND text,
FRD_IND_SWT_DT timestamp without time zone
);'''
table = 'data_trim'
cursor.execute(drop_qry.format(table=table))
connection.commit()
cursor.execute(create_qry.format(table=table))
connection.commit()
print 'table created'
# Chunked-load configuration: timing, chunk size, running counters.
start = dt.datetime.now()
chunksize = 10000
j = 0
index_start = 1
###################data source
file_loc = data_dir+auth_name
########################
# Combined date+time parse format used by getTime, e.g. "01JAN2016 12:30:45.123456".
dtFormat = "%d%b%Y %H:%M:%S.%f"
def getTime(x):
    """Combine a row's date ('01JAN2016') and time ('12:30:45.123456')
    columns into a single datetime."""
    stamp = "%s %s" % (x.AUTHZN_RQST_PROC_DT, x.AUTHZN_RQST_PROC_TM)
    return dt.datetime.strptime(stamp, "%d%b%Y %H:%M:%S.%f")
# Stream the CSV in chunks, normalise column types, then bulk-load each
# chunk into PostgreSQL via COPY (much faster than row-wise INSERTs).
for df in pd.read_csv(file_loc, chunksize=chunksize, iterator=True,encoding='ISO-8859-1'):
    df = df.rename(columns={c: c.replace(' ', '') for c in df.columns}) # Remove spaces from columns
    # df['AUTHZN_RQST_PROC_DT'] = pd.to_datetime(df['AUTHZN_RQST_PROC_DT'],format='%d%b%Y') # Convert to datetimes
    # df['AUTHZN_RQST_PROC_TM'] = df['AUTHZN_RQST_PROC_DT']+ pd.to_datetime(df.AUTHZN_RQST_PROC_TM).dt.time
    df['acct_id'] = df['acct_id'].astype(str)
    # Row-wise apply: merges the separate date and time columns.
    df['AUTHZN_RQST_PROC_TM'] = df.apply(lambda x: getTime(x),1)
    # Round-trip via numeric to normalise the approval code, then store as text.
    df['AUTHZN_APPRL_CD'] =pd.to_numeric(df['AUTHZN_APPRL_CD'], errors='coerce')
    df['AUTHZN_APPRL_CD'] =df['AUTHZN_APPRL_CD'].astype(str)
    df.MRCH_CNTRY_CD = df.MRCH_CNTRY_CD.astype(str)
    df.MRCH_CATG_CD = df.MRCH_CATG_CD.astype(str)
    df.AUTHZN_MSG_TYPE_MODR_CD = df.AUTHZN_MSG_TYPE_MODR_CD.astype(str)
    df.RQST_CARD_SEQ_NUM = df.RQST_CARD_SEQ_NUM.astype(str)
    df.ECMRC_SCURT_CD = df.ECMRC_SCURT_CD.astype(str)
    df.ACQR_BIN_NUM = df.ACQR_BIN_NUM.astype(str)
    # Date-only columns: unparseable values become NaT ('coerce').
    df.PREV_ADR_CHNG_DT =pd.to_datetime(df.PREV_ADR_CHNG_DT,errors='coerce',format='%d%b%Y')
    df.PREV_PMT_DT = pd.to_datetime(df.PREV_PMT_DT,errors='coerce',format='%d%b%Y')
    df.PREV_CARD_RQST_DT = pd.to_datetime(df.PREV_CARD_RQST_DT,errors='coerce',format='%d%b%Y')
    df.FRD_IND_SWT_DT = pd.to_datetime(df.FRD_IND_SWT_DT,errors='coerce',format='%d%b%Y')
    # df['AUTHZN_RQST_PROC_TM'] = pd.to_datetime(df[['AUTHZN_RQST_PROC_DT','AUTHZN_RQST_PROC_TM']],format='%Y%m%d%H')
    # df['AUTHZN_RQST_PROC_TM'] = pd.to_datetime(df.AUTHZN_RQST_PROC_DT.dt.strftime('%Y-%m-%d') +' '+ df.AUTHZN_RQST_PROC_TM.dt.strftime('%H'))
    # df['PREV_ADR_CHNG_DT'] = pd.to_datetime(df['PREV_ADR_CHNG_DT'])
    # df['PREV_PMT_DT'] = pd.to_datetime(df['PREV_PMT_DT'])
    # df['PREV_CARD_RQST_DT'] = pd.to_datetime(df['PREV_CARD_RQST_DT'])
    # df['FRD_IND_SWT_DT'] = pd.to_datetime(df['FRD_IND_SWT_DT'])
    # Keep the frame's index monotonically increasing across chunks.
    df.index += index_start
    # Remove the un-interesting columns
    columns = ['AUTHZN_RQST_PROC_DT','EXCSV_ACTVY_PARM_CD']
    for c in df.columns:
        if c in columns:
            df = df.drop(c, axis=1)
    j+=1
    print '{} seconds: completed {} rows'.format((dt.datetime.now() - start).seconds, j*chunksize)
    # display(df)
    # print df.dtypes
    # break
    # table = 'data_trim'
    # df.to_sql(table, disk_engine, if_exists='append')
    # Serialise the chunk as tab-separated text and COPY it into the table.
    output = cStringIO.StringIO()
    #ignore the index
    df.to_csv(output, sep='\t', header=False, index=True,encoding='utf-8')
    # print df[df['AUTHZN_RQST_PROC_TM']=='nan']
    #jump to start of stream
    output.seek(0)
    contents = output.getvalue()
    cur = connection.cursor()
    #null values become ''
    t_mid = dt.datetime.now()
    cur.copy_from(output, table, null="", size=200000)
    connection.commit()
    cur.close()
    print '{} seconds: inserted {} rows'.format((dt.datetime.now() - t_mid).seconds, j*chunksize)
    index_start = df.index[-1] + 1
    # NOTE(review): this break stops after the FIRST chunk only -- it looks
    # like a debugging leftover; remove it to load the whole CSV.
    break
# Create one index per column (sampling the table first for column names).
df = pd.read_sql_query('select * from {table} limit 5'.format(table=table),engine)
col_names = df.columns.values
print col_names
for c,name in enumerate(col_names):
    if name =='index':
        # The frame index column keeps no secondary index.
        continue
    t_mid = dt.datetime.now()
    cursor.execute('''CREATE INDEX id_{table}_{col}
               ON {table} ({col})'''.format(table=table,col=name))
    connection.commit()
    print '{} index created in {}'.format(name,(dt.datetime.now() - t_mid))
print 'idxs created!' | bottydim/detect-credit-card-fraud | ccfd_dnn/db_export.py | Python | mit | 6,348 |
#!/usr/bin/python
# Copyright (c) 2013 The Bitsend Core developers
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
import json
from urllib import urlopen
import requests
import getpass
from string import Template
import sys
import os
import subprocess
class RunError(Exception):
    """Raised when a shelled-out command exits with a non-zero status."""

    def __init__(self, value):
        # Deliberately not forwarded to Exception.__init__: the message is
        # carried on ``value`` only, matching the original behaviour.
        self.value = value

    def __str__(self):
        return repr(self.value)
def run(command, **kwargs):
    """Run *command* (after $VAR substitution from os.environ) via subprocess.

    Extra kwargs are forwarded to subprocess.Popen; output is discarded
    unless stdout/stderr are supplied.  With fail_hard (default True) a
    non-zero exit raises RunError, otherwise the return code is returned.
    """
    fail_hard = kwargs.pop("fail_hard", True)
    # output to /dev/null by default:
    kwargs.setdefault("stdout", open('/dev/null', 'w'))
    kwargs.setdefault("stderr", open('/dev/null', 'w'))
    command = Template(command).substitute(os.environ)
    if "TRACE" in os.environ:
        if 'cwd' in kwargs:
            print("[cwd=%s] %s"%(kwargs['cwd'], command))
        else: print(command)
    try:
        # NOTE(review): naive split on single spaces -- arguments containing
        # spaces are not supported by this helper.
        process = subprocess.Popen(command.split(' '), **kwargs)
        process.wait()
    except KeyboardInterrupt:
        process.terminate()
        raise
    if process.returncode != 0 and fail_hard:
        raise RunError("Failed: "+command)
    return process.returncode
def checkout_pull(clone_url, commit, out):
    """Prepare the chroot build copy and merge the pull's commit onto master.

    Returns True on a clean merge, False when the merge fails; build/merge
    output is appended to the *out* file object.
    """
    # Init
    build_dir=os.environ["BUILD_DIR"]
    # Refresh the throwaway chroot from the master image.
    run("umount ${CHROOT_COPY}/proc", fail_hard=False)
    run("rsync --delete -apv ${CHROOT_MASTER}/ ${CHROOT_COPY}")
    run("rm -rf ${CHROOT_COPY}${SCRIPTS_DIR}")
    run("cp -a ${SCRIPTS_DIR} ${CHROOT_COPY}${SCRIPTS_DIR}")
    # Merge onto upstream/master
    run("rm -rf ${BUILD_DIR}")
    run("mkdir -p ${BUILD_DIR}")
    run("git clone ${CLONE_URL} ${BUILD_DIR}")
    run("git remote add pull "+clone_url, cwd=build_dir, stdout=out, stderr=out)
    run("git fetch pull", cwd=build_dir, stdout=out, stderr=out)
    if run("git merge "+ commit, fail_hard=False, cwd=build_dir, stdout=out, stderr=out) != 0:
        return False
    run("chown -R ${BUILD_USER}:${BUILD_GROUP} ${BUILD_DIR}", stdout=out, stderr=out)
    run("mount --bind /proc ${CHROOT_COPY}/proc")
    return True
def commentOn(commentUrl, success, inMerge, needTests, linkUrl):
    """Delete our previous bot comments on the pull, then post the verdict.

    NOTE(review): the FAILED BUILD/TEST message text contains the typo
    "chanages changes" -- kept byte-identical here; fix the string itself
    if the posted wording should change.
    """
    common_message = """
This test script verifies pulls every time they are updated. It, however, dies sometimes and fails to test properly. If you are waiting on a test, please check timestamps to verify that the test.log is moving at http://jenkins.bluematt.me/pull-tester/current/
Contact BlueMatt on freenode if something looks broken."""
    # Remove old BitsendPullTester comments (I'm being lazy and not paginating here)
    recentcomments = requests.get(commentUrl+"?sort=created&direction=desc",
                                  auth=(os.environ['GITHUB_USER'], os.environ["GITHUB_AUTH_TOKEN"])).json
    for comment in recentcomments:
        if comment["user"]["login"] == os.environ["GITHUB_USER"] and common_message in comment["body"]:
            requests.delete(comment["url"],
                            auth=(os.environ['GITHUB_USER'], os.environ["GITHUB_AUTH_TOKEN"]))
    if success == True:
        if needTests:
            message = "Automatic sanity-testing: PLEASE ADD TEST-CASES, though technically passed. See " + linkUrl + " for binaries and test log."
        else:
            message = "Automatic sanity-testing: PASSED, see " + linkUrl + " for binaries and test log."
        post_data = { "body" : message + common_message}
    elif inMerge:
        post_data = { "body" : "Automatic sanity-testing: FAILED MERGE, see " + linkUrl + " for test log." + """
This pull does not merge cleanly onto current master""" + common_message}
    else:
        post_data = { "body" : "Automatic sanity-testing: FAILED BUILD/TEST, see " + linkUrl + " for binaries and test log." + """
This could happen for one of several reasons:
1. It chanages changes build scripts in a way that made them incompatible with the automated testing scripts (please tweak those patches in qa/pull-tester)
2. It adds/modifies tests which test network rules (thanks for doing that), which conflicts with a patch applied at test time
3. It does not build on either Linux i386 or Win32 (via MinGW cross compile)
4. The test suite fails on either Linux i386 or Win32
5. The block test-cases failed (lookup the first bNN identifier which failed in https://github.com/TheBlueMatt/test-scripts/blob/master/FullBlockTestGenerator.java)
If you believe this to be in error, please ping BlueMatt on freenode or TheBlueMatt here.
""" + common_message}
    resp = requests.post(commentUrl, json.dumps(post_data), auth=(os.environ['GITHUB_USER'], os.environ["GITHUB_AUTH_TOKEN"]))
def testpull(number, comment_url, clone_url, commit):
    """Check out, build and test one pull request, then post the result.

    Results land under RESULTS_DIR/<commit>/ (with a 'current' symlink) and
    the commit hash is appended to TESTED_DB so it is not re-tested.
    """
    print("Testing pull %d: %s : %s"%(number, clone_url,commit))
    dir = os.environ["RESULTS_DIR"] + "/" + commit + "/"
    print(" ouput to %s"%dir)
    if os.path.exists(dir):
        os.system("rm -r " + dir)
    os.makedirs(dir)
    currentdir = os.environ["RESULTS_DIR"] + "/current"
    os.system("rm -r "+currentdir)
    os.system("ln -s " + dir + " " + currentdir)
    out = open(dir + "test.log", 'w+')
    resultsurl = os.environ["RESULTS_URL"] + commit
    checkedout = checkout_pull(clone_url, commit, out)
    if checkedout != True:
        # Merge failure: report and mark the commit tested anyway.
        print("Failed to test pull - sending comment to: " + comment_url)
        commentOn(comment_url, False, True, False, resultsurl)
        open(os.environ["TESTED_DB"], "a").write(commit + "\n")
        return
    run("rm -rf ${CHROOT_COPY}/${OUT_DIR}", fail_hard=False);
    run("mkdir -p ${CHROOT_COPY}/${OUT_DIR}", fail_hard=False);
    run("chown -R ${BUILD_USER}:${BUILD_GROUP} ${CHROOT_COPY}/${OUT_DIR}", fail_hard=False)
    script = os.environ["BUILD_PATH"]+"/qa/pull-tester/pull-tester.sh"
    script += " ${BUILD_PATH} ${MINGW_DEPS_DIR} ${SCRIPTS_DIR}/BitsenddComparisonTool_jar/BitsenddComparisonTool.jar 0 6 ${OUT_DIR}"
    # Run the test harness inside the chroot as the unprivileged build user.
    returncode = run("chroot ${CHROOT_COPY} sudo -u ${BUILD_USER} -H timeout ${TEST_TIMEOUT} "+script,
                     fail_hard=False, stdout=out, stderr=out)
    run("mv ${CHROOT_COPY}/${OUT_DIR} " + dir)
    run("mv ${BUILD_DIR} " + dir)
    # Exit code 42 is the harness's "passed but needs test-cases" signal.
    if returncode == 42:
        print("Successfully tested pull (needs tests) - sending comment to: " + comment_url)
        commentOn(comment_url, True, False, True, resultsurl)
    elif returncode != 0:
        print("Failed to test pull - sending comment to: " + comment_url)
        commentOn(comment_url, False, False, False, resultsurl)
    else:
        print("Successfully tested pull - sending comment to: " + comment_url)
        commentOn(comment_url, True, False, False, resultsurl)
    open(os.environ["TESTED_DB"], "a").write(commit + "\n")
def environ_default(setting, value):
    """Set os.environ[setting] to `value` unless it is already set.

    Uses the mapping's own setdefault, which is the idiomatic (and atomic)
    form of the original ``if not setting in os.environ`` check.
    """
    os.environ.setdefault(setting, value)
# ---- script entry point -------------------------------------------------
# Needs root (for the chroot build) and GitHub credentials to post comments.
if getpass.getuser() != "root":
    print("Run me as root!")
    sys.exit(1)
if "GITHUB_USER" not in os.environ or "GITHUB_AUTH_TOKEN" not in os.environ:
    print("GITHUB_USER and/or GITHUB_AUTH_TOKEN environment variables not set")
    sys.exit(1)
# Default configuration; each value may be overridden via the environment.
environ_default("CLONE_URL", "https://github.com/bitsend/bitsend.git")
environ_default("MINGW_DEPS_DIR", "/mnt/w32deps")
environ_default("SCRIPTS_DIR", "/mnt/test-scripts")
environ_default("CHROOT_COPY", "/mnt/chroot-tmp")
environ_default("CHROOT_MASTER", "/mnt/chroot")
environ_default("OUT_DIR", "/mnt/out")
environ_default("BUILD_PATH", "/mnt/bitsend")
os.environ["BUILD_DIR"] = os.environ["CHROOT_COPY"] + os.environ["BUILD_PATH"]
environ_default("RESULTS_DIR", "/mnt/www/pull-tester")
environ_default("RESULTS_URL", "http://jenkins.bluematt.me/pull-tester/")
environ_default("GITHUB_REPO", "bitsend/bitsend")
environ_default("TESTED_DB", "/mnt/commits-tested.txt")
environ_default("BUILD_USER", "matt")
environ_default("BUILD_GROUP", "matt")
environ_default("TEST_TIMEOUT", str(60*60*2))  # two hours, in seconds
print("Optional usage: pull-tester.py 2112")
# Load the set of commit hashes that have already been tested.
f = open(os.environ["TESTED_DB"])
tested = set( line.rstrip() for line in f.readlines() )
f.close()
if len(sys.argv) > 1:
    # A pull number was given on the command line: test only that pull.
    # NOTE(review): accessing `.json` as an attribute relies on an old
    # `requests` release; current versions expose `.json()` as a method -
    # confirm the pinned requests version before upgrading.
    pull = requests.get("https://api.github.com/repos/"+os.environ["GITHUB_REPO"]+"/pulls/"+sys.argv[1],
                        auth=(os.environ['GITHUB_USER'], os.environ["GITHUB_AUTH_TOKEN"])).json
    testpull(pull["number"], pull["_links"]["comments"]["href"],
             pull["head"]["repo"]["clone_url"], pull["head"]["sha"])
else:
    # No argument: walk all open pulls (paged API) and test any not yet seen.
    for page in range(1,100):
        result = requests.get("https://api.github.com/repos/"+os.environ["GITHUB_REPO"]+"/pulls?state=open&page=%d"%(page,),
                              auth=(os.environ['GITHUB_USER'], os.environ["GITHUB_AUTH_TOKEN"])).json
        if len(result) == 0: break;
        for pull in result:
            if pull["head"]["sha"] in tested:
                print("Pull %d already tested"%(pull["number"],))
                continue
            testpull(pull["number"], pull["_links"]["comments"]["href"],
                     pull["head"]["repo"]["clone_url"], pull["head"]["sha"])
| JohnMnemonick/UralsCoin | qa/pull-tester/pull-tester.py | Python | mit | 8,944 |
try:
import winsound
except ImportError:
pass
import sys
import threading
def play(wav_file):
    """Start playing *wav_file* on a background thread and return at once."""
    SoundThread(wav_file).start()
class SoundThread(threading.Thread):
    """Thread that plays a single WAV file without blocking the caller."""

    def __init__(self, wav_file):
        threading.Thread.__init__(self)
        # Path of the WAV file handed to the platform sound API.
        self.wav_file = wav_file

    def run(self):
        if sys.platform != "win32":
            # TODO: Add non-win audio support
            # I know, it's a shame, but Windows does have one thing going for it, and that's predictability.
            # Non-Win audio coming soonish.
            return
        winsound.PlaySound(self.wav_file, winsound.SND_FILENAME)
| RainDogSoftware/pingpung | pingpung/pplib/audio.py | Python | gpl-2.0 | 674 |
# -*- coding: utf-8 -*-
"""
remotely
~~~~~~~~
:copyright: (c) 2012 by Kefei Dan Zhou
:license: ISC, see LICENSE for more details.
"""
__title__ = 'remotely'
__version__ = '0.2.0'
__author__ = 'Kefei Dan Zhou'
__copyright__ = 'Copyright 2012 Kefei Dan Zhou'
from .remotely import remotely
from .remotely import RemoteClient
from .remotely_server import create_remotely_server
from .remotely_server import RemotelyException
| dzhou/remotely | remotely/__init__.py | Python | bsd-3-clause | 428 |
# -*- coding: utf-8 -*
from __future__ import absolute_import, unicode_literals
import base64
import collections
import json
import unittest
import warnings
from datetime import date, datetime
from decimal import Decimal
# non-standard import name for ugettext_lazy, to prevent strings from being picked up for translation
import django
from django import forms
from django.core.exceptions import ValidationError
from django.forms.utils import ErrorList
from django.template.loader import render_to_string
from django.test import SimpleTestCase, TestCase
from django.utils.html import format_html
from django.utils.safestring import SafeData, mark_safe
from django.utils.translation import ugettext_lazy as __
from wagtail.tests.testapp.blocks import LinkBlock as CustomLinkBlock
from wagtail.tests.testapp.blocks import SectionBlock
from wagtail.tests.testapp.models import EventPage, SimplePage
from wagtail.tests.utils import WagtailTestUtils
from wagtail.utils.deprecation import RemovedInWagtail111Warning
from wagtail.wagtailcore import blocks
from wagtail.wagtailcore.models import Page
from wagtail.wagtailcore.rich_text import RichText
class FooStreamBlock(blocks.StreamBlock):
    """StreamBlock whose validation demands at least one block saying 'foo'."""
    text = blocks.CharBlock()
    error = 'At least one block must say "foo"'

    def clean(self, value):
        cleaned = super(FooStreamBlock, self).clean(value)
        # Equivalent of `not any(... == 'foo')`, written as an all() guard.
        if all(child.value != 'foo' for child in cleaned):
            raise blocks.StreamBlockValidationError(non_block_errors=ErrorList([self.error]))
        return cleaned
class NoExtraContextCharBlock(blocks.CharBlock):
    # Deliberately overrides get_context with the legacy single-argument
    # signature (no parent_context), skipping CharBlock in the MRO, so the
    # tests below can exercise the deprecation path for old-style blocks.
    def get_context(self, value):
        return super(blocks.CharBlock, self).get_context(value)
class ContextCharBlock(blocks.CharBlock):
    """CharBlock that upper-cases its value when building template context."""

    def get_context(self, value, parent_context=None):
        # Skips CharBlock in the MRO on purpose; upper-casing happens inline.
        return super(blocks.CharBlock, self).get_context(str(value).upper(), parent_context)
class TestFieldBlock(WagtailTestUtils, SimpleTestCase):
    """Rendering, form rendering, searchable content and widget media for
    FieldBlock and its CharBlock/ChoiceBlock specialisations."""

    def test_charfield_render(self):
        block = blocks.CharBlock()
        html = block.render("Hello world!")
        self.assertEqual(html, "Hello world!")

    def test_charfield_render_with_template(self):
        block = blocks.CharBlock(template='tests/blocks/heading_block.html')
        html = block.render("Hello world!")
        self.assertEqual(html, '<h1>Hello world!</h1>')

    def test_charfield_render_with_template_with_extra_context(self):
        # ContextCharBlock upper-cases the value and passes the caller's
        # context (here 'language') through to the template.
        block = ContextCharBlock(template='tests/blocks/heading_block.html')
        html = block.render("Bonjour le monde!", context={
            'language': 'fr',
        })
        self.assertEqual(html, '<h1 lang="fr">BONJOUR LE MONDE!</h1>')

    def test_charfield_render_with_legacy_get_context(self):
        # Blocks overriding the old single-argument get_context() must still
        # render, but emit a RemovedInWagtail111Warning.
        block = NoExtraContextCharBlock(template='tests/blocks/heading_block.html')
        with warnings.catch_warnings(record=True) as ws:
            warnings.simplefilter('always')
            html = block.render("Bonjour le monde!", context={
                'language': 'fr',
            })
        self.assertEqual(len(ws), 1)
        self.assertIs(ws[0].category, RemovedInWagtail111Warning)
        self.assertEqual(html, '<h1 lang="fr">Bonjour le monde!</h1>')

    def test_charfield_render_with_legacy_get_context_none(self):
        # Same deprecation path, with no context passed at all.
        block = NoExtraContextCharBlock(template='tests/blocks/heading_block.html')
        with warnings.catch_warnings(record=True) as ws:
            warnings.simplefilter('always')
            html = block.render("Bonjour le monde!")
        self.assertEqual(len(ws), 1)
        self.assertIs(ws[0].category, RemovedInWagtail111Warning)
        self.assertEqual(html, '<h1>Bonjour le monde!</h1>')

    def test_charfield_render_form(self):
        block = blocks.CharBlock()
        html = block.render_form("Hello world!")
        self.assertIn('<div class="field char_field widget-text_input">', html)
        self.assertInHTML('<input id="" name="" placeholder="" type="text" value="Hello world!" />', html)

    def test_charfield_render_form_with_prefix(self):
        block = blocks.CharBlock()
        html = block.render_form("Hello world!", prefix='foo')
        self.assertInHTML('<input id="foo" name="foo" placeholder="" type="text" value="Hello world!" />', html)

    def test_charfield_render_form_with_error(self):
        block = blocks.CharBlock()
        html = block.render_form(
            "Hello world!",
            errors=ErrorList([ValidationError("This field is required.")]))
        self.assertIn('This field is required.', html)

    def test_charfield_searchable_content(self):
        block = blocks.CharBlock()
        content = block.get_searchable_content("Hello world!")
        self.assertEqual(content, ["Hello world!"])

    def test_choicefield_render(self):
        class ChoiceBlock(blocks.FieldBlock):
            field = forms.ChoiceField(choices=(
                ('choice-1', "Choice 1"),
                ('choice-2', "Choice 2"),
            ))

        block = ChoiceBlock()
        html = block.render('choice-2')
        self.assertEqual(html, "choice-2")

    def test_choicefield_render_form(self):
        class ChoiceBlock(blocks.FieldBlock):
            field = forms.ChoiceField(choices=(
                ('choice-1', "Choice 1"),
                ('choice-2', "Choice 2"),
            ))

        block = ChoiceBlock()
        html = block.render_form('choice-2')
        self.assertIn('<div class="field choice_field widget-select">', html)
        self.assertTagInHTML('<select id="" name="" placeholder="">', html)
        self.assertInHTML('<option value="choice-1">Choice 1</option>', html)
        self.assertInHTML('<option value="choice-2" selected="selected">Choice 2</option>', html)

    def test_searchable_content(self):
        """
        FieldBlock should not return anything for `get_searchable_content` by
        default. Subclasses are free to override it and provide relevant
        content.
        """
        class CustomBlock(blocks.FieldBlock):
            field = forms.CharField(required=True)
        block = CustomBlock()
        self.assertEqual(block.get_searchable_content("foo bar"), [])

    def test_form_handling_is_independent_of_serialisation(self):
        class Base64EncodingCharBlock(blocks.CharBlock):
            """A CharBlock with a deliberately perverse JSON (de)serialisation format
            so that it visibly blows up if we call to_python / get_prep_value where we shouldn't"""

            def to_python(self, jsonish_value):
                # decode as base64 on the way out of the JSON serialisation
                return base64.b64decode(jsonish_value)

            def get_prep_value(self, native_value):
                # encode as base64 on the way into the JSON serialisation
                return base64.b64encode(native_value)

        block = Base64EncodingCharBlock()
        form_html = block.render_form('hello world', 'title')
        self.assertIn('value="hello world"', form_html)

        value_from_form = block.value_from_datadict({'title': 'hello world'}, {}, 'title')
        self.assertEqual('hello world', value_from_form)

    def test_widget_media(self):
        # A block's all_media() must include the media declared by its
        # field's widget.
        class CalendarWidget(forms.TextInput):
            @property
            def media(self):
                return forms.Media(
                    css={'all': ('pretty.css',)},
                    js=('animations.js', 'actions.js')
                )

        class CalenderBlock(blocks.FieldBlock):
            def __init__(self, required=True, help_text=None, max_length=None, min_length=None, **kwargs):
                # Use the CalendarWidget so its media is picked up.
                self.field = forms.CharField(
                    required=required,
                    help_text=help_text,
                    max_length=max_length,
                    min_length=min_length,
                    widget=CalendarWidget(),
                )
                super(blocks.FieldBlock, self).__init__(**kwargs)

        block = CalenderBlock()
        self.assertIn('pretty.css', ''.join(block.all_media().render_css()))
        self.assertIn('animations.js', ''.join(block.all_media().render_js()))
class TestIntegerBlock(unittest.TestCase):
    """IntegerBlock: form-value conversion plus required/min/max validation."""

    def test_type(self):
        converted = blocks.IntegerBlock().value_from_form(1234)
        self.assertEqual(type(converted), int)

    def test_render(self):
        converted = blocks.IntegerBlock().value_from_form(1234)
        self.assertEqual(converted, 1234)

    def test_render_required_error(self):
        with self.assertRaises(ValidationError):
            blocks.IntegerBlock().clean("")

    def test_render_max_value_validation(self):
        with self.assertRaises(ValidationError):
            blocks.IntegerBlock(max_value=20).clean(25)

    def test_render_min_value_validation(self):
        with self.assertRaises(ValidationError):
            blocks.IntegerBlock(min_value=20).clean(10)
class TestEmailBlock(unittest.TestCase):
    """EmailBlock: rendering and required/format validation."""

    def test_render(self):
        rendered = blocks.EmailBlock().render("example@email.com")
        self.assertEqual(rendered, "example@email.com")

    def test_render_required_error(self):
        with self.assertRaises(ValidationError):
            blocks.EmailBlock().clean("")

    def test_format_validation(self):
        # A string without an @-part must be rejected.
        with self.assertRaises(ValidationError):
            blocks.EmailBlock().clean("example.email.com")
class TestBlockQuoteBlock(unittest.TestCase):
    """BlockQuoteBlock wraps its value in <blockquote> tags."""

    def test_render(self):
        rendered = blocks.BlockQuoteBlock().render("Now is the time...")
        self.assertEqual(rendered, "<blockquote>Now is the time...</blockquote>")
class TestFloatBlock(TestCase):
    """FloatBlock: form-value conversion plus required/min/max validation."""

    def test_type(self):
        converted = blocks.FloatBlock().value_from_form(float(1.63))
        self.assertEqual(type(converted), float)

    def test_render(self):
        expected = float(1.63)
        converted = blocks.FloatBlock().value_from_form(expected)
        self.assertEqual(converted, expected)

    def test_raises_required_error(self):
        with self.assertRaises(ValidationError):
            blocks.FloatBlock().clean("")

    def test_raises_max_value_validation_error(self):
        with self.assertRaises(ValidationError):
            blocks.FloatBlock(max_value=20).clean('20.01')

    def test_raises_min_value_validation_error(self):
        with self.assertRaises(ValidationError):
            blocks.FloatBlock(min_value=20).clean('19.99')
class TestDecimalBlock(TestCase):
    """DecimalBlock: form-value conversion plus required/min/max validation."""

    def test_type(self):
        converted = blocks.DecimalBlock().value_from_form(Decimal('1.63'))
        self.assertEqual(type(converted), Decimal)

    def test_render(self):
        expected = Decimal(1.63)
        converted = blocks.DecimalBlock().value_from_form(expected)
        self.assertEqual(converted, expected)

    def test_raises_required_error(self):
        with self.assertRaises(ValidationError):
            blocks.DecimalBlock().clean("")

    def test_raises_max_value_validation_error(self):
        with self.assertRaises(ValidationError):
            blocks.DecimalBlock(max_value=20).clean('20.01')

    def test_raises_min_value_validation_error(self):
        with self.assertRaises(ValidationError):
            blocks.DecimalBlock(min_value=20).clean('19.99')
class TestRegexBlock(TestCase):
    """RegexBlock: pattern validation with default and custom error messages."""

    def test_render(self):
        test_val = '123'
        block_val = blocks.RegexBlock(regex=r'^[0-9]{3}$').value_from_form(test_val)
        self.assertEqual(block_val, test_val)

    def test_raises_required_error(self):
        with self.assertRaises(ValidationError) as context:
            blocks.RegexBlock(regex=r'^[0-9]{3}$').clean("")
        self.assertIn('This field is required.', context.exception.messages)

    def test_raises_custom_required_error(self):
        test_message = 'Oops, you missed a bit.'
        block = blocks.RegexBlock(regex=r'^[0-9]{3}$', error_messages={
            'required': test_message,
        })
        with self.assertRaises(ValidationError) as context:
            block.clean("")
        self.assertIn(test_message, context.exception.messages)

    def test_raises_validation_error(self):
        with self.assertRaises(ValidationError) as context:
            blocks.RegexBlock(regex=r'^[0-9]{3}$').clean("[/]")
        self.assertIn('Enter a valid value.', context.exception.messages)

    def test_raises_custom_error_message(self):
        test_message = 'Not a valid library card number.'
        block = blocks.RegexBlock(regex=r'^[0-9]{3}$', error_messages={
            'invalid': test_message
        })
        with self.assertRaises(ValidationError) as context:
            block.clean("[/]")
        self.assertIn(test_message, context.exception.messages)

        # The custom message must also appear in the rendered form.
        html = block.render_form(
            "[/]",
            errors=ErrorList([ValidationError(test_message)]))
        self.assertIn(test_message, html)
class TestRichTextBlock(TestCase):
    """RichTextBlock: default values, front-end/editor rendering, validation."""
    # Fixture presumably supplies the page (id=4) that the linktype tests
    # below expand to /events/christmas/ -- TODO confirm against test.json.
    fixtures = ['test.json']

    def test_get_default_with_fallback_value(self):
        default_value = blocks.RichTextBlock().get_default()
        self.assertIsInstance(default_value, RichText)
        self.assertEqual(default_value.source, '')

    def test_get_default_with_default_none(self):
        default_value = blocks.RichTextBlock(default=None).get_default()
        self.assertIsInstance(default_value, RichText)
        self.assertEqual(default_value.source, '')

    def test_get_default_with_empty_string(self):
        default_value = blocks.RichTextBlock(default='').get_default()
        self.assertIsInstance(default_value, RichText)
        self.assertEqual(default_value.source, '')

    def test_get_default_with_nonempty_string(self):
        default_value = blocks.RichTextBlock(default='<p>foo</p>').get_default()
        self.assertIsInstance(default_value, RichText)
        self.assertEqual(default_value.source, '<p>foo</p>')

    def test_get_default_with_richtext_value(self):
        default_value = blocks.RichTextBlock(default=RichText('<p>foo</p>')).get_default()
        self.assertIsInstance(default_value, RichText)
        self.assertEqual(default_value.source, '<p>foo</p>')

    def test_render(self):
        # Front-end rendering expands <a linktype="page"> into a real href.
        block = blocks.RichTextBlock()
        value = RichText('<p>Merry <a linktype="page" id="4">Christmas</a>!</p>')
        result = block.render(value)
        self.assertEqual(
            result, '<div class="rich-text"><p>Merry <a href="/events/christmas/">Christmas</a>!</p></div>'
        )

    def test_render_form(self):
        """
        render_form should produce the editor-specific rendition of the rich text value
        (which includes e.g. 'data-linktype' attributes on <a> elements)
        """
        block = blocks.RichTextBlock()
        value = RichText('<p>Merry <a linktype="page" id="4">Christmas</a>!</p>')
        result = block.render_form(value, prefix='richtext')
        self.assertIn(
            (
                '<p>Merry <a data-linktype="page" data-id="4"'
                ' data-parent-id="3" href="/events/christmas/">Christmas</a>!</p>'
            ),
            result
        )

    def test_validate_required_richtext_block(self):
        block = blocks.RichTextBlock()
        with self.assertRaises(ValidationError):
            block.clean(RichText(''))

    def test_validate_non_required_richtext_block(self):
        block = blocks.RichTextBlock(required=False)
        result = block.clean(RichText(''))
        self.assertIsInstance(result, RichText)
        self.assertEqual(result.source, '')
class TestChoiceBlock(WagtailTestUtils, SimpleTestCase):
    """ChoiceBlock: blank-option handling, optgroups, callable choices,
    searchable content and migration deconstruction."""

    def setUp(self):
        from django.db.models.fields import BLANK_CHOICE_DASH
        # Label Django uses for its auto-added blank choice ("---------").
        self.blank_choice_dash_label = BLANK_CHOICE_DASH[0][1]

    def test_render_required_choice_block(self):
        block = blocks.ChoiceBlock(choices=[('tea', 'Tea'), ('coffee', 'Coffee')])
        html = block.render_form('coffee', prefix='beverage')
        self.assertTagInHTML('<select id="beverage" name="beverage" placeholder="">', html)
        # blank option should still be rendered for required fields
        # (we may want it as an initial value)
        self.assertIn('<option value="">%s</option>' % self.blank_choice_dash_label, html)
        self.assertIn('<option value="tea">Tea</option>', html)
        self.assertInHTML('<option value="coffee" selected="selected">Coffee</option>', html)

    def test_render_required_choice_block_with_default(self):
        block = blocks.ChoiceBlock(choices=[('tea', 'Tea'), ('coffee', 'Coffee')], default='tea')
        html = block.render_form('coffee', prefix='beverage')
        self.assertTagInHTML('<select id="beverage" name="beverage" placeholder="">', html)
        # blank option should NOT be rendered if default and required are set.
        self.assertNotIn('<option value="">%s</option>' % self.blank_choice_dash_label, html)
        self.assertIn('<option value="tea">Tea</option>', html)
        self.assertInHTML('<option value="coffee" selected="selected">Coffee</option>', html)

    def test_render_required_choice_block_with_callable_choices(self):
        # Choices may be supplied as a zero-argument callable.
        def callable_choices():
            return [('tea', 'Tea'), ('coffee', 'Coffee')]

        block = blocks.ChoiceBlock(choices=callable_choices)
        html = block.render_form('coffee', prefix='beverage')
        self.assertTagInHTML('<select id="beverage" name="beverage" placeholder="">', html)
        # blank option should still be rendered for required fields
        # (we may want it as an initial value)
        self.assertIn('<option value="">%s</option>' % self.blank_choice_dash_label, html)
        self.assertIn('<option value="tea">Tea</option>', html)
        self.assertInHTML('<option value="coffee" selected="selected">Coffee</option>', html)

    def test_validate_required_choice_block(self):
        block = blocks.ChoiceBlock(choices=[('tea', 'Tea'), ('coffee', 'Coffee')])
        self.assertEqual(block.clean('coffee'), 'coffee')

        with self.assertRaises(ValidationError):
            block.clean('whisky')

        with self.assertRaises(ValidationError):
            block.clean('')

        with self.assertRaises(ValidationError):
            block.clean(None)

    def test_render_non_required_choice_block(self):
        block = blocks.ChoiceBlock(choices=[('tea', 'Tea'), ('coffee', 'Coffee')], required=False)
        html = block.render_form('coffee', prefix='beverage')
        self.assertTagInHTML('<select id="beverage" name="beverage" placeholder="">', html)
        self.assertIn('<option value="">%s</option>' % self.blank_choice_dash_label, html)
        self.assertIn('<option value="tea">Tea</option>', html)
        self.assertInHTML('<option value="coffee" selected="selected">Coffee</option>', html)

    def test_render_non_required_choice_block_with_callable_choices(self):
        def callable_choices():
            return [('tea', 'Tea'), ('coffee', 'Coffee')]

        block = blocks.ChoiceBlock(choices=callable_choices, required=False)
        html = block.render_form('coffee', prefix='beverage')
        self.assertTagInHTML('<select id="beverage" name="beverage" placeholder="">', html)
        self.assertIn('<option value="">%s</option>' % self.blank_choice_dash_label, html)
        self.assertIn('<option value="tea">Tea</option>', html)
        self.assertInHTML('<option value="coffee" selected="selected">Coffee</option>', html)

    def test_validate_non_required_choice_block(self):
        # Non-required blocks normalise '' and None to ''.
        block = blocks.ChoiceBlock(choices=[('tea', 'Tea'), ('coffee', 'Coffee')], required=False)
        self.assertEqual(block.clean('coffee'), 'coffee')

        with self.assertRaises(ValidationError):
            block.clean('whisky')

        self.assertEqual(block.clean(''), '')
        self.assertEqual(block.clean(None), '')

    def test_render_choice_block_with_existing_blank_choice(self):
        # If the caller already supplies a blank choice, no dash option
        # should be added on top of it.
        block = blocks.ChoiceBlock(
            choices=[('tea', 'Tea'), ('coffee', 'Coffee'), ('', 'No thanks')],
            required=False)
        html = block.render_form(None, prefix='beverage')
        self.assertTagInHTML('<select id="beverage" name="beverage" placeholder="">', html)
        self.assertNotIn('<option value="">%s</option>' % self.blank_choice_dash_label, html)
        self.assertInHTML('<option value="" selected="selected">No thanks</option>', html)
        self.assertIn('<option value="tea">Tea</option>', html)
        self.assertInHTML('<option value="coffee">Coffee</option>', html)

    def test_render_choice_block_with_existing_blank_choice_and_with_callable_choices(self):
        def callable_choices():
            return [('tea', 'Tea'), ('coffee', 'Coffee'), ('', 'No thanks')]

        block = blocks.ChoiceBlock(
            choices=callable_choices,
            required=False)
        html = block.render_form(None, prefix='beverage')
        self.assertTagInHTML('<select id="beverage" name="beverage" placeholder="">', html)
        self.assertNotIn('<option value="">%s</option>' % self.blank_choice_dash_label, html)
        self.assertInHTML('<option value="" selected="selected">No thanks</option>', html)
        self.assertIn('<option value="tea">Tea</option>', html)
        self.assertIn('<option value="coffee">Coffee</option>', html)

    def test_named_groups_without_blank_option(self):
        block = blocks.ChoiceBlock(
            choices=[
                ('Alcoholic', [
                    ('gin', 'Gin'),
                    ('whisky', 'Whisky'),
                ]),
                ('Non-alcoholic', [
                    ('tea', 'Tea'),
                    ('coffee', 'Coffee'),
                ]),
            ])

        # test rendering with the blank option selected
        html = block.render_form(None, prefix='beverage')
        self.assertTagInHTML('<select id="beverage" name="beverage" placeholder="">', html)
        self.assertInHTML('<option value="" selected="selected">%s</option>' % self.blank_choice_dash_label, html)
        self.assertIn('<optgroup label="Alcoholic">', html)
        self.assertIn('<option value="tea">Tea</option>', html)

        # test rendering with a non-blank option selected
        html = block.render_form('tea', prefix='beverage')
        self.assertTagInHTML('<select id="beverage" name="beverage" placeholder="">', html)
        self.assertIn('<option value="">%s</option>' % self.blank_choice_dash_label, html)
        self.assertIn('<optgroup label="Alcoholic">', html)
        self.assertInHTML('<option value="tea" selected="selected">Tea</option>', html)

    def test_named_groups_with_blank_option(self):
        block = blocks.ChoiceBlock(
            choices=[
                ('Alcoholic', [
                    ('gin', 'Gin'),
                    ('whisky', 'Whisky'),
                ]),
                ('Non-alcoholic', [
                    ('tea', 'Tea'),
                    ('coffee', 'Coffee'),
                ]),
                ('Not thirsty', [
                    ('', 'No thanks')
                ]),
            ],
            required=False)

        # test rendering with the blank option selected
        html = block.render_form(None, prefix='beverage')
        self.assertTagInHTML('<select id="beverage" name="beverage" placeholder="">', html)
        self.assertNotIn('<option value="">%s</option>' % self.blank_choice_dash_label, html)
        self.assertNotInHTML('<option value="" selected="selected">%s</option>' % self.blank_choice_dash_label, html)
        self.assertIn('<optgroup label="Alcoholic">', html)
        self.assertIn('<option value="tea">Tea</option>', html)
        self.assertInHTML('<option value="" selected="selected">No thanks</option>', html)

        # test rendering with a non-blank option selected
        html = block.render_form('tea', prefix='beverage')
        self.assertTagInHTML('<select id="beverage" name="beverage" placeholder="">', html)
        self.assertNotIn('<option value="">%s</option>' % self.blank_choice_dash_label, html)
        self.assertNotInHTML('<option value="" selected="selected">%s</option>' % self.blank_choice_dash_label, html)
        self.assertIn('<optgroup label="Alcoholic">', html)
        self.assertInHTML('<option value="tea" selected="selected">Tea</option>', html)

    def test_subclassing(self):
        class BeverageChoiceBlock(blocks.ChoiceBlock):
            choices = [
                ('tea', 'Tea'),
                ('coffee', 'Coffee'),
            ]

        block = BeverageChoiceBlock(required=False)
        html = block.render_form('tea', prefix='beverage')
        self.assertTagInHTML('<select id="beverage" name="beverage" placeholder="">', html)
        self.assertInHTML('<option value="tea" selected="selected">Tea</option>', html)

        # subclasses of ChoiceBlock should deconstruct to a basic ChoiceBlock for migrations
        self.assertEqual(
            block.deconstruct(),
            (
                'wagtail.wagtailcore.blocks.ChoiceBlock',
                [],
                {
                    'choices': [('tea', 'Tea'), ('coffee', 'Coffee')],
                    'required': False,
                },
            )
        )

    def test_searchable_content(self):
        # Search indexing uses the human-readable label, not the stored value.
        block = blocks.ChoiceBlock(choices=[
            ('choice-1', "Choice 1"),
            ('choice-2', "Choice 2"),
        ])
        self.assertEqual(block.get_searchable_content("choice-1"),
                         ["Choice 1"])

    def test_searchable_content_with_callable_choices(self):
        def callable_choices():
            return [
                ('choice-1', "Choice 1"),
                ('choice-2', "Choice 2"),
            ]

        block = blocks.ChoiceBlock(choices=callable_choices)
        self.assertEqual(block.get_searchable_content("choice-1"),
                         ["Choice 1"])

    def test_optgroup_searchable_content(self):
        # For grouped choices, the group label is indexed as well.
        block = blocks.ChoiceBlock(choices=[
            ('Section 1', [
                ('1-1', "Block 1"),
                ('1-2', "Block 2"),
            ]),
            ('Section 2', [
                ('2-1', "Block 1"),
                ('2-2', "Block 2"),
            ]),
        ])
        self.assertEqual(block.get_searchable_content("2-2"),
                         ["Section 2", "Block 2"])

    def test_invalid_searchable_content(self):
        # An unknown value yields no searchable content.
        block = blocks.ChoiceBlock(choices=[
            ('one', 'One'),
            ('two', 'Two'),
        ])
        self.assertEqual(block.get_searchable_content('three'), [])

    def test_searchable_content_with_lazy_translation(self):
        block = blocks.ChoiceBlock(choices=[
            ('choice-1', __("Choice 1")),
            ('choice-2', __("Choice 2")),
        ])
        result = block.get_searchable_content("choice-1")
        # result must survive JSON (de)serialisation, which is not the case for
        # lazy translation objects
        result = json.loads(json.dumps(result))
        self.assertEqual(result, ["Choice 1"])

    def test_optgroup_searchable_content_with_lazy_translation(self):
        block = blocks.ChoiceBlock(choices=[
            (__('Section 1'), [
                ('1-1', __("Block 1")),
                ('1-2', __("Block 2")),
            ]),
            (__('Section 2'), [
                ('2-1', __("Block 1")),
                ('2-2', __("Block 2")),
            ]),
        ])
        result = block.get_searchable_content("2-2")
        # result must survive JSON (de)serialisation, which is not the case for
        # lazy translation objects
        result = json.loads(json.dumps(result))
        self.assertEqual(result, ["Section 2", "Block 2"])

    def test_deconstruct_with_callable_choices(self):
        # Callable choices are preserved as-is when deconstructing.
        def callable_choices():
            return [
                ('tea', 'Tea'),
                ('coffee', 'Coffee'),
            ]

        block = blocks.ChoiceBlock(choices=callable_choices, required=False)
        html = block.render_form('tea', prefix='beverage')
        self.assertTagInHTML('<select id="beverage" name="beverage" placeholder="">', html)
        self.assertInHTML('<option value="tea" selected="selected">Tea</option>', html)

        self.assertEqual(
            block.deconstruct(),
            (
                'wagtail.wagtailcore.blocks.ChoiceBlock',
                [],
                {
                    'choices': callable_choices,
                    'required': False,
                },
            )
        )
class TestRawHTMLBlock(unittest.TestCase):
    """RawHTMLBlock: defaults, (de)serialisation, rendering and cleaning.

    Values coming out of the block (to_python / render / form response) are
    marked safe; values going into storage (get_prep_value) are plain strings.
    """

    def test_get_default_with_fallback_value(self):
        default_value = blocks.RawHTMLBlock().get_default()
        self.assertEqual(default_value, '')
        self.assertIsInstance(default_value, SafeData)

    def test_get_default_with_none(self):
        default_value = blocks.RawHTMLBlock(default=None).get_default()
        self.assertEqual(default_value, '')
        self.assertIsInstance(default_value, SafeData)

    def test_get_default_with_empty_string(self):
        default_value = blocks.RawHTMLBlock(default='').get_default()
        self.assertEqual(default_value, '')
        self.assertIsInstance(default_value, SafeData)

    def test_get_default_with_nonempty_string(self):
        default_value = blocks.RawHTMLBlock(default='<blink>BÖÖM</blink>').get_default()
        self.assertEqual(default_value, '<blink>BÖÖM</blink>')
        self.assertIsInstance(default_value, SafeData)

    def test_serialize(self):
        block = blocks.RawHTMLBlock()
        result = block.get_prep_value(mark_safe('<blink>BÖÖM</blink>'))
        self.assertEqual(result, '<blink>BÖÖM</blink>')
        self.assertNotIsInstance(result, SafeData)

    def test_deserialize(self):
        block = blocks.RawHTMLBlock()
        result = block.to_python('<blink>BÖÖM</blink>')
        self.assertEqual(result, '<blink>BÖÖM</blink>')
        self.assertIsInstance(result, SafeData)

    def test_render(self):
        block = blocks.RawHTMLBlock()
        result = block.render(mark_safe('<blink>BÖÖM</blink>'))
        self.assertEqual(result, '<blink>BÖÖM</blink>')
        self.assertIsInstance(result, SafeData)

    def test_render_form(self):
        block = blocks.RawHTMLBlock()
        result = block.render_form(mark_safe('<blink>BÖÖM</blink>'), prefix='rawhtml')
        self.assertIn('<textarea ', result)
        self.assertIn('name="rawhtml"', result)
        self.assertIn('<blink>BÖÖM</blink>', result)

    def test_form_response(self):
        block = blocks.RawHTMLBlock()
        result = block.value_from_datadict({'rawhtml': '<blink>BÖÖM</blink>'}, {}, prefix='rawhtml')
        self.assertEqual(result, '<blink>BÖÖM</blink>')
        self.assertIsInstance(result, SafeData)

    @unittest.skipIf(django.VERSION < (1, 10, 2), "value_omitted_from_data is not available")
    def test_value_omitted_from_data(self):
        block = blocks.RawHTMLBlock()
        self.assertFalse(block.value_omitted_from_data({'rawhtml': 'ohai'}, {}, 'rawhtml'))
        self.assertFalse(block.value_omitted_from_data({'rawhtml': ''}, {}, 'rawhtml'))
        self.assertTrue(block.value_omitted_from_data({'nothing-here': 'nope'}, {}, 'rawhtml'))

    def test_clean_required_field(self):
        block = blocks.RawHTMLBlock()
        result = block.clean(mark_safe('<blink>BÖÖM</blink>'))
        self.assertEqual(result, '<blink>BÖÖM</blink>')
        self.assertIsInstance(result, SafeData)

        with self.assertRaises(ValidationError):
            block.clean(mark_safe(''))

    def test_clean_nonrequired_field(self):
        block = blocks.RawHTMLBlock(required=False)
        result = block.clean(mark_safe('<blink>BÖÖM</blink>'))
        self.assertEqual(result, '<blink>BÖÖM</blink>')
        self.assertIsInstance(result, SafeData)

        result = block.clean(mark_safe(''))
        self.assertEqual(result, '')
        self.assertIsInstance(result, SafeData)
class TestMeta(unittest.TestCase):
    """Block Meta handling: template resolution, constructor overrides and
    Meta inheritance across single and multiple inheritance chains."""

    def test_set_template_with_meta(self):
        class HeadingBlock(blocks.CharBlock):
            class Meta:
                template = 'heading.html'

        block = HeadingBlock()
        self.assertEqual(block.meta.template, 'heading.html')

    def test_set_template_with_constructor(self):
        block = blocks.CharBlock(template='heading.html')
        self.assertEqual(block.meta.template, 'heading.html')

    def test_set_template_with_constructor_overrides_meta(self):
        # A constructor kwarg wins over the Meta declaration.
        class HeadingBlock(blocks.CharBlock):
            class Meta:
                template = 'heading.html'

        block = HeadingBlock(template='subheading.html')
        self.assertEqual(block.meta.template, 'subheading.html')

    def test_meta_nested_inheritance(self):
        """
        Check that having a multi-level inheritance chain works
        """
        class HeadingBlock(blocks.CharBlock):
            class Meta:
                template = 'heading.html'
                test = 'Foo'

        class SubHeadingBlock(HeadingBlock):
            class Meta:
                template = 'subheading.html'

        block = SubHeadingBlock()
        self.assertEqual(block.meta.template, 'subheading.html')
        self.assertEqual(block.meta.test, 'Foo')

    def test_meta_multi_inheritance(self):
        """
        Check that multi-inheritance and Meta classes work together
        """
        class LeftBlock(blocks.CharBlock):
            class Meta:
                template = 'template.html'
                clash = 'the band'
                label = 'Left block'

        class RightBlock(blocks.CharBlock):
            class Meta:
                default = 'hello'
                clash = 'the album'
                label = 'Right block'

        class ChildBlock(LeftBlock, RightBlock):
            class Meta:
                label = 'Child block'

        block = ChildBlock()
        # These should be directly inherited from the LeftBlock/RightBlock
        self.assertEqual(block.meta.template, 'template.html')
        self.assertEqual(block.meta.default, 'hello')

        # This should be inherited from the LeftBlock, solving the collision,
        # as LeftBlock comes first
        self.assertEqual(block.meta.clash, 'the band')

        # This should come from ChildBlock itself, ignoring the label on
        # LeftBlock/RightBlock
        self.assertEqual(block.meta.label, 'Child block')
class TestStructBlock(SimpleTestCase):
    """Tests for StructBlock: child-block collection order, rendering,
    form output, API representation, form-data decoding and validation."""

    def test_initialisation(self):
        # Child blocks passed as (name, block) tuples are kept in order.
        block = blocks.StructBlock([
            ('title', blocks.CharBlock()),
            ('link', blocks.URLBlock()),
        ])
        self.assertEqual(list(block.child_blocks.keys()), ['title', 'link'])

    def test_initialisation_from_subclass(self):
        # Child blocks can equally be declared as class attributes on a subclass.
        class LinkBlock(blocks.StructBlock):
            title = blocks.CharBlock()
            link = blocks.URLBlock()

        block = LinkBlock()
        self.assertEqual(list(block.child_blocks.keys()), ['title', 'link'])

    def test_initialisation_from_subclass_with_extra(self):
        # Extra (name, block) tuples passed to the constructor are appended
        # after the declared child blocks.
        class LinkBlock(blocks.StructBlock):
            title = blocks.CharBlock()
            link = blocks.URLBlock()

        block = LinkBlock([
            ('classname', blocks.CharBlock())
        ])
        self.assertEqual(list(block.child_blocks.keys()), ['title', 'link', 'classname'])

    def test_initialisation_with_multiple_subclassses(self):
        # Child blocks declared on a subclass come after those of the base class.
        class LinkBlock(blocks.StructBlock):
            title = blocks.CharBlock()
            link = blocks.URLBlock()

        class StyledLinkBlock(LinkBlock):
            classname = blocks.CharBlock()

        block = StyledLinkBlock()
        self.assertEqual(list(block.child_blocks.keys()), ['title', 'link', 'classname'])

    def test_initialisation_with_mixins(self):
        """
        The order of fields of classes with multiple parent classes is slightly
        surprising at first. Child fields are inherited in a bottom-up order,
        by traversing the MRO in reverse. In the example below,
        ``StyledLinkBlock`` will have an MRO of::

            [StyledLinkBlock, StylingMixin, LinkBlock, StructBlock, ...]

        This will result in ``classname`` appearing *after* ``title`` and
        ``link`` in ``StyledLinkBlock.child_blocks``, even though
        ``StylingMixin`` appeared before ``LinkBlock``.
        """
        class LinkBlock(blocks.StructBlock):
            title = blocks.CharBlock()
            link = blocks.URLBlock()

        class StylingMixin(blocks.StructBlock):
            classname = blocks.CharBlock()

        class StyledLinkBlock(StylingMixin, LinkBlock):
            source = blocks.CharBlock()

        block = StyledLinkBlock()
        self.assertEqual(list(block.child_blocks.keys()),
                         ['title', 'link', 'classname', 'source'])

    def test_render(self):
        # The default (template-less) rendering is a definition list pairing
        # child names with their rendered values.
        class LinkBlock(blocks.StructBlock):
            title = blocks.CharBlock()
            link = blocks.URLBlock()

        block = LinkBlock()
        html = block.render(block.to_python({
            'title': "Wagtail site",
            'link': 'http://www.wagtail.io',
        }))
        expected_html = '\n'.join([
            '<dl>',
            '<dt>title</dt>',
            '<dd>Wagtail site</dd>',
            '<dt>link</dt>',
            '<dd>http://www.wagtail.io</dd>',
            '</dl>',
        ])
        self.assertHTMLEqual(html, expected_html)

    def test_get_api_representation_calls_same_method_on_fields_with_context(self):
        """
        The get_api_representation method of a StructBlock should invoke
        the block's get_api_representation method on each field and the
        context should be passed on.
        """
        class ContextBlock(blocks.CharBlock):
            def get_api_representation(self, value, context=None):
                return context[value]

        class AuthorBlock(blocks.StructBlock):
            language = ContextBlock()
            author = ContextBlock()

        block = AuthorBlock()
        api_representation = block.get_api_representation(
            {
                'language': 'en',
                'author': 'wagtail',
            },
            context={
                'en': 'English',
                'wagtail': 'Wagtail!'
            }
        )
        self.assertDictEqual(
            api_representation, {
                'language': 'English',
                'author': 'Wagtail!'
            }
        )

    def test_render_unknown_field(self):
        # Value keys with no matching child block are silently skipped on render.
        class LinkBlock(blocks.StructBlock):
            title = blocks.CharBlock()
            link = blocks.URLBlock()

        block = LinkBlock()
        html = block.render(block.to_python({
            'title': "Wagtail site",
            'link': 'http://www.wagtail.io',
            'image': 10,
        }))
        self.assertIn('<dt>title</dt>', html)
        self.assertIn('<dd>Wagtail site</dd>', html)
        self.assertIn('<dt>link</dt>', html)
        self.assertIn('<dd>http://www.wagtail.io</dd>', html)
        # Don't render the extra item
        self.assertNotIn('<dt>image</dt>', html)

    def test_render_bound_block(self):
        # the string representation of a bound block should be the value as rendered by
        # the associated block
        class SectionBlock(blocks.StructBlock):
            title = blocks.CharBlock()
            body = blocks.RichTextBlock()

        block = SectionBlock()
        struct_value = block.to_python({
            'title': 'hello',
            'body': '<b>world</b>',
        })
        body_bound_block = struct_value.bound_blocks['body']
        expected = '<div class="rich-text"><b>world</b></div>'
        self.assertEqual(str(body_bound_block), expected)

    def test_get_form_context(self):
        # get_form_context exposes the bound children, the block definition
        # and the field prefix to the form template.
        class LinkBlock(blocks.StructBlock):
            title = blocks.CharBlock()
            link = blocks.URLBlock()

        block = LinkBlock()
        context = block.get_form_context(block.to_python({
            'title': "Wagtail site",
            'link': 'http://www.wagtail.io',
        }), prefix='mylink')
        self.assertTrue(isinstance(context['children'], collections.OrderedDict))
        self.assertEqual(len(context['children']), 2)
        self.assertTrue(isinstance(context['children']['title'], blocks.BoundBlock))
        self.assertEqual(context['children']['title'].value, "Wagtail site")
        self.assertTrue(isinstance(context['children']['link'], blocks.BoundBlock))
        self.assertEqual(context['children']['link'].value, 'http://www.wagtail.io')
        self.assertEqual(context['block_definition'], block)
        self.assertEqual(context['prefix'], 'mylink')

    def test_render_form(self):
        # Each child is rendered as a labelled form field with ids/names
        # derived from the prefix.
        class LinkBlock(blocks.StructBlock):
            title = blocks.CharBlock(required=False)
            link = blocks.URLBlock(required=False)

        block = LinkBlock()
        html = block.render_form(block.to_python({
            'title': "Wagtail site",
            'link': 'http://www.wagtail.io',
        }), prefix='mylink')
        self.assertIn('<div class="struct-block">', html)
        self.assertIn('<div class="field char_field widget-text_input fieldname-title">', html)
        self.assertIn('<label for="mylink-title">Title:</label>', html)
        self.assertInHTML(
            '<input id="mylink-title" name="mylink-title" placeholder="Title" type="text" value="Wagtail site" />', html
        )
        self.assertIn('<div class="field url_field widget-url_input fieldname-link">', html)
        self.assertInHTML(
            (
                '<input id="mylink-link" name="mylink-link" placeholder="Link"'
                ' type="url" value="http://www.wagtail.io" />'
            ),
            html
        )
        # Both fields are optional, so no "required" marker should appear.
        self.assertNotIn('<li class="required">', html)

    def test_render_required_field_indicator(self):
        # A required child field produces the "required" list-item marker.
        class LinkBlock(blocks.StructBlock):
            title = blocks.CharBlock()
            link = blocks.URLBlock(required=True)

        block = LinkBlock()
        html = block.render_form(block.to_python({
            'title': "Wagtail site",
            'link': 'http://www.wagtail.io',
        }), prefix='mylink')
        self.assertIn('<li class="required">', html)

    def test_render_form_unknown_field(self):
        # Unknown value keys are skipped in the form output too.
        class LinkBlock(blocks.StructBlock):
            title = blocks.CharBlock()
            link = blocks.URLBlock()

        block = LinkBlock()
        html = block.render_form(block.to_python({
            'title': "Wagtail site",
            'link': 'http://www.wagtail.io',
            'image': 10,
        }), prefix='mylink')
        self.assertInHTML(
            (
                '<input id="mylink-title" name="mylink-title" placeholder="Title"'
                ' type="text" value="Wagtail site" />'
            ),
            html
        )
        self.assertInHTML(
            (
                '<input id="mylink-link" name="mylink-link" placeholder="Link" type="url"'
                ' value="http://www.wagtail.io" />'
            ),
            html
        )
        # Don't render the extra field
        self.assertNotIn('mylink-image', html)

    def test_render_form_uses_default_value(self):
        # Child-block defaults fill in fields that are missing from the value.
        class LinkBlock(blocks.StructBlock):
            title = blocks.CharBlock(default="Torchbox")
            link = blocks.URLBlock(default="http://www.torchbox.com")

        block = LinkBlock()
        html = block.render_form(block.to_python({}), prefix='mylink')
        self.assertInHTML(
            '<input id="mylink-title" name="mylink-title" placeholder="Title" type="text" value="Torchbox" />', html
        )
        self.assertInHTML(
            (
                '<input id="mylink-link" name="mylink-link" placeholder="Link"'
                ' type="url" value="http://www.torchbox.com" />'
            ),
            html
        )

    def test_render_form_with_help_text(self):
        # Meta.help_text is rendered with the form, and the constructor kwarg
        # overrides it.
        class LinkBlock(blocks.StructBlock):
            title = blocks.CharBlock()
            link = blocks.URLBlock()

            class Meta:
                help_text = "Self-promotion is encouraged"

        block = LinkBlock()
        html = block.render_form(block.to_python({
            'title': "Wagtail site",
            'link': 'http://www.wagtail.io',
        }), prefix='mylink')
        self.assertIn('<div class="object-help help">Self-promotion is encouraged</div>', html)
        # check it can be overridden in the block constructor
        block = LinkBlock(help_text="Self-promotion is discouraged")
        html = block.render_form(block.to_python({
            'title': "Wagtail site",
            'link': 'http://www.wagtail.io',
        }), prefix='mylink')
        self.assertIn('<div class="object-help help">Self-promotion is discouraged</div>', html)

    def test_media_inheritance(self):
        # A StructBlock's combined media includes the media of its children.
        class ScriptedCharBlock(blocks.CharBlock):
            media = forms.Media(js=['scripted_char_block.js'])

        class LinkBlock(blocks.StructBlock):
            title = ScriptedCharBlock(default="Torchbox")
            link = blocks.URLBlock(default="http://www.torchbox.com")

        block = LinkBlock()
        self.assertIn('scripted_char_block.js', ''.join(block.all_media().render_js()))

    def test_html_declaration_inheritance(self):
        # all_html_declarations aggregates the declarations of child blocks.
        class CharBlockWithDeclarations(blocks.CharBlock):
            def html_declarations(self):
                return '<script type="text/x-html-template">hello world</script>'

        class LinkBlock(blocks.StructBlock):
            title = CharBlockWithDeclarations(default="Torchbox")
            link = blocks.URLBlock(default="http://www.torchbox.com")

        block = LinkBlock()
        self.assertIn('<script type="text/x-html-template">hello world</script>', block.all_html_declarations())

    def test_searchable_content(self):
        # Only children that contribute searchable content appear; the
        # URLBlock value is excluded here.
        class LinkBlock(blocks.StructBlock):
            title = blocks.CharBlock()
            link = blocks.URLBlock()

        block = LinkBlock()
        content = block.get_searchable_content(block.to_python({
            'title': "Wagtail site",
            'link': 'http://www.wagtail.io',
        }))
        self.assertEqual(content, ["Wagtail site"])

    def test_value_from_datadict(self):
        # Prefixed POST data decodes into a StructValue with bound child blocks.
        block = blocks.StructBlock([
            ('title', blocks.CharBlock()),
            ('link', blocks.URLBlock()),
        ])
        struct_val = block.value_from_datadict({
            'mylink-title': "Torchbox",
            'mylink-link': "http://www.torchbox.com"
        }, {}, 'mylink')
        self.assertEqual(struct_val['title'], "Torchbox")
        self.assertEqual(struct_val['link'], "http://www.torchbox.com")
        self.assertTrue(isinstance(struct_val, blocks.StructValue))
        self.assertTrue(isinstance(struct_val.bound_blocks['link'].block, blocks.URLBlock))

    @unittest.skipIf(django.VERSION < (1, 10, 2), "value_omitted_from_data is not available")
    def test_value_omitted_from_data(self):
        block = blocks.StructBlock([
            ('title', blocks.CharBlock()),
            ('link', blocks.URLBlock()),
        ])
        # overall value is considered present in the form if any sub-field is present
        self.assertFalse(block.value_omitted_from_data({'mylink-title': 'Torchbox'}, {}, 'mylink'))
        self.assertTrue(block.value_omitted_from_data({'nothing-here': 'nope'}, {}, 'mylink'))

    def test_default_is_returned_as_structvalue(self):
        """When returning the default value of a StructBlock (e.g. because it's
        a child of another StructBlock, and the outer value is missing that key)
        we should receive it as a StructValue, not just a plain dict"""
        class PersonBlock(blocks.StructBlock):
            first_name = blocks.CharBlock()
            surname = blocks.CharBlock()

        class EventBlock(blocks.StructBlock):
            title = blocks.CharBlock()
            guest_speaker = PersonBlock(default={'first_name': 'Ed', 'surname': 'Balls'})

        event_block = EventBlock()
        event = event_block.to_python({'title': 'Birthday party'})
        self.assertEqual(event['guest_speaker']['first_name'], 'Ed')
        self.assertTrue(isinstance(event['guest_speaker'], blocks.StructValue))

    def test_clean(self):
        # clean() returns a StructValue on success and raises ValidationError
        # when a child value fails validation.
        block = blocks.StructBlock([
            ('title', blocks.CharBlock()),
            ('link', blocks.URLBlock()),
        ])
        value = block.to_python({'title': 'Torchbox', 'link': 'http://www.torchbox.com/'})
        clean_value = block.clean(value)
        self.assertTrue(isinstance(clean_value, blocks.StructValue))
        self.assertEqual(clean_value['title'], 'Torchbox')
        value = block.to_python({'title': 'Torchbox', 'link': 'not a url'})
        with self.assertRaises(ValidationError):
            block.clean(value)

    def test_bound_blocks_are_available_on_template(self):
        """
        Test that we are able to use value.bound_blocks within templates
        to access a child block's own HTML rendering
        """
        block = SectionBlock()
        value = block.to_python({'title': 'Hello', 'body': '<i>italic</i> world'})
        result = block.render(value)
        self.assertEqual(result, """<h1>Hello</h1><div class="rich-text"><i>italic</i> world</div>""")

    def test_render_block_with_extra_context(self):
        # Extra context passed to render() is available to the block template.
        block = SectionBlock()
        value = block.to_python({'title': 'Bonjour', 'body': 'monde <i>italique</i>'})
        result = block.render(value, context={'language': 'fr'})
        self.assertEqual(result, """<h1 lang="fr">Bonjour</h1><div class="rich-text">monde <i>italique</i></div>""")

    def test_render_structvalue(self):
        """
        The string representation of a StructValue should use the block's template
        """
        block = SectionBlock()
        value = block.to_python({'title': 'Hello', 'body': '<i>italic</i> world'})
        result = str(value)
        self.assertEqual(result, """<h1>Hello</h1><div class="rich-text"><i>italic</i> world</div>""")
        # value.render_as_block() should be equivalent to str(value)
        result = value.render_as_block()
        self.assertEqual(result, """<h1>Hello</h1><div class="rich-text"><i>italic</i> world</div>""")

    def test_render_structvalue_with_extra_context(self):
        # render_as_block() also accepts extra template context.
        block = SectionBlock()
        value = block.to_python({'title': 'Bonjour', 'body': 'monde <i>italique</i>'})
        result = value.render_as_block(context={'language': 'fr'})
        self.assertEqual(result, """<h1 lang="fr">Bonjour</h1><div class="rich-text">monde <i>italique</i></div>""")
class TestListBlock(WagtailTestUtils, SimpleTestCase):
    """Tests for ListBlock: construction, rendering, form output, template
    declarations, defaults and form-submission ordering."""

    def test_initialise_with_class(self):
        block = blocks.ListBlock(blocks.CharBlock)
        # Child block should be initialised for us
        self.assertIsInstance(block.child_block, blocks.CharBlock)

    def test_initialise_with_instance(self):
        # An already-instantiated child block is used as-is.
        child_block = blocks.CharBlock()
        block = blocks.ListBlock(child_block)
        self.assertEqual(block.child_block, child_block)

    def render(self):
        # Shared helper (not itself a test): render a two-item list of
        # LinkBlock values, used by the test_render_uses_* tests below.
        class LinkBlock(blocks.StructBlock):
            title = blocks.CharBlock()
            link = blocks.URLBlock()

        block = blocks.ListBlock(LinkBlock())
        return block.render([
            {
                'title': "Wagtail",
                'link': 'http://www.wagtail.io',
            },
            {
                'title': "Django",
                'link': 'http://www.djangoproject.com',
            },
        ])

    def test_render_uses_ul(self):
        html = self.render()
        self.assertIn('<ul>', html)
        self.assertIn('</ul>', html)

    def test_render_uses_li(self):
        html = self.render()
        self.assertIn('<li>', html)
        self.assertIn('</li>', html)

    def test_render_calls_block_render_on_children(self):
        """
        The default rendering of a ListBlock should invoke the block's render method
        on each child, rather than just outputting the child value as a string.
        """
        block = blocks.ListBlock(
            blocks.CharBlock(template='tests/blocks/heading_block.html')
        )
        html = block.render(["Hello world!", "Goodbye world!"])
        self.assertIn('<h1>Hello world!</h1>', html)
        self.assertIn('<h1>Goodbye world!</h1>', html)

    def test_render_passes_context_to_children(self):
        """
        Template context passed to the render method should be passed on
        to the render method of the child block.
        """
        block = blocks.ListBlock(
            blocks.CharBlock(template='tests/blocks/heading_block.html')
        )
        html = block.render(["Bonjour le monde!", "Au revoir le monde!"], context={
            'language': 'fr',
        })
        self.assertIn('<h1 lang="fr">Bonjour le monde!</h1>', html)
        self.assertIn('<h1 lang="fr">Au revoir le monde!</h1>', html)

    def test_get_api_representation_calls_same_method_on_children_with_context(self):
        """
        The get_api_representation method of a ListBlock should invoke
        the block's get_api_representation method on each child and
        the context should be passed on.
        """
        class ContextBlock(blocks.CharBlock):
            def get_api_representation(self, value, context=None):
                return context[value]

        block = blocks.ListBlock(
            ContextBlock()
        )
        api_representation = block.get_api_representation(["en", "fr"], context={
            'en': 'Hello world!',
            'fr': 'Bonjour le monde!'
        })
        self.assertEqual(
            api_representation, ['Hello world!', 'Bonjour le monde!']
        )

    def render_form(self):
        # Shared helper (not itself a test): form-render a two-item list with
        # prefix 'links', used by the test_render_form_* tests below.
        class LinkBlock(blocks.StructBlock):
            title = blocks.CharBlock()
            link = blocks.URLBlock()

        block = blocks.ListBlock(LinkBlock)
        html = block.render_form([
            {
                'title': "Wagtail",
                'link': 'http://www.wagtail.io',
            },
            {
                'title': "Django",
                'link': 'http://www.djangoproject.com',
            },
        ], prefix='links')
        return html

    def test_render_form_wrapper_class(self):
        html = self.render_form()
        self.assertIn('<div class="sequence-container sequence-type-list">', html)

    def test_render_form_count_field(self):
        # The hidden 'count' field tracks the number of list members.
        html = self.render_form()
        self.assertIn('<input type="hidden" name="links-count" id="links-count" value="2">', html)

    def test_render_form_delete_field(self):
        # Each member carries a hidden 'deleted' flag field.
        html = self.render_form()
        self.assertIn('<input type="hidden" id="links-0-deleted" name="links-0-deleted" value="">', html)

    def test_render_form_order_fields(self):
        # Each member carries a hidden 'order' field for re-ordering.
        html = self.render_form()
        self.assertIn('<input type="hidden" id="links-0-order" name="links-0-order" value="0">', html)
        self.assertIn('<input type="hidden" id="links-1-order" name="links-1-order" value="1">', html)

    def test_render_form_labels(self):
        html = self.render_form()
        self.assertIn('<label for="links-0-value-title">Title:</label>', html)
        self.assertIn('<label for="links-0-value-link">Link:</label>', html)

    def test_render_form_values(self):
        html = self.render_form()
        self.assertInHTML(
            (
                '<input id="links-0-value-title" name="links-0-value-title" placeholder="Title"'
                ' type="text" value="Wagtail" />'
            ),
            html
        )
        self.assertInHTML(
            (
                '<input id="links-0-value-link" name="links-0-value-link" placeholder="Link" type="url"'
                ' value="http://www.wagtail.io" />'
            ),
            html
        )
        self.assertInHTML(
            (
                '<input id="links-1-value-title" name="links-1-value-title" placeholder="Title" type="text"'
                ' value="Django" />'
            ),
            html
        )
        self.assertInHTML(
            (
                '<input id="links-1-value-link" name="links-1-value-link" placeholder="Link"'
                ' type="url" value="http://www.djangoproject.com" />'
            ),
            html
        )

    def test_html_declarations(self):
        # The member template script uses __PREFIX__ placeholders in ids/names.
        class LinkBlock(blocks.StructBlock):
            title = blocks.CharBlock()
            link = blocks.URLBlock()

        block = blocks.ListBlock(LinkBlock)
        html = block.html_declarations()
        self.assertTagInTemplateScript(
            '<input id="__PREFIX__-value-title" name="__PREFIX__-value-title" placeholder="Title" type="text" />',
            html
        )
        self.assertTagInTemplateScript(
            '<input id="__PREFIX__-value-link" name="__PREFIX__-value-link" placeholder="Link" type="url" />',
            html
        )

    def test_html_declarations_uses_default(self):
        # Child-block defaults are baked into the member template script.
        class LinkBlock(blocks.StructBlock):
            title = blocks.CharBlock(default="Github")
            link = blocks.URLBlock(default="http://www.github.com")

        block = blocks.ListBlock(LinkBlock)
        html = block.html_declarations()
        self.assertTagInTemplateScript(
            (
                '<input id="__PREFIX__-value-title" name="__PREFIX__-value-title" placeholder="Title"'
                ' type="text" value="Github" />'
            ),
            html
        )
        self.assertTagInTemplateScript(
            (
                '<input id="__PREFIX__-value-link" name="__PREFIX__-value-link" placeholder="Link"'
                ' type="url" value="http://www.github.com" />'
            ),
            html
        )

    def test_media_inheritance(self):
        # A ListBlock's combined media includes the media of its child block.
        class ScriptedCharBlock(blocks.CharBlock):
            media = forms.Media(js=['scripted_char_block.js'])

        block = blocks.ListBlock(ScriptedCharBlock())
        self.assertIn('scripted_char_block.js', ''.join(block.all_media().render_js()))

    def test_html_declaration_inheritance(self):
        # all_html_declarations aggregates the child block's declarations.
        class CharBlockWithDeclarations(blocks.CharBlock):
            def html_declarations(self):
                return '<script type="text/x-html-template">hello world</script>'

        block = blocks.ListBlock(CharBlockWithDeclarations())
        self.assertIn('<script type="text/x-html-template">hello world</script>', block.all_html_declarations())

    def test_searchable_content(self):
        # Searchable content is collected from each member in order.
        class LinkBlock(blocks.StructBlock):
            title = blocks.CharBlock()
            link = blocks.URLBlock()

        block = blocks.ListBlock(LinkBlock())
        content = block.get_searchable_content([
            {
                'title': "Wagtail",
                'link': 'http://www.wagtail.io',
            },
            {
                'title': "Django",
                'link': 'http://www.djangoproject.com',
            },
        ])
        self.assertEqual(content, ["Wagtail", "Django"])

    @unittest.skipIf(django.VERSION < (1, 10, 2), "value_omitted_from_data is not available")
    def test_value_omitted_from_data(self):
        block = blocks.ListBlock(blocks.CharBlock())
        # overall value is considered present in the form if the 'count' field is present
        self.assertFalse(block.value_omitted_from_data({'mylist-count': '0'}, {}, 'mylist'))
        self.assertFalse(block.value_omitted_from_data({
            'mylist-count': '1',
            'mylist-0-value': 'hello', 'mylist-0-deleted': '', 'mylist-0-order': '0'
        }, {}, 'mylist'))
        self.assertTrue(block.value_omitted_from_data({'nothing-here': 'nope'}, {}, 'mylist'))

    def test_ordering_in_form_submission_uses_order_field(self):
        block = blocks.ListBlock(blocks.CharBlock())
        # check that items are ordered by the 'order' field, not the order they appear in the form
        post_data = {'shoppinglist-count': '3'}
        for i in range(0, 3):
            post_data.update({
                'shoppinglist-%d-deleted' % i: '',
                'shoppinglist-%d-order' % i: str(2 - i),
                'shoppinglist-%d-value' % i: "item %d" % i
            })
        block_value = block.value_from_datadict(post_data, {}, 'shoppinglist')
        self.assertEqual(block_value[2], "item 0")

    def test_ordering_in_form_submission_is_numeric(self):
        block = blocks.ListBlock(blocks.CharBlock())
        # check that items are ordered by 'order' numerically, not alphabetically
        post_data = {'shoppinglist-count': '12'}
        for i in range(0, 12):
            post_data.update({
                'shoppinglist-%d-deleted' % i: '',
                'shoppinglist-%d-order' % i: str(i),
                'shoppinglist-%d-value' % i: "item %d" % i
            })
        block_value = block.value_from_datadict(post_data, {}, 'shoppinglist')
        self.assertEqual(block_value[2], "item 2")

    def test_can_specify_default(self):
        class ShoppingListBlock(blocks.StructBlock):
            shop = blocks.CharBlock()
            items = blocks.ListBlock(blocks.CharBlock(), default=['peas', 'beans', 'carrots'])

        block = ShoppingListBlock()
        # the value here does not specify an 'items' field, so this should revert to the ListBlock's default
        form_html = block.render_form(block.to_python({'shop': 'Tesco'}), prefix='shoppinglist')
        self.assertIn(
            '<input type="hidden" name="shoppinglist-items-count" id="shoppinglist-items-count" value="3">',
            form_html
        )
        self.assertIn('value="peas"', form_html)

    def test_default_default(self):
        """
        if no explicit 'default' is set on the ListBlock, it should fall back on
        a single instance of the child block in its default state.
        """
        class ShoppingListBlock(blocks.StructBlock):
            shop = blocks.CharBlock()
            items = blocks.ListBlock(blocks.CharBlock(default='chocolate'))

        block = ShoppingListBlock()
        # the value here does not specify an 'items' field, so this should revert to the ListBlock's default
        form_html = block.render_form(block.to_python({'shop': 'Tesco'}), prefix='shoppinglist')
        self.assertIn(
            '<input type="hidden" name="shoppinglist-items-count" id="shoppinglist-items-count" value="1">',
            form_html
        )
        self.assertIn('value="chocolate"', form_html)
class TestStreamBlock(WagtailTestUtils, SimpleTestCase):
def test_initialisation(self):
    """Child blocks given as (name, block) tuples are registered in order."""
    stream = blocks.StreamBlock([
        ('heading', blocks.CharBlock()),
        ('paragraph', blocks.CharBlock()),
    ])
    self.assertEqual(['heading', 'paragraph'], list(stream.child_blocks.keys()))
def test_initialisation_with_binary_string_names(self):
    # migrations will sometimes write out names as binary strings, just to keep us on our toes
    # (bytes names are accepted and preserved as-is in child_blocks)
    block = blocks.StreamBlock([
        (b'heading', blocks.CharBlock()),
        (b'paragraph', blocks.CharBlock()),
    ])
    self.assertEqual(list(block.child_blocks.keys()), [b'heading', b'paragraph'])
def test_initialisation_from_subclass(self):
    # Child blocks can be declared as class attributes on a StreamBlock subclass.
    class ArticleBlock(blocks.StreamBlock):
        heading = blocks.CharBlock()
        paragraph = blocks.CharBlock()

    block = ArticleBlock()
    self.assertEqual(list(block.child_blocks.keys()), ['heading', 'paragraph'])
def test_initialisation_from_subclass_with_extra(self):
    # Extra (name, block) tuples passed to the constructor are appended
    # after the declared child blocks.
    class ArticleBlock(blocks.StreamBlock):
        heading = blocks.CharBlock()
        paragraph = blocks.CharBlock()

    block = ArticleBlock([
        ('intro', blocks.CharBlock())
    ])
    self.assertEqual(list(block.child_blocks.keys()), ['heading', 'paragraph', 'intro'])
def test_initialisation_with_multiple_subclassses(self):
    # Child blocks declared on a subclass come after those of the base class.
    class ArticleBlock(blocks.StreamBlock):
        heading = blocks.CharBlock()
        paragraph = blocks.CharBlock()

    class ArticleWithIntroBlock(ArticleBlock):
        intro = blocks.CharBlock()

    block = ArticleWithIntroBlock()
    self.assertEqual(list(block.child_blocks.keys()), ['heading', 'paragraph', 'intro'])
def test_initialisation_with_mixins(self):
    """
    The order of child blocks of ``StreamBlock`` subclasses with multiple
    parent classes is slightly surprising at first. Child blocks are
    inherited in a bottom-up order, by traversing the MRO in reverse. In
    the example below, ``ArticleWithIntroBlock`` will have an MRO of::

        [ArticleWithIntroBlock, IntroMixin, ArticleBlock, StreamBlock, ...]

    This will result in ``intro`` appearing *after* ``heading`` and
    ``paragraph`` in ``ArticleWithIntroBlock.child_blocks``, even though
    ``IntroMixin`` appeared before ``ArticleBlock``.
    """
    class ArticleBlock(blocks.StreamBlock):
        heading = blocks.CharBlock()
        paragraph = blocks.CharBlock()

    class IntroMixin(blocks.StreamBlock):
        intro = blocks.CharBlock()

    class ArticleWithIntroBlock(IntroMixin, ArticleBlock):
        by_line = blocks.CharBlock()

    block = ArticleWithIntroBlock()
    self.assertEqual(list(block.child_blocks.keys()),
                     ['heading', 'paragraph', 'intro', 'by_line'])
def render_article(self, data):
    """Helper (not itself a test): render *data* through a heading/paragraph
    ArticleBlock and return the resulting HTML."""
    class ArticleBlock(blocks.StreamBlock):
        heading = blocks.CharBlock()
        paragraph = blocks.RichTextBlock()

    article = ArticleBlock()
    return article.render(article.to_python(data))
def test_get_api_representation_calls_same_method_on_children_with_context(self):
    """
    The get_api_representation method of a StreamBlock should invoke
    the block's get_api_representation method on each child and
    the context should be passed on.
    """
    class ContextBlock(blocks.CharBlock):
        def get_api_representation(self, value, context=None):
            return context[value]

    block = blocks.StreamBlock([
        ('language', ContextBlock()),
        ('author', ContextBlock()),
    ])
    api_representation = block.get_api_representation(
        block.to_python([
            {'type': 'language', 'value': 'en'},
            {'type': 'author', 'value': 'wagtail'},
        ]),
        context={
            'en': 'English',
            'wagtail': 'Wagtail!'
        }
    )
    # The stream structure ({'type', 'value'} dicts) is preserved; only the
    # 'value' entries are mapped through the child blocks.
    self.assertListEqual(
        api_representation, [
            {'type': 'language', 'value': 'English'},
            {'type': 'author', 'value': 'Wagtail!'},
        ]
    )
def test_render(self):
    # Each stream child is wrapped in a div carrying its block-type class.
    html = self.render_article([
        {
            'type': 'heading',
            'value': "My title",
        },
        {
            'type': 'paragraph',
            'value': 'My <i>first</i> paragraph',
        },
        {
            'type': 'paragraph',
            'value': 'My second paragraph',
        },
    ])
    self.assertIn('<div class="block-heading">My title</div>', html)
    self.assertIn('<div class="block-paragraph"><div class="rich-text">My <i>first</i> paragraph</div></div>', html)
    self.assertIn('<div class="block-paragraph"><div class="rich-text">My second paragraph</div></div>', html)
def test_render_unknown_type(self):
    # This can happen if a developer removes a type from their StreamBlock;
    # entries of an unknown type are dropped from the output entirely.
    html = self.render_article([
        {
            'type': 'foo',
            'value': "Hello",
        },
        {
            'type': 'paragraph',
            'value': 'My first paragraph',
        },
    ])
    self.assertNotIn('foo', html)
    self.assertNotIn('Hello', html)
    self.assertIn('<div class="block-paragraph"><div class="rich-text">My first paragraph</div></div>', html)
def test_render_calls_block_render_on_children(self):
    """
    The default rendering of a StreamBlock should invoke the block's render method
    on each child, rather than just outputting the child value as a string.
    """
    block = blocks.StreamBlock([
        ('heading', blocks.CharBlock(template='tests/blocks/heading_block.html')),
        ('paragraph', blocks.CharBlock()),
    ])
    value = block.to_python([
        {'type': 'heading', 'value': 'Hello'}
    ])
    html = block.render(value)
    self.assertIn('<div class="block-heading"><h1>Hello</h1></div>', html)
    # calling render_as_block() on value (a StreamValue instance)
    # should be equivalent to block.render(value)
    html = value.render_as_block()
    self.assertIn('<div class="block-heading"><h1>Hello</h1></div>', html)
def test_render_passes_context_to_children(self):
    # Extra template context given to render() reaches the child templates.
    block = blocks.StreamBlock([
        ('heading', blocks.CharBlock(template='tests/blocks/heading_block.html')),
        ('paragraph', blocks.CharBlock()),
    ])
    value = block.to_python([
        {'type': 'heading', 'value': 'Bonjour'}
    ])
    html = block.render(value, context={
        'language': 'fr',
    })
    self.assertIn('<div class="block-heading"><h1 lang="fr">Bonjour</h1></div>', html)
    # calling render_as_block(context=foo) on value (a StreamValue instance)
    # should be equivalent to block.render(value, context=foo)
    html = value.render_as_block(context={
        'language': 'fr',
    })
    self.assertIn('<div class="block-heading"><h1 lang="fr">Bonjour</h1></div>', html)
def test_render_on_stream_child_uses_child_template(self):
    """
    Accessing a child element of the stream (giving a StreamChild object) and rendering it
    should use the block template, not just render the value's string representation
    """
    block = blocks.StreamBlock([
        ('heading', blocks.CharBlock(template='tests/blocks/heading_block.html')),
        ('paragraph', blocks.CharBlock()),
    ])
    value = block.to_python([
        {'type': 'heading', 'value': 'Hello'}
    ])
    html = value[0].render()
    self.assertEqual('<h1>Hello</h1>', html)
    # StreamChild.__str__ should do the same
    html = str(value[0])
    self.assertEqual('<h1>Hello</h1>', html)
    # and so should StreamChild.render_as_block
    html = value[0].render_as_block()
    self.assertEqual('<h1>Hello</h1>', html)
def test_can_pass_context_to_stream_child_template(self):
    # A StreamChild's render()/render_as_block() accept extra template context.
    block = blocks.StreamBlock([
        ('heading', blocks.CharBlock(template='tests/blocks/heading_block.html')),
        ('paragraph', blocks.CharBlock()),
    ])
    value = block.to_python([
        {'type': 'heading', 'value': 'Bonjour'}
    ])
    html = value[0].render(context={'language': 'fr'})
    self.assertEqual('<h1 lang="fr">Bonjour</h1>', html)
    # the same functionality should be available through the alias `render_as_block`
    html = value[0].render_as_block(context={'language': 'fr'})
    self.assertEqual('<h1 lang="fr">Bonjour</h1>', html)
def render_form(self):
    # Shared helper (not itself a test): form-render a three-item stream with
    # prefix 'myarticle', used by the test_render_form_* tests below.
    class ArticleBlock(blocks.StreamBlock):
        heading = blocks.CharBlock()
        paragraph = blocks.CharBlock()

    block = ArticleBlock()
    value = block.to_python([
        {
            'type': 'heading',
            'value': "My title",
        },
        {
            'type': 'paragraph',
            'value': 'My first paragraph',
        },
        {
            'type': 'paragraph',
            'value': 'My second paragraph',
        },
    ])
    return block.render_form(value, prefix='myarticle')
def test_render_form_wrapper_class(self):
    """The stream form is wrapped in a sequence-type-stream container div."""
    html = self.render_form()
    self.assertIn('<div class="sequence-container sequence-type-stream">', html)
def test_render_form_count_field(self):
    """A hidden 'count' field tracks the number of stream members."""
    html = self.render_form()
    self.assertIn('<input type="hidden" name="myarticle-count" id="myarticle-count" value="3">', html)
def test_render_form_delete_field(self):
    """Each stream member carries a hidden 'deleted' flag field."""
    html = self.render_form()
    self.assertIn('<input type="hidden" id="myarticle-0-deleted" name="myarticle-0-deleted" value="">', html)
def test_render_form_order_fields(self):
    """Each stream member carries a hidden 'order' field for re-ordering."""
    html = self.render_form()
    self.assertIn('<input type="hidden" id="myarticle-0-order" name="myarticle-0-order" value="0">', html)
    self.assertIn('<input type="hidden" id="myarticle-1-order" name="myarticle-1-order" value="1">', html)
    self.assertIn('<input type="hidden" id="myarticle-2-order" name="myarticle-2-order" value="2">', html)
def test_render_form_type_fields(self):
    """Each stream member carries a hidden 'type' field naming its block."""
    html = self.render_form()
    self.assertIn('<input type="hidden" id="myarticle-0-type" name="myarticle-0-type" value="heading">', html)
    self.assertIn('<input type="hidden" id="myarticle-1-type" name="myarticle-1-type" value="paragraph">', html)
    self.assertIn('<input type="hidden" id="myarticle-2-type" name="myarticle-2-type" value="paragraph">', html)
def test_render_form_value_fields(self):
    """Each stream member renders its value through the child block's widget."""
    html = self.render_form()
    self.assertInHTML(
        (
            '<input id="myarticle-0-value" name="myarticle-0-value" placeholder="Heading"'
            ' type="text" value="My title" />'
        ),
        html
    )
    self.assertInHTML(
        (
            '<input id="myarticle-1-value" name="myarticle-1-value" placeholder="Paragraph"'
            ' type="text" value="My first paragraph" />'
        ),
        html
    )
    self.assertInHTML(
        (
            '<input id="myarticle-2-value" name="myarticle-2-value" placeholder="Paragraph"'
            ' type="text" value="My second paragraph" />'
        ),
        html
    )
@unittest.skipIf(django.VERSION < (1, 10, 2), "value_omitted_from_data is not available")
def test_value_omitted_from_data(self):
    block = blocks.StreamBlock([
        ('heading', blocks.CharBlock()),
    ])
    # overall value is considered present in the form if the 'count' field is present
    self.assertFalse(block.value_omitted_from_data({'mystream-count': '0'}, {}, 'mystream'))
    self.assertFalse(block.value_omitted_from_data({
        'mystream-count': '1',
        'mystream-0-type': 'heading', 'mystream-0-value': 'hello',
        'mystream-0-deleted': '', 'mystream-0-order': '0'
    }, {}, 'mystream'))
    self.assertTrue(block.value_omitted_from_data({'nothing-here': 'nope'}, {}, 'mystream'))
def test_validation_errors(self):
    # clean() collects per-child validation errors, keyed by the child's
    # position in the stream (in the exception's params dict).
    class ValidatedBlock(blocks.StreamBlock):
        char = blocks.CharBlock()
        url = blocks.URLBlock()

    block = ValidatedBlock()
    value = [
        blocks.BoundBlock(
            block=block.child_blocks['char'],
            value='',  # invalid: CharBlock is required by default
        ),
        blocks.BoundBlock(
            block=block.child_blocks['char'],
            value='foo',
        ),
        blocks.BoundBlock(
            block=block.child_blocks['url'],
            value='http://example.com/',
        ),
        blocks.BoundBlock(
            block=block.child_blocks['url'],
            value='not a url',  # invalid URL
        ),
    ]
    with self.assertRaises(ValidationError) as catcher:
        block.clean(value)
    self.assertEqual(catcher.exception.params, {
        0: ['This field is required.'],
        3: ['Enter a valid URL.'],
    })
def test_block_level_validation_renders_errors(self):
    """A stream-level ValidationError is rendered as a help-critical message."""
    block = FooStreamBlock()

    post_data = {'stream-count': '2'}
    for i, value in enumerate(['bar', 'baz']):
        post_data.update({
            'stream-%d-deleted' % i: '',
            'stream-%d-order' % i: str(i),
            'stream-%d-type' % i: 'text',
            'stream-%d-value' % i: value,
        })

    block_value = block.value_from_datadict(post_data, {}, 'stream')
    with self.assertRaises(ValidationError) as catcher:
        block.clean(block_value)

    errors = ErrorList([
        catcher.exception
    ])

    self.assertInHTML(
        format_html('<div class="help-block help-critical">{}</div>', FooStreamBlock.error),
        block.render_form(block_value, prefix='stream', errors=errors))

def test_block_level_validation_render_no_errors(self):
    """Valid data (containing 'foo') passes clean() and renders no error block."""
    block = FooStreamBlock()

    post_data = {'stream-count': '3'}
    for i, value in enumerate(['foo', 'bar', 'baz']):
        post_data.update({
            'stream-%d-deleted' % i: '',
            'stream-%d-order' % i: str(i),
            'stream-%d-type' % i: 'text',
            'stream-%d-value' % i: value,
        })

    block_value = block.value_from_datadict(post_data, {}, 'stream')

    try:
        block.clean(block_value)
    except ValidationError:
        self.fail('Should have passed validation')

    self.assertInHTML(
        format_html('<div class="help-block help-critical">{}</div>', FooStreamBlock.error),
        block.render_form(block_value, prefix='stream'),
        count=0)
def test_html_declarations(self):
    """html_declarations() emits a __PREFIX__ template input per child block."""
    class ArticleBlock(blocks.StreamBlock):
        heading = blocks.CharBlock()
        paragraph = blocks.CharBlock()

    block = ArticleBlock()
    html = block.html_declarations()

    self.assertTagInTemplateScript('<input id="__PREFIX__-value" name="__PREFIX__-value" placeholder="Heading" type="text" />', html)
    self.assertTagInTemplateScript(
        '<input id="__PREFIX__-value" name="__PREFIX__-value" placeholder="Paragraph" type="text" />',
        html
    )

def test_html_declarations_uses_default(self):
    """Child-block defaults are baked into the template inputs' value attrs."""
    class ArticleBlock(blocks.StreamBlock):
        heading = blocks.CharBlock(default="Fish found on moon")
        paragraph = blocks.CharBlock(default="Lorem ipsum dolor sit amet")

    block = ArticleBlock()
    html = block.html_declarations()

    self.assertTagInTemplateScript(
        (
            '<input id="__PREFIX__-value" name="__PREFIX__-value" placeholder="Heading"'
            ' type="text" value="Fish found on moon" />'
        ),
        html
    )
    self.assertTagInTemplateScript(
        (
            '<input id="__PREFIX__-value" name="__PREFIX__-value" placeholder="Paragraph" type="text"'
            ' value="Lorem ipsum dolor sit amet" />'
        ),
        html
    )
def test_media_inheritance(self):
    """all_media() aggregates media declared on child blocks."""
    class ScriptedCharBlock(blocks.CharBlock):
        media = forms.Media(js=['scripted_char_block.js'])

    class ArticleBlock(blocks.StreamBlock):
        heading = ScriptedCharBlock()
        paragraph = blocks.CharBlock()

    block = ArticleBlock()
    self.assertIn('scripted_char_block.js', ''.join(block.all_media().render_js()))

def test_html_declaration_inheritance(self):
    """all_html_declarations() aggregates declarations from child blocks."""
    class CharBlockWithDeclarations(blocks.CharBlock):
        def html_declarations(self):
            return '<script type="text/x-html-template">hello world</script>'

    class ArticleBlock(blocks.StreamBlock):
        heading = CharBlockWithDeclarations(default="Torchbox")
        paragraph = blocks.CharBlock()

    block = ArticleBlock()
    self.assertIn('<script type="text/x-html-template">hello world</script>', block.all_html_declarations())
def test_ordering_in_form_submission_uses_order_field(self):
    """Submitted items are ordered by their 'order' field, not form order."""
    class ArticleBlock(blocks.StreamBlock):
        heading = blocks.CharBlock()
        paragraph = blocks.CharBlock()

    block = ArticleBlock()

    # check that items are ordered by the 'order' field, not the order they appear in the form
    post_data = {'article-count': '3'}
    for i in range(0, 3):
        post_data.update({
            'article-%d-deleted' % i: '',
            'article-%d-order' % i: str(2 - i),
            'article-%d-type' % i: 'heading',
            'article-%d-value' % i: "heading %d" % i
        })

    block_value = block.value_from_datadict(post_data, {}, 'article')
    self.assertEqual(block_value[2].value, "heading 0")

def test_ordering_in_form_submission_is_numeric(self):
    """'order' values sort numerically, so '10' comes after '2', not before."""
    class ArticleBlock(blocks.StreamBlock):
        heading = blocks.CharBlock()
        paragraph = blocks.CharBlock()

    block = ArticleBlock()

    # check that items are ordered by 'order' numerically, not alphabetically
    post_data = {'article-count': '12'}
    for i in range(0, 12):
        post_data.update({
            'article-%d-deleted' % i: '',
            'article-%d-order' % i: str(i),
            'article-%d-type' % i: 'heading',
            'article-%d-value' % i: "heading %d" % i
        })

    block_value = block.value_from_datadict(post_data, {}, 'article')
    self.assertEqual(block_value[2].value, "heading 2")
def test_searchable_content(self):
    """get_searchable_content() returns each child's text, in stream order."""
    class ArticleBlock(blocks.StreamBlock):
        heading = blocks.CharBlock()
        paragraph = blocks.CharBlock()

    block = ArticleBlock()
    value = block.to_python([
        {
            'type': 'heading',
            'value': "My title",
        },
        {
            'type': 'paragraph',
            'value': 'My first paragraph',
        },
        {
            'type': 'paragraph',
            'value': 'My second paragraph',
        },
    ])
    content = block.get_searchable_content(value)

    self.assertEqual(content, [
        "My title",
        "My first paragraph",
        "My second paragraph",
    ])
def test_meta_default(self):
    """Test that we can specify a default value in the Meta of a StreamBlock"""
    class ArticleBlock(blocks.StreamBlock):
        heading = blocks.CharBlock()
        paragraph = blocks.CharBlock()

        class Meta:
            default = [('heading', 'A default heading')]

    # to access the default value, we retrieve it through a StructBlock
    # from a struct value that's missing that key
    class ArticleContainerBlock(blocks.StructBlock):
        author = blocks.CharBlock()
        article = ArticleBlock()

    block = ArticleContainerBlock()
    struct_value = block.to_python({'author': 'Bob'})
    stream_value = struct_value['article']

    self.assertTrue(isinstance(stream_value, blocks.StreamValue))
    self.assertEqual(len(stream_value), 1)
    self.assertEqual(stream_value[0].block_type, 'heading')
    self.assertEqual(stream_value[0].value, 'A default heading')

def test_constructor_default(self):
    """Test that we can specify a default value in the constructor of a StreamBlock"""
    class ArticleBlock(blocks.StreamBlock):
        heading = blocks.CharBlock()
        paragraph = blocks.CharBlock()

        class Meta:
            default = [('heading', 'A default heading')]

    # to access the default value, we retrieve it through a StructBlock
    # from a struct value that's missing that key
    class ArticleContainerBlock(blocks.StructBlock):
        author = blocks.CharBlock()
        # constructor kwarg should take precedence over the Meta default
        article = ArticleBlock(default=[('heading', 'A different default heading')])

    block = ArticleContainerBlock()
    struct_value = block.to_python({'author': 'Bob'})
    stream_value = struct_value['article']

    self.assertTrue(isinstance(stream_value, blocks.StreamValue))
    self.assertEqual(len(stream_value), 1)
    self.assertEqual(stream_value[0].block_type, 'heading')
    self.assertEqual(stream_value[0].value, 'A different default heading')
def test_stream_value_equality(self):
    """StreamValues compare equal iff their contents are equal."""
    block = blocks.StreamBlock([
        ('text', blocks.CharBlock()),
    ])
    value1 = block.to_python([{'type': 'text', 'value': 'hello'}])
    value2 = block.to_python([{'type': 'text', 'value': 'hello'}])
    value3 = block.to_python([{'type': 'text', 'value': 'goodbye'}])

    # check both == and != explicitly, since they are separate dunder methods
    self.assertTrue(value1 == value2)
    self.assertFalse(value1 != value2)
    self.assertFalse(value1 == value3)
    self.assertTrue(value1 != value3)
class TestPageChooserBlock(TestCase):
    """Serialization, form rendering, validation, target_model resolution and
    deconstruction of PageChooserBlock."""
    fixtures = ['test.json']

    def test_serialize(self):
        """The value of a PageChooserBlock (a Page object) should serialize to an ID"""
        block = blocks.PageChooserBlock()
        christmas_page = Page.objects.get(slug='christmas')
        self.assertEqual(block.get_prep_value(christmas_page), christmas_page.id)

        # None should serialize to None
        self.assertEqual(block.get_prep_value(None), None)

    def test_deserialize(self):
        """The serialized value of a PageChooserBlock (an ID) should deserialize to a Page object"""
        block = blocks.PageChooserBlock()
        christmas_page = Page.objects.get(slug='christmas')
        self.assertEqual(block.to_python(christmas_page.id), christmas_page)

        # None should deserialize to None
        self.assertEqual(block.to_python(None), None)

    def test_form_render(self):
        """Empty and populated forms both render the hidden input + JS chooser."""
        block = blocks.PageChooserBlock(help_text="pick a page, any page")

        empty_form_html = block.render_form(None, 'page')
        self.assertInHTML('<input id="page" name="page" placeholder="" type="hidden" />', empty_form_html)
        self.assertIn('createPageChooser("page", ["wagtailcore.page"], null, false);', empty_form_html)

        christmas_page = Page.objects.get(slug='christmas')
        christmas_form_html = block.render_form(christmas_page, 'page')
        expected_html = '<input id="page" name="page" placeholder="" type="hidden" value="%d" />' % christmas_page.id
        self.assertInHTML(expected_html, christmas_form_html)
        self.assertIn("pick a page, any page", christmas_form_html)

    def test_form_render_with_target_model_default(self):
        block = blocks.PageChooserBlock()
        empty_form_html = block.render_form(None, 'page')
        self.assertIn('createPageChooser("page", ["wagtailcore.page"], null, false);', empty_form_html)

    def test_form_render_with_target_model_string(self):
        block = blocks.PageChooserBlock(help_text="pick a page, any page", target_model='tests.SimplePage')
        empty_form_html = block.render_form(None, 'page')
        self.assertIn('createPageChooser("page", ["tests.simplepage"], null, false);', empty_form_html)

    def test_form_render_with_target_model_literal(self):
        block = blocks.PageChooserBlock(help_text="pick a page, any page", target_model=SimplePage)
        empty_form_html = block.render_form(None, 'page')
        self.assertIn('createPageChooser("page", ["tests.simplepage"], null, false);', empty_form_html)

    def test_form_render_with_target_model_multiple_strings(self):
        block = blocks.PageChooserBlock(help_text="pick a page, any page", target_model=['tests.SimplePage', 'tests.EventPage'])
        empty_form_html = block.render_form(None, 'page')
        self.assertIn('createPageChooser("page", ["tests.simplepage", "tests.eventpage"], null, false);', empty_form_html)

    def test_form_render_with_target_model_multiple_literals(self):
        block = blocks.PageChooserBlock(help_text="pick a page, any page", target_model=[SimplePage, EventPage])
        empty_form_html = block.render_form(None, 'page')
        self.assertIn('createPageChooser("page", ["tests.simplepage", "tests.eventpage"], null, false);', empty_form_html)

    def test_form_render_with_can_choose_root(self):
        block = blocks.PageChooserBlock(help_text="pick a page, any page", can_choose_root=True)
        empty_form_html = block.render_form(None, 'page')
        # final JS argument flips to true when root may be chosen
        self.assertIn('createPageChooser("page", ["wagtailcore.page"], null, true);', empty_form_html)

    def test_form_response(self):
        block = blocks.PageChooserBlock()
        christmas_page = Page.objects.get(slug='christmas')

        value = block.value_from_datadict({'page': str(christmas_page.id)}, {}, 'page')
        self.assertEqual(value, christmas_page)

        empty_value = block.value_from_datadict({'page': ''}, {}, 'page')
        self.assertEqual(empty_value, None)

    def test_clean(self):
        """Required blocks reject None; non-required blocks pass it through."""
        required_block = blocks.PageChooserBlock()
        nonrequired_block = blocks.PageChooserBlock(required=False)
        christmas_page = Page.objects.get(slug='christmas')

        self.assertEqual(required_block.clean(christmas_page), christmas_page)
        with self.assertRaises(ValidationError):
            required_block.clean(None)

        self.assertEqual(nonrequired_block.clean(christmas_page), christmas_page)
        self.assertEqual(nonrequired_block.clean(None), None)

    def test_target_model_default(self):
        block = blocks.PageChooserBlock()
        self.assertEqual(block.target_model, Page)

    def test_target_model_string(self):
        block = blocks.PageChooserBlock(target_model='tests.SimplePage')
        self.assertEqual(block.target_model, SimplePage)

    def test_target_model_literal(self):
        block = blocks.PageChooserBlock(target_model=SimplePage)
        self.assertEqual(block.target_model, SimplePage)

    def test_target_model_multiple_strings(self):
        # multiple target models collapse to the common base Page class
        block = blocks.PageChooserBlock(target_model=['tests.SimplePage', 'tests.EventPage'])
        self.assertEqual(block.target_model, Page)

    def test_target_model_multiple_literals(self):
        block = blocks.PageChooserBlock(target_model=[SimplePage, EventPage])
        self.assertEqual(block.target_model, Page)

    def test_deconstruct_target_model_default(self):
        block = blocks.PageChooserBlock()
        self.assertEqual(block.deconstruct(), (
            'wagtail.wagtailcore.blocks.PageChooserBlock',
            (), {}))

    def test_deconstruct_target_model_string(self):
        block = blocks.PageChooserBlock(target_model='tests.SimplePage')
        self.assertEqual(block.deconstruct(), (
            'wagtail.wagtailcore.blocks.PageChooserBlock',
            (), {'target_model': ['tests.SimplePage']}))

    def test_deconstruct_target_model_literal(self):
        block = blocks.PageChooserBlock(target_model=SimplePage)
        self.assertEqual(block.deconstruct(), (
            'wagtail.wagtailcore.blocks.PageChooserBlock',
            (), {'target_model': ['tests.SimplePage']}))

    def test_deconstruct_target_model_multiple_strings(self):
        block = blocks.PageChooserBlock(target_model=['tests.SimplePage', 'tests.EventPage'])
        self.assertEqual(block.deconstruct(), (
            'wagtail.wagtailcore.blocks.PageChooserBlock',
            (), {'target_model': ['tests.SimplePage', 'tests.EventPage']}))

    def test_deconstruct_target_model_multiple_literals(self):
        block = blocks.PageChooserBlock(target_model=[SimplePage, EventPage])
        self.assertEqual(block.deconstruct(), (
            'wagtail.wagtailcore.blocks.PageChooserBlock',
            (), {'target_model': ['tests.SimplePage', 'tests.EventPage']}))
class TestStaticBlock(unittest.TestCase):
    """StaticBlock: admin-only text rendering, defaults and (de)serialization
    of its always-None value."""

    def test_render_form_with_constructor(self):
        block = blocks.StaticBlock(
            admin_text="Latest posts - This block doesn't need to be configured, it will be displayed automatically",
            template='tests/blocks/posts_static_block.html')
        rendered_html = block.render_form(None)

        self.assertEqual(rendered_html, "Latest posts - This block doesn't need to be configured, it will be displayed automatically")

    def test_render_form_with_subclass(self):
        class PostsStaticBlock(blocks.StaticBlock):
            class Meta:
                admin_text = "Latest posts - This block doesn't need to be configured, it will be displayed automatically"
                template = "tests/blocks/posts_static_block.html"

        block = PostsStaticBlock()
        rendered_html = block.render_form(None)

        self.assertEqual(rendered_html, "Latest posts - This block doesn't need to be configured, it will be displayed automatically")

    def test_render_form_with_subclass_displays_default_text_if_no_admin_text(self):
        class LabelOnlyStaticBlock(blocks.StaticBlock):
            class Meta:
                label = "Latest posts"

        block = LabelOnlyStaticBlock()
        rendered_html = block.render_form(None)

        self.assertEqual(rendered_html, "Latest posts: this block has no options.")

    def test_render_form_with_subclass_displays_default_text_if_no_admin_text_and_no_label(self):
        class NoMetaStaticBlock(blocks.StaticBlock):
            pass

        block = NoMetaStaticBlock()
        rendered_html = block.render_form(None)

        self.assertEqual(rendered_html, "This block has no options.")

    def test_render_form_works_with_mark_safe(self):
        # admin_text marked safe should be emitted without HTML escaping
        block = blocks.StaticBlock(
            admin_text=mark_safe("<b>Latest posts</b> - This block doesn't need to be configured, it will be displayed automatically"),
            template='tests/blocks/posts_static_block.html')
        rendered_html = block.render_form(None)

        self.assertEqual(rendered_html, "<b>Latest posts</b> - This block doesn't need to be configured, it will be displayed automatically")

    def test_get_default(self):
        block = blocks.StaticBlock()
        default_value = block.get_default()
        self.assertEqual(default_value, None)

    def test_render(self):
        block = blocks.StaticBlock(template='tests/blocks/posts_static_block.html')
        result = block.render(None)
        self.assertEqual(result, '<p>PostsStaticBlock template</p>')

    def test_serialize(self):
        block = blocks.StaticBlock()
        result = block.get_prep_value(None)
        self.assertEqual(result, None)

    def test_deserialize(self):
        block = blocks.StaticBlock()
        result = block.to_python(None)
        self.assertEqual(result, None)
class TestDateBlock(TestCase):
    """DateBlock form rendering, with default and custom date formats."""

    def test_render_form(self):
        block = blocks.DateBlock()
        value = date(2015, 8, 13)
        result = block.render_form(value, prefix='dateblock')

        # we should see the JS initialiser code:
        # <script>initDateChooser("dateblock", {"dayOfWeekStart": 0, "format": "Y-m-d"});</script>
        # except that we can't predict the order of the config options
        self.assertIn('<script>initDateChooser("dateblock", {', result)
        self.assertIn('"dayOfWeekStart": 0', result)
        self.assertIn('"format": "Y-m-d"', result)

        self.assertInHTML(
            '<input id="dateblock" name="dateblock" placeholder="" type="text" value="2015-08-13" />',
            result
        )

    def test_render_form_with_format(self):
        # Python strftime format is translated to the JS picker's format tokens
        block = blocks.DateBlock(format='%d.%m.%Y')
        value = date(2015, 8, 13)
        result = block.render_form(value, prefix='dateblock')

        self.assertIn('<script>initDateChooser("dateblock", {', result)
        self.assertIn('"dayOfWeekStart": 0', result)
        self.assertIn('"format": "d.m.Y"', result)
        self.assertInHTML(
            '<input id="dateblock" name="dateblock" placeholder="" type="text" value="13.08.2015" />',
            result
        )
class TestDateTimeBlock(TestCase):
    """DateTimeBlock form rendering with a custom datetime format."""

    def test_render_form_with_format(self):
        block = blocks.DateTimeBlock(format='%d.%m.%Y %H:%M')
        value = datetime(2015, 8, 13, 10, 0)
        result = block.render_form(value, prefix='datetimeblock')

        # strftime format is translated to the JS picker's format tokens
        self.assertIn(
            '"format": "d.m.Y H:i"',
            result
        )
        self.assertInHTML(
            '<input id="datetimeblock" name="datetimeblock" placeholder="" type="text" value="13.08.2015 10:00" />',
            result
        )
class TestSystemCheck(TestCase):
    """System checks (wagtailcore.E001) for invalid StreamBlock child names:
    empty names, spaces, dashes, leading digits — including recursion into
    nested ListBlocks, StreamBlocks and StructBlocks."""

    def test_name_must_be_nonempty(self):
        block = blocks.StreamBlock([
            ('heading', blocks.CharBlock()),
            ('', blocks.RichTextBlock()),
        ])

        errors = block.check()
        self.assertEqual(len(errors), 1)
        self.assertEqual(errors[0].id, 'wagtailcore.E001')
        self.assertEqual(errors[0].hint, "Block name cannot be empty")
        self.assertEqual(errors[0].obj, block.child_blocks[''])

    def test_name_cannot_contain_spaces(self):
        block = blocks.StreamBlock([
            ('heading', blocks.CharBlock()),
            ('rich text', blocks.RichTextBlock()),
        ])

        errors = block.check()
        self.assertEqual(len(errors), 1)
        self.assertEqual(errors[0].id, 'wagtailcore.E001')
        self.assertEqual(errors[0].hint, "Block names cannot contain spaces")
        self.assertEqual(errors[0].obj, block.child_blocks['rich text'])

    def test_name_cannot_contain_dashes(self):
        block = blocks.StreamBlock([
            ('heading', blocks.CharBlock()),
            ('rich-text', blocks.RichTextBlock()),
        ])

        errors = block.check()
        self.assertEqual(len(errors), 1)
        self.assertEqual(errors[0].id, 'wagtailcore.E001')
        self.assertEqual(errors[0].hint, "Block names cannot contain dashes")
        self.assertEqual(errors[0].obj, block.child_blocks['rich-text'])

    def test_name_cannot_begin_with_digit(self):
        block = blocks.StreamBlock([
            ('heading', blocks.CharBlock()),
            ('99richtext', blocks.RichTextBlock()),
        ])

        errors = block.check()
        self.assertEqual(len(errors), 1)
        self.assertEqual(errors[0].id, 'wagtailcore.E001')
        self.assertEqual(errors[0].hint, "Block names cannot begin with a digit")
        self.assertEqual(errors[0].obj, block.child_blocks['99richtext'])

    def test_system_checks_recurse_into_lists(self):
        failing_block = blocks.RichTextBlock()
        block = blocks.StreamBlock([
            ('paragraph_list', blocks.ListBlock(
                blocks.StructBlock([
                    ('heading', blocks.CharBlock()),
                    ('rich text', failing_block),
                ])
            ))
        ])

        errors = block.check()
        self.assertEqual(len(errors), 1)
        self.assertEqual(errors[0].id, 'wagtailcore.E001')
        self.assertEqual(errors[0].hint, "Block names cannot contain spaces")
        self.assertEqual(errors[0].obj, failing_block)

    def test_system_checks_recurse_into_streams(self):
        failing_block = blocks.RichTextBlock()
        block = blocks.StreamBlock([
            ('carousel', blocks.StreamBlock([
                ('text', blocks.StructBlock([
                    ('heading', blocks.CharBlock()),
                    ('rich text', failing_block),
                ]))
            ]))
        ])

        errors = block.check()
        self.assertEqual(len(errors), 1)
        self.assertEqual(errors[0].id, 'wagtailcore.E001')
        self.assertEqual(errors[0].hint, "Block names cannot contain spaces")
        self.assertEqual(errors[0].obj, failing_block)

    def test_system_checks_recurse_into_structs(self):
        failing_block_1 = blocks.RichTextBlock()
        failing_block_2 = blocks.RichTextBlock()
        block = blocks.StreamBlock([
            ('two_column', blocks.StructBlock([
                ('left', blocks.StructBlock([
                    ('heading', blocks.CharBlock()),
                    ('rich text', failing_block_1),
                ])),
                ('right', blocks.StructBlock([
                    ('heading', blocks.CharBlock()),
                    ('rich text', failing_block_2),
                ]))
            ]))
        ])

        errors = block.check()
        self.assertEqual(len(errors), 2)
        self.assertEqual(errors[0].id, 'wagtailcore.E001')
        self.assertEqual(errors[0].hint, "Block names cannot contain spaces")
        self.assertEqual(errors[0].obj, failing_block_1)
        self.assertEqual(errors[1].id, 'wagtailcore.E001')
        self.assertEqual(errors[1].hint, "Block names cannot contain spaces")
        # Bug fix: this previously re-checked errors[0].obj (already asserted
        # against failing_block_1 above); the second error must point at the
        # second failing block.
        self.assertEqual(errors[1].obj, failing_block_2)
class TestTemplateRendering(TestCase):
    """Custom get_context / get_form_context hooks on a block subclass."""

    def test_render_with_custom_context(self):
        block = CustomLinkBlock()
        value = block.to_python({'title': 'Torchbox', 'url': 'http://torchbox.com/'})
        # extra context supplied by the caller must reach the template
        context = {'classname': 'important'}
        result = block.render(value, context)

        self.assertEqual(result, '<a href="http://torchbox.com/" class="important">Torchbox</a>')

    def test_render_with_custom_form_context(self):
        block = CustomLinkBlock()
        value = block.to_python({'title': 'Torchbox', 'url': 'http://torchbox.com/'})
        result = block.render_form(value, prefix='my-link-block')

        self.assertIn('data-prefix="my-link-block"', result)
        self.assertIn('<p>Hello from get_form_context!</p>', result)
class TestIncludeBlockTag(TestCase):
    """The {% include_block %} template tag against every kind of renderable
    value (BoundBlock, StructValue, StreamValue, plain values), plus the
    'with'/'only' modifiers and filtered arguments."""

    def test_include_block_tag_with_boundblock(self):
        """
        The include_block tag should be able to render a BoundBlock's template
        while keeping the parent template's context
        """
        block = blocks.CharBlock(template='tests/blocks/heading_block.html')
        bound_block = block.bind('bonjour')

        result = render_to_string('tests/blocks/include_block_test.html', {
            'test_block': bound_block,
            'language': 'fr',
        })
        self.assertIn('<body><h1 lang="fr">bonjour</h1></body>', result)

    def test_include_block_tag_with_structvalue(self):
        """
        The include_block tag should be able to render a StructValue's template
        while keeping the parent template's context
        """
        block = SectionBlock()
        struct_value = block.to_python({'title': 'Bonjour', 'body': 'monde <i>italique</i>'})

        result = render_to_string('tests/blocks/include_block_test.html', {
            'test_block': struct_value,
            'language': 'fr',
        })

        self.assertIn(
            """<body><h1 lang="fr">Bonjour</h1><div class="rich-text">monde <i>italique</i></div></body>""",
            result
        )

    def test_include_block_tag_with_streamvalue(self):
        """
        The include_block tag should be able to render a StreamValue's template
        while keeping the parent template's context
        """
        block = blocks.StreamBlock([
            ('heading', blocks.CharBlock(template='tests/blocks/heading_block.html')),
            ('paragraph', blocks.CharBlock()),
        ], template='tests/blocks/stream_with_language.html')

        stream_value = block.to_python([
            {'type': 'heading', 'value': 'Bonjour'}
        ])

        result = render_to_string('tests/blocks/include_block_test.html', {
            'test_block': stream_value,
            'language': 'fr',
        })

        self.assertIn('<div class="heading" lang="fr"><h1 lang="fr">Bonjour</h1></div>', result)

    def test_include_block_tag_with_plain_value(self):
        """
        The include_block tag should be able to render a value without a render_as_block method
        by just rendering it as a string
        """
        result = render_to_string('tests/blocks/include_block_test.html', {
            'test_block': 42,
        })

        self.assertIn('<body>42</body>', result)

    def test_include_block_tag_with_filtered_value(self):
        """
        The block parameter on include_block tag should support complex values including filters,
        e.g. {% include_block foo|default:123 %}
        """
        block = blocks.CharBlock(template='tests/blocks/heading_block.html')
        bound_block = block.bind('bonjour')

        result = render_to_string('tests/blocks/include_block_test_with_filter.html', {
            'test_block': bound_block,
            'language': 'fr',
        })
        self.assertIn('<body><h1 lang="fr">bonjour</h1></body>', result)

        # a None value should fall through to the filter's default (999)
        result = render_to_string('tests/blocks/include_block_test_with_filter.html', {
            'test_block': None,
            'language': 'fr',
        })
        self.assertIn('<body>999</body>', result)

    def test_include_block_tag_with_extra_context(self):
        """
        Test that it's possible to pass extra context on an include_block tag using
        {% include_block foo with classname="bar" %}
        """
        block = blocks.CharBlock(template='tests/blocks/heading_block.html')
        bound_block = block.bind('bonjour')

        result = render_to_string('tests/blocks/include_block_with_test.html', {
            'test_block': bound_block,
            'language': 'fr',
        })
        self.assertIn('<body><h1 lang="fr" class="important">bonjour</h1></body>', result)

    def test_include_block_tag_with_only_flag(self):
        """
        A tag such as {% include_block foo with classname="bar" only %}
        should not inherit the parent context
        """
        block = blocks.CharBlock(template='tests/blocks/heading_block.html')
        bound_block = block.bind('bonjour')

        result = render_to_string('tests/blocks/include_block_only_test.html', {
            'test_block': bound_block,
            'language': 'fr',
        })
        # 'language' from the parent context must NOT leak through: no lang attr
        self.assertIn('<body><h1 class="important">bonjour</h1></body>', result)
| Toshakins/wagtail | wagtail/wagtailcore/tests/test_blocks.py | Python | bsd-3-clause | 106,304 |
import shapefile, sys
import Image, ImageDraw
import argparse
def longest_len(items):
    """Return the length of the longest element of *items*.

    Raises ValueError when *items* is empty (propagated from max()).
    The parameter was renamed from 'list' to avoid shadowing the builtin;
    all call sites pass it positionally.
    """
    return len(max(items, key=len))
def clear_file():
    """Truncate the report file named by the global CLI args ('<output>.txt')."""
    out_path = args['output'] + '.txt'
    with open(out_path, 'w') as handle:
        handle.write('')
def print_or_fout(text):
    """Append *text* to '<output>.txt' when an output file is configured,
    otherwise echo it to stdout."""
    if args['output'] == False:
        sys.stdout.write(text)
        return
    with open(args['output'] + '.txt', 'a') as handle:
        handle.write(text)
def show_attribute_columns(r, tab_lens=False):
    """Emit the shapefile's attribute field names as one tab-padded header row.

    *tab_lens* optionally supplies per-column widths; by default each column is
    the field-name length plus two spaces of padding.
    """
    names = [field[0] for field in r.fields if field[0] != 'DeletionFlag']
    if tab_lens == False:
        tab_lens = [len(name) + 2 for name in names]
    for idx, name in enumerate(names):
        print_or_fout((name + "\t").expandtabs(tab_lens[idx]))
    print_or_fout("\n")
def show_attribute_table(r):
    """Emit the full attribute table, one tab-padded row per record, with
    column widths sized to the longest value (header included)."""
    recs = r.records()
    # Append the header row so it is included when measuring column widths.
    # NOTE(review): this mutates the list returned by r.records(); if the
    # reader caches that list, the header leaks into later calls - verify.
    recs.append([y[0] for y in r.fields if y[0] !='DeletionFlag'])
    tab_lens = [longest_len(y)+2 for y in [list(x) for x in zip(*recs)]]
    show_attribute_columns(r,tab_lens)
    for rec in r.records():
        for i, item in enumerate(rec):
            print_or_fout((item+"\t").expandtabs(tab_lens[i]))
        print_or_fout("\n")
def process(r):
    """Render shapefile *r* to PNG image(s), honouring the global CLI args.

    Applies --filter KEY VALUE pairs, optionally splits output per unique
    value of a [bracketed] group-by attribute, and writes
    <output>_<group>_polygon.png (plus <output>_<group>_lines.png when
    --outline is enabled).  NOTE: this file uses Python 2 print syntax.
    """
    iwidth = args['width']
    # end if
    filter_idxs = []
    groupby_idxs = []
    if len(args['filter']):
        # Plain filter entries are KEY VALUE pairs; a key wrapped in square
        # brackets ([KEY]) selects a group-by attribute instead.
        filter_keys = [x[0] for x in args['filter'] if x[0][0] != '[']
        groupby_keys = [x[0][1:-1] for x in args['filter'] if x[0][0] == '[']
        vals = [x[1] for x in args['filter'] if x[0][0] != '[' ]
        fields=[y[0] for y in r.fields]
        # NOTE(review): a filter key absent from the attribute table is
        # silently dropped here, which misaligns filter_idxs with vals below -
        # confirm whether missing keys should instead be an error.
        filter_idxs =[fields.index(x)-1 for x in filter_keys if x in fields]
        groupby_idxs =[fields.index(x)-1 for x in groupby_keys if x in fields]
        if len(groupby_idxs) >0: #group by active
            group_bys = list(set([x[y] for x in r.records() for y in groupby_idxs if x[y].replace(' ','') != ''])) #unique values in group by clause
        else:
            group_bys = ['']
    else:
        group_bys = ['']
    # end if
    # end if
    for group_by in group_bys:
        feature_counter = 0
        if group_by != '':
            sys.stdout.write ("group by "+group_by +'.....')
        xys=[]
        boxes = []
        # Collect point lists and bounding boxes of every matching feature.
        for sr in r.shapeRecords():
            if len(filter_idxs)>0:
                # Keep the feature if at least one filter pair matches.
                n = len([i for i,x in enumerate(filter_idxs) if sr.record[x]==vals[i]])
                if n ==0:
                    continue
                # end if
            # end if
            if group_by != '':
                n = len([i for i,x in enumerate(groupby_idxs) if sr.record[x]==group_by])
                if n ==0:
                    continue
                # end if
            # end if
            feature_counter = feature_counter + 1
            shape=sr.shape
            boxes.append(shape.bbox)
            xy = []
            for x,y in shape.points:
                xy.append((x,y))
            xys.append(xy)
        print "features: "+str(feature_counter)
        # NOTE(review): boxes[0] raises IndexError when no feature matched the
        # filter/group - confirm whether an empty selection should be skipped.
        nbbox = boxes[0]
        # Grow nbbox into the union of all matched features' bounding boxes
        # (bbox layout: [xmin, ymin, xmax, ymax]).
        for bbox in boxes:
            if bbox[2] > nbbox[2]:
                # end if
                nbbox[2] = bbox[2]
            # end if
            if bbox[3] > nbbox[3]:
                # end if
                nbbox[3] = bbox[3]
            # end if
            if bbox[0] < nbbox[0]:
                # end if
                nbbox[0] = bbox[0]
            # end if
            if bbox[1] < nbbox[1]:
                # end if
                nbbox[1] = bbox[1]
            # end if
        # end for
        bbox=nbbox
        # Derive the image height from the data's aspect ratio at the
        # requested width, then the map-to-pixel scale factors.
        xdist = bbox[2] - bbox[0]
        ydist = bbox[3] - bbox[1]
        ratio=xdist/ydist
        iheight = int(iwidth/ratio)
        xratio = iwidth/xdist
        yratio = iheight/ydist
        img = Image.new("RGB", (iwidth, iheight), "white")
        img2 = Image.new("RGB", (iwidth, iheight), "white")
        # Fully transparent alpha channel for both canvases.
        transparent_area = (0,0,iwidth,iheight)
        mask=Image.new('L', (iwidth, iheight), color=255)
        draw=ImageDraw.Draw(mask)
        draw.rectangle(transparent_area, fill=0)
        img.putalpha(mask)
        img2.putalpha(mask)
        draw = ImageDraw.Draw(img)
        draw2 = ImageDraw.Draw(img2)
        for pts in xys:
            pixels2 = []
            for x,y in pts:
                # Map map-space coordinates into image-space pixels
                # (y axis flipped: image origin is top-left).
                px = int(iwidth - ((bbox[2] - x) * xratio))
                py = int((bbox[3] - y) * yratio)
                pixels2.append((px,py))
            draw.polygon(pixels2, outline=args['stroke'], fill=args['fill'])
            if args['outline'] != False:
                draw2.polygon(pixels2, outline=args['outline'])
        img.save(args['output']+"_"+group_by+"_polygon.png")
        if args['outline'] != False:
            img2.save(args['output']+"_"+group_by+"_lines.png")
        # end if
parser = argparse.ArgumentParser(description="Produce .png image from shapefile. Optionally filter by attribute value(s).")
parser.add_argument('input' ,help='input filename (without .shp)')
parser.add_argument('-o','--output', default=False, required=False, help='output filename (without .png)')
parser.add_argument('-w', '--width', type=int, default=4333)
parser.add_argument('--fill', type=str, default='rgb(255, 255, 255)', help="polygon fill color. defaults to \"rgb(255, 255, 255)\"")
parser.add_argument('--stroke', type=str, default='rgb(255, 255, 255)', help="polygon stroke color. defaults to \"rgb(255, 255, 255)\"")
parser.add_argument('--outline', type=str, nargs="?", default=False, help="show outline. takes optional color parameter. defaults to \"rgb(0, 0, 0)\"")
parser.add_argument('-f', '--filter', nargs='*', action='append', default=[], help='include features matched by this key-value pair')
parser.add_argument('--fields', default=False, action='store_true', help = 'output the attribute fields')
parser.add_argument('--table', default=False, action='store_true', help = 'output the attribute table')
args = vars(parser.parse_args())
if args['outline'] == False:
args['outline'] = 'rgb(0, 0, 0)'
myshp = open(args['input']+".shp", "rb")
mydbf = open(args['input']+".dbf", "rb")
r = shapefile.Reader(shp=myshp, shx=None, dbf=mydbf)
if args['fields'] or args['table']:
if args['output']:
clear_file()
if args['table']==False:
tab_len = longest_len(fields=[y[0] for y in r.fields if y[0] !='DeletionFlag'])
else:
tab_len = longest_len([x for rec in r.records() for x in rec])
if args['fields']:
show_attribute_columns(r)
if args['table']:
show_attribute_table(r)
else:
if args['output'] == False:
parser.error('argument -o/--output is required')
else:
process(r) | mfurlend/shp2png | shp2png.py | Python | apache-2.0 | 6,022 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Package metadata for nbdiff."""

__author__ = 'The NBDiff Team'
__email__ = 'tavisharmstrong@gmail.com'
__version__ = '1.0.4'
| tarmstrong/nbdiff | nbdiff/__init__.py | Python | mit | 140 |
#!/usr/bin/env python
# encoding: utf-8
"""
@version: python.3.6
@author: zhangjiaheng
@software: PyCharm
@time: 2017/9/19 14:17
"""
import unittest,sys
from test_case.page_obj import login_page
from models import myunit,function
from time import sleep
class TestLogin(myunit.MyTest):
    '''Log in with a username/password account and verify the landing page.'''

    def test_login(self, username="18701016443", password="18701016443"):
        # Drive the login page object: open, switch to the
        # username/password tab, fill in credentials and captcha, submit.
        po = login_page.LoginPage(self.driver)
        po.open()
        po.loginshow()
        po.changeloginbyup()
        po.username(username)
        po.password(password)
        po.imagecode1()
        po.loginbyupdo()
        sleep(3)
        # NOTE(review): assumes the post-login page exposes this exact
        # greeting text - confirm against the page object.
        assert po.login_sucess() == "哈哈哈哈哈"
        # Capture a screenshot for the test report.
        function.insert_img(self.driver, "login.png")
        print(po.login_sucess())
        sleep(3)
if __name__ == "__main__":
unittest.main()
| 18701016443/mayi | mayi/test_case/test_login.py | Python | mit | 829 |
from .. import Provider as InternetProvider
class Provider(InternetProvider):
    """Slovenian (sl_SI) internet provider: localized user-name patterns,
    free e-mail domains, URI building blocks and TLDs."""

    user_name_formats = (
        '{{last_name}}.{{first_name_female}}',
        '{{last_name}}.{{first_name_male}}',
        '{{first_name_female}}.{{last_name}}',
        '{{first_name_male}}.{{last_name}}',
        '{{first_name}}##',
    )
    email_formats = ('{{user_name}}@{{free_email_domain}}', )
    free_email_domains = (
        'gmail.com', 'siol.net', 'email.si', 'volja.net',
    )
    uri_pages = (
        'index', 'domov', 'iskanje', 'main', 'novica',
        'homepage', 'kategorija', 'registracija', 'login',
        'faq', 'o-nas', 'pogoji',
        'zasebnost', 'avtor',
    )
    uri_paths = (
        'app', 'main', 'wp-content', 'iskanje', 'kategorija', 'novica',
        'kategorije', 'novice', 'blog', 'komentarji', 'seznam')
    # Duplicate entries weight the random choice toward the common extensions.
    uri_extensions = (
        '.html', '.html', '.html', '.htm', '.htm', '.php',
        '.php', '.jsp', '.asp',
    )
    tlds = ('si', 'com')
| danhuss/faker | faker/providers/internet/sl_SI/__init__.py | Python | mit | 983 |
'''
New Integration Test for enabled nested virt
@author: Quarkonics
'''
import zstackwoodpecker.test_util as test_util
import zstackwoodpecker.test_lib as test_lib
def test():
    """Verify that nested virtualization is enabled on the first host
    of the deployment plan.

    Two checks are performed over SSH:
      1. the live kernel parameter /sys/module/kvm_intel/parameters/nested
         reports 'Y';
      2. the persistent modprobe option 'options kvm_intel nested=1' is
         present in /etc/modprobe.d/kvm-nested.conf.
    Fails the woodpecker test case if either check does not pass.
    """
    host = test_lib.lib_get_all_hosts_from_plan()[0]
    # Live check: the loaded kvm_intel module must have nested support on.
    cmd = 'cat /sys/module/kvm_intel/parameters/nested'
    cmd_out = test_lib.lib_execute_ssh_cmd(host.managementIp_, host.username_, host.password_, cmd, 180)
    if 'Y' not in cmd_out:
        test_util.test_fail('nested virt not enabled')
    # Persistent check: the setting must survive a reboot via modprobe.d.
    cmd = 'cat /etc/modprobe.d/kvm-nested.conf'
    cmd_out = test_lib.lib_execute_ssh_cmd(host.managementIp_, host.username_, host.password_, cmd, 180)
    if 'options kvm_intel nested=1' not in cmd_out:
        # Fix: the message now names the file actually inspected
        # (kvm-nested.conf); it previously said 'kvm_nested.conf'.
        test_util.test_fail('nested virt not enabled in /etc/modprobe.d/kvm-nested.conf')
    test_util.test_pass('Enable nested virt Test Success')
#Will be called only if exception happens in test().
def error_cleanup():
    """No-op: test() creates no resources, so there is nothing to clean up."""
    test_util.test_logger('no clean up needed')
| zstackorg/zstack-woodpecker | integrationtest/vm/basic/test_nested_virt.py | Python | apache-2.0 | 985 |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: add the nullable ``data`` TextField to the
    ``TagValue`` model (table ``sentry_filtervalue``).

    The large ``models`` dict below is the auto-generated frozen ORM
    snapshot South uses for this migration; do not edit it by hand.
    """
    def forwards(self, orm):
        """Apply: create the nullable ``data`` column."""
        # Adding field 'TagValue.data'
        db.add_column('sentry_filtervalue', 'data',
                      self.gf('django.db.models.fields.TextField')(null=True, blank=True),
                      keep_default=False)
    def backwards(self, orm):
        """Revert: drop the ``data`` column."""
        # Deleting field 'TagValue.data'
        db.delete_column('sentry_filtervalue', 'data')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'sentry.accessgroup': {
'Meta': {'unique_together': "(('team', 'name'),)", 'object_name': 'AccessGroup'},
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'managed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'members': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.User']", 'symmetrical': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'projects': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.Project']", 'symmetrical': 'False'}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sentry.Team']"}),
'type': ('django.db.models.fields.IntegerField', [], {'default': '50'})
},
'sentry.activity': {
'Meta': {'object_name': 'Activity'},
'data': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'event': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sentry.Event']", 'null': 'True'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sentry.Group']", 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sentry.Project']"}),
'type': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'})
},
'sentry.affecteduserbygroup': {
'Meta': {'unique_together': "(('project', 'tuser', 'group'),)", 'object_name': 'AffectedUserByGroup'},
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sentry.Project']"}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'tuser': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sentry.TrackedUser']", 'null': 'True'})
},
'sentry.alert': {
'Meta': {'object_name': 'Alert'},
'data': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sentry.Group']", 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sentry.Project']"}),
'related_groups': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'related_alerts'", 'symmetrical': 'False', 'through': "orm['sentry.AlertRelatedGroup']", 'to': "orm['sentry.Group']"}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'})
},
'sentry.alertrelatedgroup': {
'Meta': {'unique_together': "(('group', 'alert'),)", 'object_name': 'AlertRelatedGroup'},
'alert': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sentry.Alert']"}),
'data': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'sentry.event': {
'Meta': {'unique_together': "(('project', 'event_id'),)", 'object_name': 'Event', 'db_table': "'sentry_message'"},
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'culprit': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'db_column': "'view'", 'blank': 'True'}),
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'db_column': "'message_id'"}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'event_set'", 'null': 'True', 'to': "orm['sentry.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'default': '40', 'db_index': 'True', 'blank': 'True'}),
'logger': ('django.db.models.fields.CharField', [], {'default': "'root'", 'max_length': '64', 'db_index': 'True', 'blank': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'num_comments': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'null': 'True'}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'server_name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'db_index': 'True'}),
'site': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'db_index': 'True'}),
'time_spent': ('django.db.models.fields.FloatField', [], {'null': 'True'})
},
'sentry.eventmapping': {
'Meta': {'unique_together': "(('project', 'event_id'),)", 'object_name': 'EventMapping'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sentry.Project']"})
},
'sentry.group': {
'Meta': {'unique_together': "(('project', 'checksum'),)", 'object_name': 'Group', 'db_table': "'sentry_groupedmessage'"},
'active_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'culprit': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'db_column': "'view'", 'blank': 'True'}),
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_public': ('django.db.models.fields.NullBooleanField', [], {'default': 'False', 'null': 'True', 'blank': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'default': '40', 'db_index': 'True', 'blank': 'True'}),
'logger': ('django.db.models.fields.CharField', [], {'default': "'root'", 'max_length': '64', 'db_index': 'True', 'blank': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'num_comments': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'null': 'True'}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'resolved_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'time_spent_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'time_spent_total': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1', 'db_index': 'True'}),
'users_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'})
},
'sentry.groupbookmark': {
'Meta': {'unique_together': "(('project', 'user', 'group'),)", 'object_name': 'GroupBookmark'},
'group': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'bookmark_set'", 'to': "orm['sentry.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'bookmark_set'", 'to': "orm['sentry.Project']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'sentry_bookmark_set'", 'to': "orm['auth.User']"})
},
'sentry.groupcountbyminute': {
'Meta': {'unique_together': "(('project', 'group', 'date'),)", 'object_name': 'GroupCountByMinute', 'db_table': "'sentry_messagecountbyminute'"},
'date': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'time_spent_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'time_spent_total': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'sentry.groupmeta': {
'Meta': {'unique_together': "(('group', 'key'),)", 'object_name': 'GroupMeta'},
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'value': ('django.db.models.fields.TextField', [], {})
},
'sentry.grouptag': {
'Meta': {'unique_together': "(('project', 'key', 'value', 'group'),)", 'object_name': 'GroupTag', 'db_table': "'sentry_messagefiltervalue'"},
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'sentry.grouptagkey': {
'Meta': {'unique_together': "(('project', 'group', 'key'),)", 'object_name': 'GroupTagKey'},
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'values_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'sentry.lostpasswordhash': {
'Meta': {'object_name': 'LostPasswordHash'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'hash': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'unique': 'True'})
},
'sentry.option': {
'Meta': {'object_name': 'Option'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '64'}),
'value': ('picklefield.fields.PickledObjectField', [], {})
},
'sentry.pendingteammember': {
'Meta': {'unique_together': "(('team', 'email'),)", 'object_name': 'PendingTeamMember'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'pending_member_set'", 'to': "orm['sentry.Team']"}),
'type': ('django.db.models.fields.IntegerField', [], {'default': '50'})
},
'sentry.project': {
'Meta': {'unique_together': "(('team', 'slug'),)", 'object_name': 'Project'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'sentry_owned_project_set'", 'null': 'True', 'to': "orm['auth.User']"}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'null': 'True'}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sentry.Team']", 'null': 'True'})
},
'sentry.projectcountbyminute': {
'Meta': {'unique_together': "(('project', 'date'),)", 'object_name': 'ProjectCountByMinute'},
'date': ('django.db.models.fields.DateTimeField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'time_spent_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'time_spent_total': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'sentry.projectkey': {
'Meta': {'object_name': 'ProjectKey'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'key_set'", 'to': "orm['sentry.Project']"}),
'public_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
'secret_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'user_added': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'keys_added_set'", 'null': 'True', 'to': "orm['auth.User']"})
},
'sentry.projectoption': {
'Meta': {'unique_together': "(('project', 'key'),)", 'object_name': 'ProjectOption', 'db_table': "'sentry_projectoptions'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sentry.Project']"}),
'value': ('picklefield.fields.PickledObjectField', [], {})
},
'sentry.searchdocument': {
'Meta': {'unique_together': "(('project', 'group'),)", 'object_name': 'SearchDocument'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_changed': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sentry.Project']"}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'total_events': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'})
},
'sentry.searchtoken': {
'Meta': {'unique_together': "(('document', 'field', 'token'),)", 'object_name': 'SearchToken'},
'document': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'token_set'", 'to': "orm['sentry.SearchDocument']"}),
'field': ('django.db.models.fields.CharField', [], {'default': "'text'", 'max_length': '64'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'token': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'sentry.tagkey': {
'Meta': {'unique_together': "(('project', 'key'),)", 'object_name': 'TagKey', 'db_table': "'sentry_filterkey'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sentry.Project']"}),
'values_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'sentry.tagvalue': {
'Meta': {'unique_together': "(('project', 'key', 'value'),)", 'object_name': 'TagValue', 'db_table': "'sentry_filtervalue'"},
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'sentry.team': {
'Meta': {'object_name': 'Team'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'members': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'team_memberships'", 'symmetrical': 'False', 'through': "orm['sentry.TeamMember']", 'to': "orm['auth.User']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'})
},
'sentry.teammember': {
'Meta': {'unique_together': "(('team', 'user'),)", 'object_name': 'TeamMember'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'member_set'", 'to': "orm['sentry.Team']"}),
'type': ('django.db.models.fields.IntegerField', [], {'default': '50'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'sentry_teammember_set'", 'to': "orm['auth.User']"})
},
'sentry.trackeduser': {
'Meta': {'unique_together': "(('project', 'ident'),)", 'object_name': 'TrackedUser'},
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.Group']", 'through': "orm['sentry.AffectedUserByGroup']", 'symmetrical': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'num_events': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sentry.Project']"})
},
'sentry.useroption': {
'Meta': {'unique_together': "(('user', 'project', 'key'),)", 'object_name': 'UserOption'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'value': ('picklefield.fields.PickledObjectField', [], {})
}
}
complete_apps = ['sentry'] | beni55/sentry | src/sentry/migrations/0096_auto__add_field_tagvalue_data.py | Python | bsd-3-clause | 28,624 |
from __future__ import absolute_import
__all__ = ['ProviderManager']
from freight.exceptions import InvalidProvider
class ProviderManager(object):
    """Registry mapping provider names to the classes that implement them."""

    def __init__(self):
        # name -> provider class
        self.providers = {}

    def add(self, name, cls):
        """Register *cls* under *name*, replacing any previous entry."""
        self.providers[name] = cls

    def get(self, name, **kwargs):
        """Instantiate and return the provider registered as *name*.

        Extra keyword arguments are forwarded to the provider constructor.
        Raises InvalidProvider when *name* is not registered.
        """
        if name not in self.providers:
            raise InvalidProvider(name)
        return self.providers[name](**kwargs)
| klynton/freight | freight/providers/manager.py | Python | apache-2.0 | 451 |
import json
def getName(self):
    """Return the human-readable name of this plugin."""
    return "JSON"
def getDescription(self):
    """Return a one-line description of what the plugin does."""
    return "Pretty print a JSON string"
def convert(self, stringToConvert):
    """Parse *stringToConvert* as JSON and return it pretty-printed
    with 4-space indentation.

    NOTE(review): Python 2 only -- relies on the ``unicode`` builtin.
    Also, ``str(stringToConvert)`` may itself raise UnicodeEncodeError
    for non-ASCII unicode input *before* errors='replace' can apply;
    verify against the inputs this plugin actually receives.
    """
    textJ = json.loads(unicode(str(stringToConvert), errors='replace'))
    output = str(json.dumps(textJ, indent=4))
    return output
| mcancellieri/server-swiss-army-knife | py/src/plugins/JSON.py | Python | gpl-2.0 | 290 |
#!/usr/bin/python
import unittest
import time
from throttle import throttle
class TestThrottle(unittest.TestCase):
    @throttle(1)
    def increment(self):
        """Increment the counter; throttled so the underlying call runs
        at most once per second. Used to exercise the throttle decorator."""
        self.count += 1
    def setUp(self):
        # Fresh counter for every test method.
        self.count = 0
    def test_throttle(self):
        """The throttled increment should execute immediately on the first
        call, then at most once per second regardless of call frequency."""
        self.assertTrue(self.count == 0)
        self.increment()
        # Leading edge: the very first call goes through immediately.
        self.assertTrue(self.count == 1)
        self.increment()
        self.increment()
        self.increment()
        self.increment()
        self.increment()
        time.sleep(0.25)
        self.increment()
        self.increment()
        self.increment()
        self.increment()
        self.increment()
        self.increment()
        # All calls within the 1-second window are suppressed.
        self.assertTrue(self.count == 1)
        self.increment()
        self.increment()
        self.increment()
        self.increment()
        self.assertTrue(self.count == 1)
        time.sleep(1)
        # NOTE(review): count reaches 2 here with no further increment()
        # call — implies the decorator fires a trailing invocation after
        # the window elapses; confirm against the throttle implementation.
        self.assertTrue(self.count == 2)
        time.sleep(10)
        # No pending calls remain, so the count stays put.
        # NOTE(review): this 10-second sleep makes the suite slow.
        self.assertTrue(self.count == 2)
# Allow running this test module directly with the unittest runner.
if __name__ == '__main__':
    unittest.main()
| AdoHaha/jourapp | helperpython/test_throttle.py | Python | mit | 1,354 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.mgmt.core.exceptions import ARMErrorFormat
from msrest import Serializer
from .. import models as _models
from .._vendor import _convert_request, _format_url_section
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
def build_unregister_request(
    resource_provider_namespace: str,
    subscription_id: str,
    **kwargs: Any
) -> HttpRequest:
    """Build the POST request that unregisters a subscription from a
    resource provider (api-version 2018-05-01).

    Autogenerated by AutoRest; manual edits are lost on regeneration.
    """
    api_version = "2018-05-01"
    accept = "application/json"
    # Construct URL
    url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/providers/{resourceProviderNamespace}/unregister')
    path_format_arguments = {
        "resourceProviderNamespace": _SERIALIZER.url("resource_provider_namespace", resource_provider_namespace, 'str'),
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
    }
    url = _format_url_section(url, **path_format_arguments)
    # Construct parameters
    query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
    query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
    # Construct headers
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
    return HttpRequest(
        method="POST",
        url=url,
        params=query_parameters,
        headers=header_parameters,
        **kwargs
    )
def build_register_request(
    resource_provider_namespace: str,
    subscription_id: str,
    **kwargs: Any
) -> HttpRequest:
    """Build the POST request that registers a subscription with a
    resource provider (api-version 2018-05-01).

    Autogenerated by AutoRest; manual edits are lost on regeneration.
    """
    api_version = "2018-05-01"
    accept = "application/json"
    # Construct URL
    url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/providers/{resourceProviderNamespace}/register')
    path_format_arguments = {
        "resourceProviderNamespace": _SERIALIZER.url("resource_provider_namespace", resource_provider_namespace, 'str'),
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
    }
    url = _format_url_section(url, **path_format_arguments)
    # Construct parameters
    query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
    query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
    # Construct headers
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
    return HttpRequest(
        method="POST",
        url=url,
        params=query_parameters,
        headers=header_parameters,
        **kwargs
    )
def build_list_request(
    subscription_id: str,
    *,
    top: Optional[int] = None,
    expand: Optional[str] = None,
    **kwargs: Any
) -> HttpRequest:
    """Build the GET request that lists all resource providers for a
    subscription (api-version 2018-05-01).

    ``top`` limits the number of results; ``expand`` selects additional
    properties to include. Both are optional query parameters.
    Autogenerated by AutoRest; manual edits are lost on regeneration.
    """
    api_version = "2018-05-01"
    accept = "application/json"
    # Construct URL
    url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/providers')
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
    }
    url = _format_url_section(url, **path_format_arguments)
    # Construct parameters
    query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
    if top is not None:
        query_parameters['$top'] = _SERIALIZER.query("top", top, 'int')
    if expand is not None:
        query_parameters['$expand'] = _SERIALIZER.query("expand", expand, 'str')
    query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
    # Construct headers
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
    return HttpRequest(
        method="GET",
        url=url,
        params=query_parameters,
        headers=header_parameters,
        **kwargs
    )
def build_get_request(
    resource_provider_namespace: str,
    subscription_id: str,
    *,
    expand: Optional[str] = None,
    **kwargs: Any
) -> HttpRequest:
    """Build the HTTP request that fetches a single resource provider.

    :param resource_provider_namespace: Namespace of the resource provider to get.
    :param subscription_id: The ID of the target subscription.
    :keyword expand: The $expand query parameter, e.g. "resourceTypes/aliases".
    :return: An ``HttpRequest`` for the Get Provider operation.
    """
    api_version = "2018-05-01"
    accept = "application/json"
    # Fill the URL template with the serialized path parameters.
    request_url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/providers/{resourceProviderNamespace}')
    path_arguments = {
        "resourceProviderNamespace": _SERIALIZER.url("resource_provider_namespace", resource_provider_namespace, 'str'),
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
    }
    request_url = _format_url_section(request_url, **path_arguments)
    # Query string: optional $expand, then the mandatory api-version.
    query: Dict[str, Any] = kwargs.pop("params", {})
    if expand is not None:
        query['$expand'] = _SERIALIZER.query("expand", expand, 'str')
    query['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
    # Headers: only content negotiation is needed for this GET.
    headers: Dict[str, Any] = kwargs.pop("headers", {})
    headers['Accept'] = _SERIALIZER.header("accept", accept, 'str')
    return HttpRequest(
        method="GET",
        url=request_url,
        params=query,
        headers=headers,
        **kwargs
    )
class ProvidersOperations(object):
    """ProvidersOperations operations.
    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.
    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.resource.resources.v2018_05_01.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """
    models = _models
    def __init__(self, client, config, serializer, deserializer):
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    @distributed_trace
    def unregister(
        self,
        resource_provider_namespace: str,
        **kwargs: Any
    ) -> "_models.Provider":
        """Unregisters a subscription from a resource provider.
        :param resource_provider_namespace: The namespace of the resource provider to unregister.
        :type resource_provider_namespace: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: Provider, or the result of cls(response)
        :rtype: ~azure.mgmt.resource.resources.v2018_05_01.models.Provider
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType["_models.Provider"]
        # Status codes that map onto typed exceptions; callers can extend or
        # override this mapping through the `error_map` kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        request = build_unregister_request(
            resource_provider_namespace=resource_provider_namespace,
            subscription_id=self._config.subscription_id,
            template_url=self.unregister.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        # Execute synchronously through the client pipeline.
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('Provider', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    unregister.metadata = {'url': '/subscriptions/{subscriptionId}/providers/{resourceProviderNamespace}/unregister'} # type: ignore
    @distributed_trace
    def register(
        self,
        resource_provider_namespace: str,
        **kwargs: Any
    ) -> "_models.Provider":
        """Registers a subscription with a resource provider.
        :param resource_provider_namespace: The namespace of the resource provider to register.
        :type resource_provider_namespace: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: Provider, or the result of cls(response)
        :rtype: ~azure.mgmt.resource.resources.v2018_05_01.models.Provider
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType["_models.Provider"]
        # Same error mapping convention as `unregister` above.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        request = build_register_request(
            resource_provider_namespace=resource_provider_namespace,
            subscription_id=self._config.subscription_id,
            template_url=self.register.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('Provider', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    register.metadata = {'url': '/subscriptions/{subscriptionId}/providers/{resourceProviderNamespace}/register'} # type: ignore
    @distributed_trace
    def list(
        self,
        top: Optional[int] = None,
        expand: Optional[str] = None,
        **kwargs: Any
    ) -> Iterable["_models.ProviderListResult"]:
        """Gets all resource providers for a subscription.
        :param top: The number of results to return. If null is passed returns all deployments.
        :type top: int
        :param expand: The properties to include in the results. For example, use &$expand=metadata in
         the query string to retrieve resource provider metadata. To include property aliases in
         response, use $expand=resourceTypes/aliases.
        :type expand: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either ProviderListResult or the result of cls(response)
        :rtype:
         ~azure.core.paging.ItemPaged[~azure.mgmt.resource.resources.v2018_05_01.models.ProviderListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType["_models.ProviderListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        def prepare_request(next_link=None):
            # First page uses the operation's URL template; subsequent pages
            # reuse the service-provided nextLink verbatim.
            if not next_link:
                request = build_list_request(
                    subscription_id=self._config.subscription_id,
                    top=top,
                    expand=expand,
                    template_url=self.list.metadata['url'],
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
            else:
                request = build_list_request(
                    subscription_id=self._config.subscription_id,
                    top=top,
                    expand=expand,
                    template_url=next_link,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
                request.method = "GET"
            return request
        def extract_data(pipeline_response):
            # Returns (continuation token, iterator over this page's items).
            deserialized = self._deserialize("ProviderListResult", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)
        def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
            return pipeline_response
        # Lazy paging: pages are only fetched as the iterator is consumed.
        return ItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/providers'} # type: ignore
    @distributed_trace
    def get(
        self,
        resource_provider_namespace: str,
        expand: Optional[str] = None,
        **kwargs: Any
    ) -> "_models.Provider":
        """Gets the specified resource provider.
        :param resource_provider_namespace: The namespace of the resource provider.
        :type resource_provider_namespace: str
        :param expand: The $expand query parameter. For example, to include property aliases in
         response, use $expand=resourceTypes/aliases.
        :type expand: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: Provider, or the result of cls(response)
        :rtype: ~azure.mgmt.resource.resources.v2018_05_01.models.Provider
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType["_models.Provider"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        request = build_get_request(
            resource_provider_namespace=resource_provider_namespace,
            subscription_id=self._config.subscription_id,
            expand=expand,
            template_url=self.get.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('Provider', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/providers/{resourceProviderNamespace}'} # type: ignore
| Azure/azure-sdk-for-python | sdk/resources/azure-mgmt-resource/azure/mgmt/resource/resources/v2018_05_01/operations/_providers_operations.py | Python | mit | 16,244 |
# -*- coding: utf-8 -*-
"""
This example demonstrates the SpinBox widget, which is an extension of
QDoubleSpinBox providing some advanced features:
* SI-prefixed units
* Non-linear stepping modes
* Bounded/unbounded values
"""
import initExample ## Add path to library (just for examples; you do not need this)
import pyqtgraph as pg
from pyqtgraph.Qt import QtCore, QtGui
import numpy as np
app = QtGui.QApplication([])
## Each entry pairs an HTML description with a differently-configured SpinBox.
spins = [
    ("Floating-point spin box, min=0, no maximum.", pg.SpinBox(value=5.0, bounds=[0, None])),
    ("Integer spin box, dec stepping<br>(1-9, 10-90, 100-900, etc)", pg.SpinBox(value=10, int=True, dec=True, minStep=1, step=1)),
    ("Float with SI-prefixed units<br>(n, u, m, k, M, etc)", pg.SpinBox(value=0.9, suffix='V', siPrefix=True)),
    ("Float with SI-prefixed units,<br>dec step=0.1, minStep=0.1", pg.SpinBox(value=1.0, suffix='V', siPrefix=True, dec=True, step=0.1, minStep=0.1)),
    ("Float with SI-prefixed units,<br>dec step=0.5, minStep=0.01", pg.SpinBox(value=1.0, suffix='V', siPrefix=True, dec=True, step=0.5, minStep=0.01)),
    ("Float with SI-prefixed units,<br>dec step=1.0, minStep=0.001", pg.SpinBox(value=1.0, suffix='V', siPrefix=True, dec=True, step=1.0, minStep=0.001)),
]
## Main window with a grid layout holding labels + spin boxes.
win = QtGui.QMainWindow()
win.setWindowTitle('pyqtgraph example: SpinBox')
cw = QtGui.QWidget()
layout = QtGui.QGridLayout()
cw.setLayout(layout)
win.setCentralWidget(cw)
win.show()
#win.resize(300, 600)
changingLabel = QtGui.QLabel() ## updated immediately
changedLabel = QtGui.QLabel() ## updated only when editing is finished or mouse wheel has stopped for 0.3sec
changingLabel.setMinimumWidth(200)
## Large bold font for both feedback labels.
font = changingLabel.font()
font.setBold(True)
font.setPointSize(14)
changingLabel.setFont(font)
changedLabel.setFont(font)
labels = []
def valueChanged(sb):
    """Slot for SpinBox.sigValueChanged: show the spin box's final value
    once editing is finished."""
    changedLabel.setText("Final value: %s" % str(sb.value()))
def valueChanging(sb, value):
    """Slot for SpinBox.sigValueChanging: show the in-progress value while
    the user is still editing.

    Parameters
    ----------
    sb : pg.SpinBox
        The spin box emitting the signal.
    value :
        The new value delivered by the signal.
    """
    # Use the value supplied by the signal instead of re-querying the
    # widget; the `value` parameter was previously ignored.
    changingLabel.setText("Value changing: %s" % str(value))
## Build one row per spin box (description label + widget) and connect both
## value signals to the feedback slots defined above.
for text, spin in spins:
    label = QtGui.QLabel(text)
    labels.append(label)
    layout.addWidget(label)
    layout.addWidget(spin)
    spin.sigValueChanged.connect(valueChanged)
    spin.sigValueChanging.connect(valueChanging)
## The feedback labels live in the second grid column.
layout.addWidget(changingLabel, 0, 1)
layout.addWidget(changedLabel, 2, 1)
#def mkWin():
#win = QtGui.QMainWindow()
#g = QtGui.QFormLayout()
#w = QtGui.QWidget()
#w.setLayout(g)
#win.setCentralWidget(w)
#s1 = SpinBox(value=5, step=0.1, bounds=[-1.5, None], suffix='units')
#t1 = QtGui.QLineEdit()
#g.addRow(s1, t1)
#s2 = SpinBox(value=10e-6, dec=True, step=0.1, minStep=1e-6, suffix='A', siPrefix=True)
#t2 = QtGui.QLineEdit()
#g.addRow(s2, t2)
#s3 = SpinBox(value=1000, dec=True, step=0.5, minStep=1e-6, bounds=[1, 1e9], suffix='Hz', siPrefix=True)
#t3 = QtGui.QLineEdit()
#g.addRow(s3, t3)
#s4 = SpinBox(int=True, dec=True, step=1, minStep=1, bounds=[-10, 1000])
#t4 = QtGui.QLineEdit()
#g.addRow(s4, t4)
#win.show()
#import sys
#for sb in [s1, s2, s3,s4]:
##QtCore.QObject.connect(sb, QtCore.SIGNAL('valueChanged(double)'), lambda v: sys.stdout.write(str(sb) + " valueChanged\n"))
##QtCore.QObject.connect(sb, QtCore.SIGNAL('editingFinished()'), lambda: sys.stdout.write(str(sb) + " editingFinished\n"))
#sb.sigValueChanged.connect(valueChanged)
#sb.sigValueChanging.connect(valueChanging)
#sb.editingFinished.connect(lambda: sys.stdout.write(str(sb) + " editingFinished\n"))
#return win, w, [s1, s2, s3, s4]
#a = mkWin()
#def test(n=100):
#for i in range(n):
#win, w, sb = mkWin()
#for s in sb:
#w.setParent(None)
#s.setParent(None)
#s.valueChanged.disconnect()
#s.editingFinished.disconnect()
## Start Qt event loop unless running in interactive mode or using pyside.
if __name__ == '__main__':
    import sys
    # PySide builds do not define QtCore.PYQT_VERSION, in which case the
    # event loop is started unconditionally.
    if (sys.flags.interactive != 1) or not hasattr(QtCore, 'PYQT_VERSION'):
        QtGui.QApplication.instance().exec_()
| UpSea/thirdParty | pyqtgraph-0.9.10/examples/SpinBox.py | Python | mit | 4,046 |
# -*- coding: utf-8 -*-
# Copyright 2016-2021 The pyXem developers
#
# This file is part of pyXem.
#
# pyXem is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pyXem is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pyXem. If not, see <http://www.gnu.org/licenses/>.
"""Generating subpixel resolution on diffraction vectors."""
import numpy as np
from skimage import morphology
from skimage.measure import label
from scipy import ndimage as ndi
from scipy.ndimage.measurements import center_of_mass
from hyperspy.signals import BaseSignal
from pyxem.generators.subpixelrefinement_generator import _get_pixel_vectors
from pyxem.signals import DiffractionVectors
def _get_intensities(z, vectors, radius=1):
"""Basic intensity integration routine, takes the maximum value at the
given vector positions with the number of pixels given by `radius`.
Parameters
----------
vectors : DiffractionVectors
Vectors to the locations of the spots to be
integrated.
radius: int,
Number of pixels within which to find the largest maximum
Returns
-------
intensities : np.array
List of extracted intensities
"""
i, j = np.array(vectors.data).astype(int).T
if radius > 1:
footprint = morphology.disk(radius)
filtered = ndi.maximum_filter(z, footprint=footprint)
intensities = filtered[j, i].reshape(-1, 1) # note that the indices are flipped
else:
intensities = z[j, i].reshape(-1, 1) # note that the indices are flipped
return np.array(intensities)
def _take_ragged(z, indices, _axis=None, out=None, mode="raise"):
"""Like `np.take` for ragged arrays, see `np.take` for documentation."""
return np.take(z[0], indices, axis=_axis, out=out, mode=mode)
def _get_largest_connected_region(segmentation):
    """Take a binary segmentation image and return the largest connected area."""
    labelled = label(segmentation)
    # Weighting by the segmentation zeroes out the background label, so the
    # argmax below is the label of the biggest foreground region.
    region_sizes = np.bincount(labelled.flat, weights=segmentation.flat)
    biggest = np.argmax(region_sizes)
    return (labelled == biggest).astype(int)
def _get_intensities_summation_method(
    z,
    vectors,
    box_inner: int = 7,
    box_outer: int = 10,
    n_min: int = 5,
    n_max: int = None,
    snr_thresh=3.0,
    verbose: bool = False,
):
    """Integrate reflections using the summation method.
    Two boxes are defined, the inner box is used to define the
    integration area. The outer box is used to calculate the
    average signal-to-noise ratio (SNR).
    All pixels with a large enough SNR are considered to be signal. The largest region
    of connected signal pixels are summed to calculate the reflection intensity.
    Parameters
    ----------
    z : np.array
        Single 2D diffraction pattern to integrate.
    vectors : DiffractionVectors
        Vectors to the locations of the spots to be
        integrated.
    box_inner : int
        Defines the radius (size) of the inner box, which must be larger than the reflection.
        The total box size is 2*box_inner
    box_outer : int
        Defines radius (size) of the outer box. The total box size is 2*box_inner
        The border between the inner and outer box is considered background
        and used to calculate the (SNR) for each pixel: SNR = (I - <I>/std(I_bkg)).
    snr_thresh : float
        Minimum signal-to-noise for a pixel to be considered as `signal`.
    n_min: int
        If the number of SNR pixels in the inner box < n_min, the reflection is discarded
    n_max:
        If the number of SNR pixels in the inner box >= n_max, the reflection is discarded.
        Defaults to the inner box size (`box_inner**2`).
    verbose : bool
        Print statistics for every reflection (for debugging)
    Returns
    -------
    peaks : np.array
        Array with 4 columns: X-position, Y-position, intensity, reflection SNR
    Notes
    -----
    Implementation based on Barty et al, J. Appl. Cryst. (2014). 47, 1118-1131
    Lesli, Acta Cryst. (2006). D62, 48-57
    """
    if not n_max:  # pragma: no cover
        n_max = box_inner ** 2
    peaks = []
    for i, j in vectors:
        # Inner box: the integration window centred on the candidate spot.
        box = z[j - box_inner : j + box_inner, i - box_inner : i + box_inner].copy()
        # Background: the ring of pixels between the inner and outer boxes,
        # gathered from the four strips around the inner box.
        bkg = np.hstack(
            [
                z[j - box_outer : j + box_outer, i - box_outer : i - box_inner].ravel(),
                z[j - box_outer : j + box_outer, i + box_inner : i + box_outer].ravel(),
                z[j - box_outer : j - box_inner, i - box_inner : i + box_inner].ravel(),
                z[j + box_inner : j + box_outer, i - box_inner : i + box_inner].ravel(),
            ]
        )
        bkg_mean = bkg.mean()
        bkg_std = bkg.std()
        # Per-pixel SNR relative to the background statistics.
        box_snr = (box - bkg_mean) / bkg_std
        # get mask for signal (I > SNR)
        signal_mask = _get_largest_connected_region(box_snr > snr_thresh)
        n_pix = signal_mask.sum()
        # Background-subtracted, masked signal summed into the intensity.
        signal = (box - bkg_mean) * signal_mask
        inty = signal.sum()
        snr = (inty / n_pix) / bkg_std
        sigma = inty / snr
        # calculate center of mass
        com_X, com_Y = center_of_mass(box, labels=signal_mask, index=1)
        dX = com_X - box_inner
        dY = com_Y - box_inner
        X = i + dX
        Y = j + dY
        if verbose:  # pragma: no cover
            print(
                f"\nMean(I): {bkg_mean:.2f} | Std(I): {bkg_std:.2f} | n_pix: {n_pix} \n"
                f"I: {inty:.2f} | Sigma(I): {sigma:.2f} | SNR(I): {snr:.2f} | I/pix: {inty/n_pix:.2f} \n"
                f"i: {i:.2f} | j: {j:.2f} | dX: {dX:.2f} | dY: {dY:.2f} | X: {X:.2f} | Y: {Y:.2f} "
            )
            # for debugging purposes
            # BUGFIX: was `import matploltib.pyplot` (typo), which raised
            # ImportError whenever verbose=True.
            import matplotlib.pyplot as plt
            plt.imshow(signal)
            plt.plot(dY + box_inner, dX + box_inner, "r+")  # center_of_mass
            plt.plot(box_inner, box_inner, "g+")  # input
            plt.show()
        if n_pix > n_max:  # pragma: no cover
            continue
        if n_pix < n_min:  # pragma: no cover
            continue
        # for some reason X/Y are reversed here
        peaks.append([Y, X, inty, sigma])
    # Single conversion to ndarray (was previously converted twice).
    return np.array(peaks)
class IntegrationGenerator:
    """Integrates reflections at the given vector positions.
    Parameters
    ----------
    dp : ElectronDiffraction2D
        The electron diffraction patterns to be refined
    vectors : DiffractionVectors | ndarray
        Vectors (in calibrated units) to the locations of the spots to be
        integrated. If given as DiffractionVectors, it must have the same
        navigation shape as the electron diffraction patterns. If an ndarray,
        the same set of vectors is mapped over all electron diffraction
        patterns.
    """
    def __init__(self, dp, vectors):
        self.dp = dp
        self.vectors_init = vectors
        self.last_method = None
        # Scale and centre of the signal axes are needed to convert the
        # calibrated vectors into pixel coordinates.
        sig_ax = dp.axes_manager.signal_axes
        self.calibration = [sig_ax[0].scale, sig_ax[1].scale]
        self.center = [sig_ax[0].size / 2, sig_ax[1].size / 2]
        self.vector_pixels = _get_pixel_vectors(
            dp, vectors, calibration=self.calibration, center=self.center
        )
    def extract_intensities(self, radius: int = 1):
        """Basic intensity integration routine, takes the maximum value at the
        given vector positions with the number of pixels given by `radius`.
        Parameters
        ----------
        radius: int,
            Number of pixels within which to find the largest maximum
        Returns
        -------
        intensities : :obj:`hyperspy.signals.BaseSignal`
            List of extracted intensities
        """
        # Map the per-pattern extraction over the whole navigation space.
        intensities = self.dp.map(
            _get_intensities, vectors=self.vector_pixels, radius=radius, inplace=False
        )
        intensities = BaseSignal(intensities)
        intensities.axes_manager.set_signal_dimension(0)
        return intensities
    def extract_intensities_summation_method(
        self,
        box_inner: int = 7,
        box_outer: int = 10,
        n_min: int = 5,
        n_max: int = 1000,
        snr_thresh: float = 3.0,
    ):
        """Integrate reflections using the summation method. Two boxes are defined,
        the inner box is used to define the integration area. The outer box is used
        to calculate the average signal-to-noise ratio (SNR).
        All pixels with a large enough SNR are considered to be signal. The largest region
        of connected signal pixels are summed to calculate the reflection intensity. The
        diffraction vectors are calculated as the center of mass of the signal pixels.
        Parameters
        ----------
        box_inner : int
            Defines the size of the inner box, which must be larger than the reflection.
        box_outer : int
            Defines the size of the outer box. The border between the inner and outer
            box is considered background and used to calculate the (SNR) for each
            pixel: SNR = (I - <I>/std(I_bkg)).
        snr_thresh : float
            Minimum signal-to-noise for a pixel to be considered as `signal`.
        n_min: int
            If the number of SNR pixels in the inner box < n_min, the reflection is discarded
        n_max:
            If the number of SNR pixels in the inner box > n_max, the reflection is discarded
        Returns
        -------
        vectors : :obj:`pyxem.signals.diffraction_vectors.DiffractionVectors`
            DiffractionVectors with optimized coordinates, where the attributes
            vectors.intensities -> `I`, vectors.sigma -> `sigma(I)`, and
            vectors.snr -> `I / sigma(I)`
        Notes
        -----
        Implementation based on Barty et al, J. Appl. Cryst. (2014). 47, 1118-1131
        Lesli, Acta Cryst. (2006). D62, 48-57
        """
        result = self.dp.map(
            _get_intensities_summation_method,
            vectors=self.vector_pixels,
            box_inner=box_inner,
            box_outer=box_outer,
            n_min=n_min,
            n_max=n_max,
            snr_thresh=snr_thresh,
            inplace=False,
            ragged=True,
        )
        # Each result row is (Y, X, intensity, sigma); split the columns out
        # of the ragged result arrays.
        peaks = result.map(
            _take_ragged, indices=[0, 1], _axis=1, inplace=False, ragged=True
        )
        intensities = result.map(
            _take_ragged, indices=2, _axis=1, inplace=False, ragged=True
        )
        sigma = result.map(_take_ragged, indices=3, _axis=1, inplace=False, ragged=True)
        vectors = DiffractionVectors.from_peaks(
            peaks, calibration=self.calibration, center=self.center
        )
        vectors.intensities = intensities
        vectors.sigma = sigma
        vectors.snr = intensities / sigma
        return vectors
| pycrystem/pycrystem | pyxem/generators/integration_generator.py | Python | gpl-3.0 | 11,204 |
# Django (pre-3.2) hook pointing at this app's custom AppConfig class.
default_app_config = 'ofahrtbase.apps.OfahrtbaseConfig'
| d120/pyofahrt | ofahrtbase/__init__.py | Python | agpl-3.0 | 56 |
"""
WSGI config for mclarkpw project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/howto/deployment/wsgi/
"""
import os
import sys
# Activate your virtual env
activate_env="/var/envs/mclarkpw/bin/activate_this.py"
execfile(activate_env, dict(__file__=activate_env))
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "mclarkpw.settings.production")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
| m-clark/mclarkpw | mclarkpw/mclarkpw/production.wsgi.py | Python | mit | 551 |
# -*- coding: utf-8; mode: python; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4; truncate-lines: 0 -*-
# vi: set fileencoding=utf-8 filetype=python expandtab tabstop=4 shiftwidth=4 softtabstop=4 cindent:
# :mode=python:indentSize=4:tabSize=4:noTabs=true:
#-----------------------------------------------------------------------------#
# Built-in modules
from __future__ import print_function, absolute_import, unicode_literals
import re
#-----------------------------------------------------------------------------#
class reFlag(object):
    """Couples a single `re` module flag (e.g. ``re.IGNORECASE``) to the GUI
    checkbox that toggles it, and tracks whether the flag is currently
    embedded in the regexp text (e.g. via an inline ``(?i)``)."""

    def __init__(self, flag_name, short_flag, checkbox):
        """
        :param flag_name: dotted name such as ``"re.IGNORECASE"``;
            must start with ``"re."`` or ValueError is raised.
        :param short_flag: the one-letter inline form (e.g. ``"i"``).
        :param checkbox: the Qt checkbox widget controlling this flag.
        """
        if not flag_name.startswith('re.'):
            raise ValueError('Invalid flag name {!r}'.format(flag_name))
        self.flagName = flag_name
        # Resolve e.g. "re.IGNORECASE" to the actual constant on the module.
        self.reFlag = getattr(re, flag_name[3:])
        self.shortFlag = short_flag
        self.checkBox = checkbox
        # Checkbox state saved before the flag was embedded in the pattern
        # text; None means "not currently embedded".
        self.preEmbedState = None

    def clear(self):
        """Forget any embedded state, re-enable and untick the checkbox."""
        self.preEmbedState = None
        self.checkBox.setEnabled(True)
        self.checkBox.setChecked(False)

    def embed(self):
        """Set the state of the checkbox to show that it
        is set by the regexp text."""
        # Idiom fix: identity comparison with None (was `== None`).
        if self.preEmbedState is None:
            self.preEmbedState = self.checkBox.isChecked()
        self.checkBox.setChecked(True)
        self.checkBox.setDisabled(True)

    def deembed(self):
        """Restore the checkbox to the state it had before embed()."""
        # Idiom fix: identity comparison with None (was `!= None`).
        if self.preEmbedState is not None:
            self.checkBox.setEnabled(True)
            self.checkBox.setChecked(self.preEmbedState)
            self.preEmbedState = None
class reFlagList(list):
    """A list of reFlag objects with helpers operating on the whole set."""

    def allFlagsORed(self):
        """Return the bitwise OR of every flag whose checkbox is ticked."""
        ticked = (entry.reFlag for entry in self if entry.checkBox.isChecked())
        combined = 0
        for flag_value in ticked:
            combined |= flag_value
        return combined

    def clearAll(self):
        """Reset every flag in the list (see reFlag.clear)."""
        for entry in self:
            entry.clear()
#-----------------------------------------------------------------------------#
| luksan/kodos | modules/flags.py | Python | gpl-2.0 | 1,969 |
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2010, 2011, 2013 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
""" Invenio bibsched task for uploading multiple documents
or metadata files. This task can run in two different modes:
metadata or documents.
The parent directory from where the folders metadata and
documents are expected to be found has to be specified
in the invenio config file.
"""
import os.path
__revision__ = "$Id$"
import sys
import os
import time
import tempfile
import shutil
from invenio.config import (CFG_TMPSHAREDDIR,
CFG_BATCHUPLOADER_DAEMON_DIR,
CFG_BATCHUPLOADER_FILENAME_MATCHING_POLICY,
CFG_PREFIX)
from invenio.legacy.bibsched.bibtask import (
task_init,
task_set_option,
task_get_option,
task_update_progress,
task_low_level_submission,
write_message,
task_sleep_now_if_required)
from invenio.legacy.batchuploader.engine import document_upload
def task_submit_elaborate_specific_parameter(key, value, opts, args):
    """Recognise batchuploader-specific CLI options.

    Returns True when ``key`` was one of the known options (and the
    corresponding task option has been set), False otherwise.
    Possible keys: -d/--documents, -m/--metadata.
    """
    # Map every accepted spelling of an option to the mode it selects;
    # the option name and its stored value are the same word.
    mode_for_key = {
        '-d': 'documents',
        '--documents': 'documents',
        '-m': 'metadata',
        '--metadata': 'metadata',
    }
    mode = mode_for_key.get(key)
    if mode is None:
        return False
    task_set_option(mode, mode)
    return True
def task_run_core():
    """ Walks through all directories where metadata files are located
        and uploads them.
        Files are then moved to the corresponding DONE folders.
    """
    # Treat the configured dir as absolute if it starts with '/', otherwise
    # resolve it relative to the Invenio installation prefix.
    daemon_dir = CFG_BATCHUPLOADER_DAEMON_DIR[0] == '/' and CFG_BATCHUPLOADER_DAEMON_DIR \
                 or CFG_PREFIX + '/' + CFG_BATCHUPLOADER_DAEMON_DIR
    # Check if directory /batchupload exists
    if not task_get_option('documents'):
        # Metadata upload
        parent_dir = daemon_dir + "/metadata/"
        progress = 0
        try:
            os.makedirs(parent_dir)
        except OSError:
            # Directory already exists.
            pass
        list_of_folders = ["insert",
                           "append",
                           "correct",
                           "replace",
                           "holdingpen"]
        for folder in list_of_folders:
            files_dir = os.path.join(parent_dir, folder)
            files_done_dir = os.path.join(files_dir, "DONE")
            try:
                files = os.listdir(files_dir)
            except OSError as e:
                # Folder missing: create it and continue with no files.
                os.mkdir(files_dir)
                files = []
                write_message(e, sys.stderr)
                write_message("Created new folder %s" % (files_dir,))
            # Create directory DONE/ if doesn't exist
            try:
                os.mkdir(files_done_dir)
            except OSError:
                # Directory exists
                pass
            for metafile in files:
                if os.path.isfile(os.path.join(files_dir, metafile)):
                    # Create temporary file to be uploaded
                    (fd, filename) = tempfile.mkstemp(prefix=metafile + "_" + time.strftime("%Y%m%d%H%M%S", time.localtime()) + "_", dir=CFG_TMPSHAREDDIR)
                    shutil.copy(os.path.join(files_dir, metafile), filename)
                    # Send bibsched task
                    mode = "--" + folder
                    jobid = str(task_low_level_submission('bibupload', 'batchupload', mode, filename))
                    # Move file to done folder
                    filename = metafile + "_" + time.strftime("%Y%m%d%H%M%S", time.localtime()) + "_" + jobid
                    os.rename(os.path.join(files_dir, metafile), os.path.join(files_done_dir, filename))
                    task_sleep_now_if_required(can_stop_too=True)
            progress += 1
            task_update_progress("Done %d out of %d." % (progress, len(list_of_folders)))
    else:
        # Documents upload
        parent_dir = daemon_dir + "/documents/"
        try:
            os.makedirs(parent_dir)
        except OSError:
            pass
        # Try each filename-matching policy in order until one succeeds
        # for all documents in the folder.
        matching_order = CFG_BATCHUPLOADER_FILENAME_MATCHING_POLICY
        for folder in ["append/", "revise/"]:
            try:
                os.mkdir(parent_dir + folder)
            except:
                # NOTE(review): bare except left as-is; it is meant to ignore
                # "directory exists" but also hides any other error here.
                pass
            for matching in matching_order:
                errors = document_upload(folder=parent_dir + folder, matching=matching, mode=folder[:-1])[0]
                if not errors:
                    break # All documents succedeed with that matching
            for error in errors:
                write_message("File: %s - %s with matching %s" % (error[0], error[1], matching), sys.stderr)
        task_sleep_now_if_required(can_stop_too=True)
    return 1
def main():
    """ Main that constructs all the bibtask. """
    # All strings below are passed to task_init verbatim (help text, version,
    # option spec) together with the parameter/run callbacks defined above.
    task_init(authorization_action='runbatchuploader',
              authorization_msg="Batch Uploader",
              description="""Description:
    The batch uploader has two different run modes.
    If --metadata is specified (by default) then all files in folders insert,
    append, correct and replace are uploaded using the corresponding mode.
    If mode --documents is selected all documents present in folders named
    append and revise are uploaded using the corresponding mode.
    Parent directory for batch uploader must be specified in the
    invenio configuration file.\n""",
              help_specific_usage=""" -m, --metadata\t Batch Uploader will look for metadata files in the corresponding folders
 -d, --documents\t Batch Uploader will look for documents in the corresponding folders
""",
              version=__revision__,
              specific_params=("md:", ["metadata", "documents"]),
              task_submit_elaborate_specific_parameter_fnc=task_submit_elaborate_specific_parameter,
              task_run_fnc=task_run_core)
# Script entry point: run the bibsched task when invoked directly.
if __name__ == '__main__':
    main()
| chokribr/invenio | invenio/legacy/batchuploader/cli.py | Python | gpl-2.0 | 6,648 |
# -*- coding: utf-8 -*-
import datetime
import os
import numpy as np
import pandas as pd
from decouple import config
from WindAdapter.enums import Header
from WindAdapter.enums import OutputFormat
# Path to the factor data dictionary CSV; resolved relative to this module
# unless DATA_DICT_PATH_TYPE_ABS is set (see WindQueryHelper.__init__).
DATA_DICT_PATH = config('DATA_DICT_PATH', default='data_dict.csv')
# Whether DATA_DICT_PATH should be treated as an absolute path.
DATA_DICT_PATH_TYPE_ABS = config('DATA_DICT_PATH_TYPE_ABS', default=False, cast=bool)
# Comma-separated level names for the output MultiIndex (default: date,secID).
INDEX_NAME = config('MULTI_INDEX_NAMES', default='date,secID')
# Column name used for single-factor output frames.
COL_NAME = config('DF_COL_NAME', default='factor')
class WindQueryHelper:
    def __init__(self, data_dict_path=DATA_DICT_PATH, path_type_abs=DATA_DICT_PATH_TYPE_ABS):
        """Load the factor data dictionary (a gbk-encoded CSV whose first
        column is the factor name index) into ``self._data_dict``.

        :param data_dict_path: path to the CSV; relative paths are resolved
            against this module's directory unless ``path_type_abs`` is True.
        :param path_type_abs: treat ``data_dict_path`` as absolute when True.
        """
        try:
            if not path_type_abs:
                current_dir = os.path.dirname(os.path.abspath(__file__))
                path = os.path.join(current_dir, data_dict_path)
                self.data_dict_path = path
            else:
                self.data_dict_path = data_dict_path
            self._data_dict = pd.read_csv(self.data_dict_path, index_col=0, encoding='gbk')
        except ValueError:
            raise ValueError('data_dict fails to load')
    @property
    def data_dict(self):
        """pd.DataFrame: the loaded factor data dictionary (read-only)."""
        return self._data_dict
@staticmethod
def _split_params(params):
main_params = params[[Header.API, Header.EXPLANATION, Header.INDICATOR]]
extra_params = params.drop([Header.API, Header.EXPLANATION, Header.INDICATOR, Header.TYPE])
extra_params[Header.TENOR.value] = np.nan
extra_params[Header.FREQ.value] = 'M'
return main_params, extra_params
def get_query_params(self, factor_name=None):
try:
self.data_dict.index = self.data_dict.index.str.lower()
factor_params = self.data_dict.loc[factor_name.lower()]
except:
raise ValueError(
'WindQueryHelper.get_query_params: failed to find params for factor {0}, check factor name spelling'.format(
factor_name))
main_params, extra_params = WindQueryHelper._split_params(factor_params)
main_params[Header.API] = 'w.' + main_params[Header.API]
return main_params, extra_params
@staticmethod
def convert_2_multi_index(df):
df = df.copy()
df = df.stack()
df = pd.DataFrame(df)
df.index.names = INDEX_NAME.split(',')
df.columns = [COL_NAME]
return df
@staticmethod
def reformat_wind_data(raw_data, date, output_data_format=OutputFormat.PIVOT_TABLE_DF, multi_factors=False):
if not multi_factors:
ret = pd.DataFrame(data=raw_data.Data,
columns=raw_data.Codes,
index=[date.strftime('%Y-%m-%d')])
if output_data_format == OutputFormat.MULTI_INDEX_DF:
ret = WindQueryHelper.convert_2_multi_index(ret)
elif 'windcode' not in raw_data.Fields:
ret = pd.DataFrame(data=np.array(raw_data.Data).T,
index=pd.MultiIndex.from_product([raw_data.Codes, [date.strftime('%Y-%m-%d')]],
names=['secID', 'date']),
columns=[raw_data.Fields])
else:
# ret = pd.DataFrame(data=np.array(raw_data.Data).T,
# index=pd.MultiIndex.from_arrays([raw_data.Times, raw_data.Codes*len(raw_data.Times)],
# names=['date', 'secID']),
# columns=raw_data.Fields)
ret = pd.DataFrame(data=np.array(raw_data.Data[2:]).T,
index=pd.MultiIndex.from_arrays([raw_data.Data[raw_data.Fields.index('windcode')],
raw_data.Times],
names=['secID', 'date']),
columns=[field for field in raw_data.Fields if field != 'time' and field != 'windcode'])
return ret
@staticmethod
def latest_report_date(date):
month = date.month
if month <= 4:
date = datetime.datetime(date.year - 1, 9, 30)
elif month <= 8:
date = datetime.datetime(date.year, 3, 31)
elif month <= 11:
date = datetime.datetime(date.year, 6, 30)
else:
date = datetime.datetime(date.year, 9, 30)
return date
| iLampard/WindAdapter | WindAdapter/helper.py | Python | mit | 4,406 |
__author__ = 'fgarcia'
| candango/socialspider | diasporapy/engine/components/__init__.py | Python | apache-2.0 | 23 |
"""This script evaluates scipy's implementation of hyp2f1 against mpmath's.
Author: Albert Steppi
This script is long running and generates a large output file. With default
arguments, the generated file is roughly 700MB in size and it takes around
40 minutes using an Intel(R) Core(TM) i5-8250U CPU with n_jobs set to 8
(full utilization). There are optional arguments which can be used to restrict
(or enlarge) the computations performed. These are described below.
The output of this script can be analyzed to identify suitable test cases and
to find parameter and argument regions where hyp2f1 needs to be improved.
The script has one mandatory positional argument for specifying the path to
the location where the output file is to be placed, and 4 optional arguments
--n_jobs, --grid_size, --regions, and --parameter_groups. --n_jobs specifies
the number of processes to use if running in parallel. The default value is 1.
The other optional arguments are explained below.
Produces a tab separated values file with 11 columns. The first four columns
contain the parameters a, b, c and the argument z. The next two contain |z| and
a region code for which region of the complex plane belongs to. The regions are
1) |z| < 0.9 and real(z) >= 0
2) |z| <= 1 and real(z) < 0
3) 0.9 <= |z| <= 1 and |1 - z| < 1.0:
4) 0.9 <= |z| <= 1 and |1 - z| >= 1.0 and real(z) >= 0:
5) |z| > 1
The --regions optional argument allows the user to specify a list of regions
to which computation will be restricted.
Parameters a, b, c are taken from a 10 * 10 * 10 grid with values at
-16, -8, -4, -2, -1, 1, 2, 4, 8, 16
with random perturbations applied.
There are 8 parameter groups handling the following cases.
1) A, B, C, B - A, C - A, C - B, C - A - B all non-integral.
2) B - A integral
3) C - A integral
4) C - B integral
5) C - A - B integral
6) A integral
7) B integral
8) C integral
The seventh column of the output file is an integer between 1 and 8 specifying
the parameter group as above.
The --parameter_groups optional argument allows the user to specify a list of
parameter groups to which computation will be restricted.
The argument z is taken from a grid in the box
-2 <= real(z) <= 2, -2 <= imag(z) <= 2.
with grid size specified using the optional command line argument --grid-size.
The default value is 20, yielding a 20 * 20 grid in this box.
The final four columns have the expected value of hyp2f1 for the given
parameters and argument as calculated with mpmath, the observed value
calculated with scipy's hyp2f1, the relative error, and the absolute error.
As special cases of hyp2f1 are moved from the original Fortran implementation
into Cython, this script can be used to ensure that no regressions occur and
to point out where improvements are needed.
"""
import os
import csv
import argparse
import numpy as np
from itertools import product
from multiprocessing import Pool
from scipy.special import hyp2f1
from scipy.special.tests.test_hyp2f1 import mp_hyp2f1
def get_region(z):
    """Assign numbers for regions where hyp2f1 must be handled differently."""
    modulus = abs(z)
    if modulus < 0.9 and z.real >= 0:
        return 1
    if modulus <= 1:
        if z.real < 0:
            return 2
        if modulus >= 0.9:
            # Inside the annulus with non-negative real part: split on
            # proximity to the branch point at z = 1.
            return 3 if abs(1 - z) < 1.0 else 4
    return 5
def get_result(a, b, c, z, group):
    """Get results for given parameter and value combination.

    Returns one output row: the parameters, the argument, |z|, the region
    code, the parameter group, the mpmath value (expected), the scipy
    value (observed), and the relative and absolute errors.
    """
    expected, observed = mp_hyp2f1(a, b, c, z), hyp2f1(a, b, c, z)
    # Relative error is measured against the mpmath reference value.
    relative_error = abs(expected - observed) / abs(expected)
    return (
        a,
        b,
        c,
        z,
        abs(z),
        get_region(z),
        group,
        expected,
        observed,
        relative_error,
        abs(expected - observed),
    )
def get_results(params, Z, n_jobs=1):
    """Batch compute results for multiple parameter and argument values.

    Parameters
    ----------
    params : iterable
        iterable of tuples (a, b, c, group) specifying parameter values
        a, b, c for hyp2f1 and the parameter group code
    Z : iterable of complex
        Arguments at which to evaluate hyp2f1
    n_jobs : Optional[int]
        Number of jobs for parallel execution.

    Returns
    -------
    list
        List of tuples of results values. See return value in source code
        of `get_result`.
    """
    # Every (parameter combination, argument) pair becomes one work item.
    input_ = (
        (a, b, c, z, group) for (a, b, c, group), z in product(params, Z)
    )
    with Pool(n_jobs) as pool:
        rows = pool.starmap(get_result, input_)
    return rows
def _make_hyp2f1_test_case(a, b, c, z, rtol):
    """Generate string for single test case as used in test_hyp2f1.py.

    The expected value is recomputed with mpmath; the result is one
    ``pytest.param(...)`` entry formatted for direct pasting.
    """
    expected = mp_hyp2f1(a, b, c, z)
    return (
        "    pytest.param(\n"
        "        Hyp2f1TestCase(\n"
        f"            a={a},\n"
        f"            b={b},\n"
        f"            c={c},\n"
        f"            z={z},\n"
        f"            expected={expected},\n"
        f"            rtol={rtol},\n"
        "        ),\n"
        "    ),"
    )
def make_hyp2f1_test_cases(rows):
    """Generate string for a list of test cases for test_hyp2f1.py.

    Parameters
    ----------
    rows : list
        List of lists of the form [a, b, c, z, rtol] where a, b, c, z are
        parameters and the argument for hyp2f1 and rtol is an expected
        relative error for the associated test case.

    Returns
    -------
    str
        String for a list of test cases. The output string can be printed
        or saved to a file and then copied into an argument for
        `pytest.mark.parameterize` within `scipy.special.tests.test_hyp2f1.py`.
    """
    case_strings = [
        _make_hyp2f1_test_case(a, b, c, z, rtol)
        for a, b, c, z, rtol in rows
    ]
    body = "\n".join(case_strings)
    return f"[\n{body}\n]"
def main(outpath, n_jobs=1, grid_size=20, regions=None, parameter_groups=None):
    """Evaluate scipy's hyp2f1 against mpmath's and write a tsv report.

    See the module docstring for a description of the output columns, the
    parameter groups, and the argument regions.

    Parameters
    ----------
    outpath : str
        Path of the output tab separated values file.
    n_jobs : Optional[int]
        Number of processes used for the computation.
    grid_size : Optional[int]
        hyp2f1 is evaluated on a grid_size * grid_size grid in the box
        -2 <= real(z) <= 2, -2 <= imag(z) <= 2.
    regions : Optional[list of int]
        If supplied, restrict the argument z to these regions.
    parameter_groups : Optional[list of int]
        If supplied, restrict computation to these parameter groups.
    """
    outpath = os.path.realpath(os.path.expanduser(outpath))
    random_state = np.random.RandomState(1234)

    # Parameters a, b, c selected near these values.
    root_params = np.array(
        [-16, -8, -4, -2, -1, 1, 2, 4, 8, 16]
    )
    # Perturbations to apply to root values.
    perturbations = 0.1 * random_state.random_sample(
        size=(3, len(root_params))
    )

    def make_group(A, B, C, group):
        # All (a, b, c, group) combinations, sorted so that combinations
        # with small |a|, |b| come first.
        return sorted(
            ((a, b, c, group) for a, b, c in product(A, B, C)),
            key=lambda x: max(abs(x[0]), abs(x[1])),
        )

    # Offsets added to root_params for (A, B, C) in each parameter group.
    # Group 1: random perturbations, so no integer differences occur
    #          (this has been confirmed for the seed above).
    # Groups 2-5: make B - A, C - A, C - B, C - A - B integral respectively.
    # Groups 6-8: make A, B, C themselves integral respectively.
    group_offsets = [
        (perturbations[0, :], perturbations[1, :], perturbations[2, :]),  # 1
        (0.5, 0.5, perturbations[1, :]),                                  # 2
        (0.5, perturbations[1, :], 0.5),                                  # 3
        (perturbations[0, :], 0.5, 0.5),                                  # 4
        (0.25, 0.25, 0.5),                                                # 5
        (0, perturbations[0, :], perturbations[1, :]),                    # 6
        (perturbations[0, :], 0, perturbations[1, :]),                    # 7
        (perturbations[0, :], perturbations[1, :], 0),                    # 8
    ]
    params = []
    for group, (da, db, dc) in enumerate(group_offsets, start=1):
        params.extend(
            make_group(
                root_params + da, root_params + db, root_params + dc, group
            )
        )

    if parameter_groups is not None:
        params = [
            (a, b, c, group) for a, b, c, group in params
            if group in parameter_groups
        ]

    # grid_size * grid_size grid in box with corners
    # -2 - 2j, -2 + 2j, 2 - 2j, 2 + 2j
    X, Y = np.meshgrid(
        np.linspace(-2, 2, grid_size), np.linspace(-2, 2, grid_size)
    )
    Z = X + Y * 1j
    Z = Z.flatten().tolist()
    if regions is not None:
        Z = [z for z in Z if get_region(z) in regions]

    # Evaluate scipy and mpmath's hyp2f1 for all parameter combinations
    # above against all arguments in the grid Z
    rows = get_results(params, Z, n_jobs=n_jobs)

    with open(outpath, "w", newline="") as f:
        writer = csv.writer(f, delimiter="\t")
        writer.writerow(
            [
                "a",
                "b",
                "c",
                "z",
                "|z|",
                "region",
                "parameter_group",
                "expected",  # mpmath's hyp2f1
                "observed",  # scipy's hyp2f1
                "relative_error",
                "absolute_error",
            ]
        )
        for row in rows:
            writer.writerow(row)
if __name__ == "__main__":
    # Command line interface: parse the arguments documented in the module
    # docstring and forward them to main().
    parser = argparse.ArgumentParser(
        description="Test scipy's hyp2f1 against mpmath's on a grid in the"
        " complex plane over a grid of parameter values. Saves output to file"
        " specified in positional argument \"outpath\"."
        " Caution: With default arguments, the generated output file is"
        " roughly 700MB in size. Script may take several hours to finish if"
        " \"--n_jobs\" is set to 1."
    )
    parser.add_argument(
        "outpath", type=str, help="Path to output tsv file."
    )
    parser.add_argument(
        "--n_jobs",
        type=int,
        default=1,
        help="Number of jobs for multiprocessing.",
    )
    parser.add_argument(
        "--grid_size",
        type=int,
        default=20,
        help="hyp2f1 is evaluated on grid_size * grid_size grid in box of side"
        " length 2 centered at the origin."
    )
    parser.add_argument(
        "--parameter_groups",
        type=int,
        nargs='+',
        default=None,
        help="Restrict to supplied parameter groups. See the Docstring for"
        " this module for more info on parameter groups. Calculate for all"
        " parameter groups by default."
    )
    parser.add_argument(
        "--regions",
        type=int,
        nargs='+',
        default=None,
        help="Restrict to argument z only within the supplied regions. See"
        " the Docstring for this module for more info on regions. Calculate"
        " for all regions by default."
    )
    args = parser.parse_args()
    main(
        args.outpath,
        n_jobs=args.n_jobs,
        grid_size=args.grid_size,
        parameter_groups=args.parameter_groups,
        regions=args.regions
    )
| grlee77/scipy | scipy/special/_precompute/hyp2f1_data.py | Python | bsd-3-clause | 12,050 |
# Print Legacy Survey viewer cutout URLs for the first 100 BGS targets
# found in the DR2 target catalogue.
from astropy.io import fits
import numpy as np
from desitarget import desi_mask

tt = fits.getdata('targets-dr2-0.2.2.fits')
# Indices of rows whose DESI_TARGET bitmask has any BGS bit set.
bgs = np.where( (tt.DESI_TARGET & desi_mask.BGS_ANY) != 0 )[0]
url = "http://legacysurvey.org/viewer/?ra={ra}&dec={dec}&zoom=15&layer=decals-dr2p"
for i in bgs[0:100]:
    # print() as a function call so the script runs under Python 2 and 3;
    # the original `print url.format(...)` statement was Python-2 only.
    print(url.format(ra=tt.RA[i], dec=tt.DEC[i]))
module load desitarget/0.2.0
cd /project/projectdirs/desi/target
select_targets /project/projectdirs/desiproc/dr2/sweep/ targets-dr2-0.2.0.fits -v
| moustakas/moustakas-projects | legacysurvey/decals_cutouts.py | Python | gpl-2.0 | 505 |
# Django settings for tango_with_django_project project.
import os
# NOTE(review): os.getcwd() depends on where the server process is started
# from; os.path.dirname(os.path.abspath(__file__)) would be more robust --
# confirm before changing.
PROJECT_PATH = os.getcwd()
TEMPLATE_PATH = os.path.join(PROJECT_PATH, 'templates')
STATIC_PATH = os.path.join(PROJECT_PATH, 'static')
DATABASE_PATH = os.path.join(PROJECT_PATH, 'rango.db')
#MEDIA_URL = '/media/' #code shouldn't be here!!!
#MEDIA_ROOT = os.path.join(PROJECT_PATH, 'media') #code shouldn't be here!!!
#absolute path is: <workspace>/tango_with_django_project/media/
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', 'your_email@example.com'),
)
MANAGERS = ADMINS
# Database configuration.  NAME must be the actual filesystem path of the
# sqlite database file; it is built above as DATABASE_PATH.  The previous
# value quoted the variable name ('DATABASE_PATH'), which made Django
# create a database file literally named "DATABASE_PATH" in the working
# directory instead of rango.db.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
        'NAME': DATABASE_PATH,                  # Or path to database file if using sqlite3.
        # The following settings are not used with sqlite3:
        # 'USER': '',
        # 'PASSWORD': '',
        # 'HOST': '',                      # Empty for localhost through domain sockets or '127.0.0.1' for localhost through TCP.
        # 'PORT': '',                      # Set to empty string for default.
    }
}
# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
ALLOWED_HOSTS = []
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/var/www/example.com/media/"
MEDIA_ROOT = os.path.join(PROJECT_PATH, 'media')
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://example.com/media/", "http://media.example.com/"
MEDIA_URL = '/media/'
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/var/www/example.com/static/"
STATIC_ROOT = ''
# URL prefix for static files.
# Example: "http://example.com/static/", "http://static.example.com/"
STATIC_URL = '/static/' #already defined, using in tango_with_django currently
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
STATIC_PATH,
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = '%*ee$&u%v(nr+k=ey(i9+eyf*kbcicse1#%mx3iqwoj94lqf3='
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'tango_with_django_project.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'tango_with_django_project.wsgi.application'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
#"home/client/rangoapp/tango_with_django_project/templates", #Don't need this hardcode path
TEMPLATE_PATH,
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# Uncomment the next line to enable the admin:
'django.contrib.admin',
# Uncomment the next line to enable admin documentation:
# 'django.contrib.admindocs',
'rango',
)
SESSION_SERIALIZER = 'django.contrib.sessions.serializers.JSONSerializer'
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
LOGIN_URL = '/rango/login/'
| Kentoseth/rangoapp | tango_with_django_project/tango_with_django_project/settings.py | Python | mit | 6,169 |
from pythonforandroid.toolchain import PythonRecipe, shprint
import sh
class FlaskRecipe(PythonRecipe):
    """python-for-android recipe that builds and installs Flask."""
    version = '0.10.1' # The webserver of 'master' seems to fail
                       # after a little while on Android, so use
                       # 0.10.1 at least for now
    url = 'https://github.com/pallets/flask/archive/{version}.zip'
    # Needs either python2 or python3crystax plus setuptools in the dist.
    depends = [('python2', 'python3crystax'), 'setuptools']
    # Pure-Python runtime dependencies installed via pip.
    python_depends = ['jinja2', 'werkzeug', 'markupsafe', 'itsdangerous', 'click']
    call_hostpython_via_targetpython = False
    install_in_hostpython = False
recipe = FlaskRecipe()
| inclement/python-for-android | pythonforandroid/recipes/flask/__init__.py | Python | mit | 606 |
from celery.task import Task
from celery.registry import tasks
from django.conf import settings
from django.core.management.base import NoArgsCommand
from mailer.engine import send_all
from mailer.models import Message
PAUSE_SEND = getattr(settings, "MAILER_PAUSE_SEND", False)
class RetryDeferred(Task):
    """Celery task that moves deferred messages back into the send queue."""
    name = 'mailer.retry_deferred'
    def run(self, **kwargs):
        logger = self.get_logger(**kwargs)
        count = Message.objects.retry_deferred()
        logger.info("%s message(s) retried" % count)
class SendMail(Task):
    """Celery task that flushes the outgoing mail queue via send_all(),
    unless sending is paused through the MAILER_PAUSE_SEND setting."""
    name = 'mailer.send_mail'
    def run(self, **kwargs):
        logger = self.get_logger(**kwargs)
        if not PAUSE_SEND:
            send_all()
        else:
            logger.info("sending is paused, quitting.")
tasks.register(RetryDeferred)
tasks.register(SendMail)
| code06/django-mailer | mailer/tasks.py | Python | mit | 843 |
# check flag example
class GetFlag():
    """CTF service checker (Python 2 code: print statements, md5 module).

    Registers/logs in a user derived deterministically from the flag id,
    then reads the flag back out of the 'list drillers' menu entry using
    the token as the driller name.
    """
    DEBUG = False
    # Read one byte at a time until a ':' arrives (menu prompts end with a
    # colon); raises if the peer closes the connection.
    def recv_until_colon(self, s):
        ret = ""
        while True:
            data = s.recv(1)
            if len(data) == 0:
                raise Exception("Socket is broken. socket.recv() returns empty string.")
            ret += data
            if ret[len(ret) - 1 : ] == ':':
                break
        return ret
    # Read one byte at a time until a newline arrives.
    def recv_until_linebreak(self, s):
        ret = ""
        while True:
            data = s.recv(1)
            if len(data) == 0:
                raise Exception("Socket is broken. socket.recv() returns empty string.")
            ret += data
            if ret[len(ret) - 1 : ] == '\n':
                break
        return ret
    # Read until the ' ========\n' banner that ends the main menu.
    def recv_until_main_menu(self, s):
        ret = ""
        while True:
            data = s.recv(1)
            if len(data) == 0:
                raise Exception("Socket is broken. socket.recv() returns empty string.")
            ret += data
            if ret[len(ret) - 10 : ] == ' ========\n':
                break
        return ret
    # We generate a username according to current flag_id.
    # The md5 of the flag id picks a legitimate-looking base name; three
    # hex digits are appended so names stay (mostly) unique per flag id.
    def gen_username(self, flag_id):
        LEGITIMATE_USERNAME = ["LetMeFly","TryThis","CatLovers","test00001","fanqiang","YouMotherFxxxers","realcompanyltd","y4nSh0shit4shvi1i"]
        import md5
        m = md5.new()
        m.update(flag_id)
        digest = m.hexdigest()
        index = int(digest, 16) % len(LEGITIMATE_USERNAME)
        return LEGITIMATE_USERNAME[index] + digest[2 : 5]
    # The password is generated according to current username
    # (same scheme as gen_username, different word list and digest slice).
    def gen_password(self, username):
        LEGITIMATE_PASSWORD = ["McDonalds","pleasedontcr4sh","123456","13133211","Op0PoPop", "poiu;lkj"]
        import md5
        m = md5.new()
        m.update(username)
        digest = m.hexdigest()
        index = int(digest, 16) % len(LEGITIMATE_PASSWORD)
        return LEGITIMATE_PASSWORD[index] + digest[4 : 7]
    # Drive the service's registration dialogue; returns True on success.
    def client_register(self, s, username, password):
        if self.DEBUG: print "client_register(%s, %s)" % (username, password)
        self.recv_until_colon(s)
        s.send("1\n") # Register
        self.recv_until_colon(s)
        s.send(username + "\n") # Username
        self.recv_until_colon(s)
        s.send(password + "\n") # Password
        self.recv_until_colon(s)
        s.send(password + "\n") # Repeat password
        data = self.recv_until_linebreak(s)
        if self.DEBUG: print data
        if data.find("succeeded") != -1:
            # Registration succeeded
            return True
        else:
            # Registration failed, either duplicated name, or it just doesn't work
            return False
    # Drive the service's login dialogue; returns True on success.
    def client_login(self, s, username, password):
        if self.DEBUG: print "client_login(%s, %s)" % (username, password)
        self.recv_until_colon(s)
        s.send("2\n") # Login
        self.recv_until_colon(s)
        s.send(username + "\n") # Username
        self.recv_until_colon(s)
        s.send(password + "\n") # Password
        data = self.recv_until_linebreak(s)
        if self.DEBUG: print data
        if data.find("Successfully") != -1:
            # Logged-in successfully
            return True
        else:
            return False
    # List drillers matching driller_name and scrape the Location field,
    # which is where the flag is stored.
    def client_list_driller(self, s, driller_name):
        if self.DEBUG: print "client_list_driller(%s)" % (driller_name)
        self.recv_until_colon(s)
        s.send("4\n") # List drillers (admin)
        self.recv_until_colon(s)
        s.send(driller_name + "\n")
        data = self.recv_until_main_menu(s)
        if self.DEBUG: print data
        import re
        p = re.compile(r'Name: (\S*)\nLocation: ([\S ]*)\n')
        tuples = p.findall(data)
        flag = ""
        if len(tuples) > 0:
            flag = tuples[0][1]
        return flag
    # Full check: register (ignoring failures for already-existing users),
    # login, then fetch the flag via the driller listing.
    def do_getflag(self, s, flag_id, token):
        if self.DEBUG: print 'Getting flag..'
        # Username is decided by flag_id
        username = self.gen_username(flag_id)
        # Password is decided by username :)
        password = self.gen_password(username)
        # Skip the first colon after "Statistics"
        self.recv_until_colon(s)
        # Try to register
        ret = self.client_register(s, username, password)
        # Try to login
        ret = self.client_login(s, username, password)
        if not ret:
            if self.DEBUG: print "Login failed. Is service down?"
            # Service is down?
            return
        # List the drillers
        flag = self.client_list_driller(s, token)
        return flag
    # Entry point used by the framework: connect and store the outcome on
    # self; any exception is mapped to error code 42.
    def execute(self, ip, port, flag_id, token):
        from socket import socket
        flag = ''
        error = 0
        error_msg = ''
        try:
            s = socket()
            s.connect((ip, port))
            flag = self.do_getflag(s, flag_id, token)
        except Exception as e:
            error = 42
            error_msg = str(e)
        self.flag = flag
        self.error = error
        self.error_msg = error_msg
    # Result dict in the format expected by the checking framework.
    def result(self):
        return {'FLAG' : self.flag,
                'ERROR' : self.error,
                'ERROR_MSG' : self.error_msg,
                }
| diegorusso/ictf-framework | services/driller/scripts/getflag.py | Python | gpl-2.0 | 5,169 |
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# NOTE(andreykurilin): most tests for sqlalchemy api is merged with db_api
# tests. Hope, it will be fixed someday.
import collections
import datetime as dt
import ddt
from rally.common.db.sqlalchemy import api as db_api
from tests.unit import test
NOW = dt.datetime.now()
class FakeSerializable(object):
    """Stand-in for DB model objects: stores constructor kwargs as
    attributes and exposes them through ``_as_dict`` like the real
    models do."""

    def __init__(self, **kwargs):
        self.__dict__.update(kwargs)

    def _as_dict(self):
        return self.__dict__
@ddt.ddt
class SerializeTestCase(test.DBTestCase):
    """Tests for the sqlalchemy ``serialize`` decorator and
    ``serialize_data`` helper."""
    # NOTE: the previous setUp() only called super() and added nothing,
    # so it was removed; base-class setup still runs unchanged.

    @ddt.data(
        {"data": 1, "serialized": 1},
        {"data": 1.1, "serialized": 1.1},
        {"data": "a string", "serialized": "a string"},
        {"data": NOW, "serialized": NOW},
        {"data": {"k1": 1, "k2": 2}, "serialized": {"k1": 1, "k2": 2}},
        {"data": [1, "foo"], "serialized": [1, "foo"]},
        {"data": ["foo", 1, {"a": "b"}], "serialized": ["foo", 1, {"a": "b"}]},
        {"data": FakeSerializable(a=1), "serialized": {"a": 1}},
        {"data": [FakeSerializable(a=1),
                  FakeSerializable(b=FakeSerializable(c=1))],
         "serialized": [{"a": 1}, {"b": {"c": 1}}]},
    )
    @ddt.unpack
    def test_serialize(self, data, serialized):
        # Plain values, containers, and _as_dict-able objects must all
        # round-trip through the serialize decorator unchanged/flattened.
        @db_api.serialize
        def fake_method():
            return data
        results = fake_method()
        self.assertEqual(serialized, results)

    def test_serialize_ordered_dict(self):
        # Key order of an OrderedDict must survive serialization.
        data = collections.OrderedDict([(1, 2), ("foo", "bar"), (2, 3)])
        serialized = db_api.serialize_data(data)
        self.assertIsInstance(serialized, collections.OrderedDict)
        self.assertEqual([1, "foo", 2], list(serialized.keys()))
        self.assertEqual([2, "bar", 3], list(serialized.values()))

    def test_serialize_value_error(self):
        # Objects without an _as_dict method cannot be serialized.
        @db_api.serialize
        def fake_method():
            class Fake(object):
                pass
            return Fake()
        self.assertRaises(ValueError, fake_method)
| yeming233/rally | tests/unit/common/db/test_sqlalchemy/test_api.py | Python | apache-2.0 | 2,631 |
"""A setuptools based setup module.
See:
https://packaging.python.org/en/latest/distributing.html
https://github.com/pypa/sampleproject
"""
import ez_setup
ez_setup.use_setuptools()
# Always prefer setuptools over distutils
from setuptools import setup, find_packages
# To use a consistent encoding
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
# Get the long description from the relevant file
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='inspyred',
# Versions should comply with PEP440. For a discussion on single-sourcing
# the version across setup.py and the project code, see
# https://packaging.python.org/en/latest/single_source_version.html
version='1.0.1',
description='A framework for creating bio-inspired computational intelligence algorithms in Python',
long_description=long_description,
# The project's main homepage.
url='https://inspyred.github.io',
# Author details
author='Aaron Garrett',
author_email='aaron.lee.garrett@gmail.com',
# Choose your license
license='MIT',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 5 - Production/Stable',
# Indicate who your project is intended for
'Intended Audience :: Science/Research',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
# Pick your license as you wish (should match "license" above)
'License :: OSI Approved :: MIT License',
# Specify the Python versions you support here. In particular, ensure
# that you indicate whether you support Python 2, Python 3 or both.
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
# What does your project relate to?
keywords='optimization evolutionary computation genetic algorithm particle ' + \
'swarm estimation distribution differential evolution nsga paes ' + \
'island model multiobjective ant colony',
# You can just specify the packages manually here if your project is
# simple. Or you can use find_packages().
packages=find_packages(exclude=['contrib', 'docs', 'tests*']),
# List run-time dependencies here. These will be installed by pip when
# your project is installed. For an analysis of "install_requires" vs pip's
# requirements files see:
# https://packaging.python.org/en/latest/requirements.html
#install_requires=[],
# List additional groups of dependencies here (e.g. development
# dependencies). You can install these using the following syntax,
# for example:
# $ pip install -e .[dev,test]
extras_require={
'dev': ['check-manifest'],
'test': ['coverage', 'pytest'],
},
# If there are data files included in your packages that need to be
# installed, specify them here. If using Python 2.6 or less, then these
# have to be included in MANIFEST.in as well.
#package_data={
# 'sample': ['package_data.dat'],
#},
# Although 'package_data' is the preferred approach, in some case you may
# need to place data files outside of your packages. See:
# http://docs.python.org/3.4/distutils/setupscript.html#installing-additional-files # noqa
# In this case, 'data_file' will be installed into '<sys.prefix>/my_data'
#data_files=[('my_data', ['data/data_file'])],
# To provide executable scripts, use entry points in preference to the
# "scripts" keyword. Entry points provide cross-platform support and allow
# pip to create the appropriate form of executable for the target platform.
#entry_points={
# 'console_scripts': [
# 'sample=sample:main',
# ],
#},
) | hishnash/inspyred | setup.py | Python | mit | 4,344 |
class TransformCSV:
    """Turn raw order csv rows into dicts with a computed gross margin.

    The first row is treated as a header and dropped.  Gross margin is
    int(sales price) - int(base price), taken from columns 10 and 9.
    """

    def __init__(self):
        pass

    def process(self, data):
        self.data = data
        rows = iter(data)
        next(rows, None)  # skip the header row
        transformed = []
        for record in rows:
            transformed.append({
                'ORDER_ID': record[0],
                'ORDER_LINE_ID': record[1],
                'ORDER_LINE_ITEM_ID': record[2],
                'GROSS_MARGIN': int(float(record[10])) - int(float(record[9])),
            })
        return transformed
class TransformJSON:
    """Turn raw order dicts into uppercase-keyed dicts with a computed
    gross margin (int(sales_price) - int(base_price))."""

    def __init__(self):
        pass

    def process(self, data):
        self.data = data
        return [
            {
                'ORDER_ID': record['order_id'],
                'ORDER_LINE_ID': record['order_line_id'],
                'ORDER_LINE_ITEM_ID': record['order_line_item_id'],
                'GROSS_MARGIN': int(record['sales_price']) - int(record['base_price']),
            }
            for record in data
        ]
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.