| text | meta |
|---|---|
import io
import json
import pytest
from ansible_inventory_hacks.filters import instance_filter
@pytest.fixture
def output(sampledata):
return instance_filter.filter_json(sampledata['ec2']['inventory'])
@pytest.fixture
def fake_stdin(sampledata):
    # json.dumps returns str and sys.stdin is a text stream on Python 3,
    # so wrap the payload in StringIO rather than BytesIO.
    return io.StringIO(json.dumps(sampledata['ec2']['inventory']))
def test_maps_each_instance_to_its_primary_ip_or_name(output):
assert '10.1.1.91' in output
assert output['10.1.1.91'] == 'i-6932895f'
def test_main_text_output(monkeypatch, capsys, fake_stdin):
monkeypatch.setattr('sys.stdin', fake_stdin)
instance_filter.main([
'exe',
'-',
'-f', 'text'
])
out, err = capsys.readouterr()
assert '' == err
assert '10.1.1.91' in out
def test_main_json_output(monkeypatch, capsys, fake_stdin):
monkeypatch.setattr('sys.stdin', fake_stdin)
instance_filter.main([
'exe',
'-',
'-f', 'json'
])
out, err = capsys.readouterr()
assert '' == err
assert '"10.1.1.91"' in out
| {
"content_hash": "e5fcbe7f8efbb890892b1f3c836ab0a7",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 70,
"avg_line_length": 21.95744680851064,
"alnum_prop": 0.6346899224806202,
"repo_name": "meatballhat/ansible-inventory-hacks",
"id": "4268ee8bb7a0d5d83d98466ae53f204e699777f8",
"size": "1058",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/filters/test_instance_filter.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "37355"
},
{
"name": "Shell",
"bytes": "131"
}
],
"symlink_target": ""
} |
"""
Model optimisation.
@author Yann Carbonne / modified by Aurelien Galicher
Uses the hyperopt module to optimise an XGBoost model, adapted here to an
SGDClassifier (the xgboost-specific calls are left commented out below).
Great tutorial: http://conference.scipy.org/proceedings/scipy2013/pdfs/bergstra_hyperopt.pdf
Stores each set of predictions in hyperopt_preds/ (currently commented out)
and the per-trial scores in hyperopt_results_sgd.csv.
"""
from preprocessing import load_preprocessing
from sklearn.cross_validation import StratifiedKFold
from sklearn.metrics import log_loss
from hyperopt import fmin, tpe, hp, STATUS_OK
#import xgboost as xgb
import numpy as np
import pandas as pd
from sklearn.linear_model import SGDClassifier
def eval_param(params):
"""Evaluation of one set of xgboost's params.
Then, use 3 folds as training and cv in a row as xgboost's watchlist with an early_stop at 50.
"""
global df_results, train, target, test
print ("Training with params : ")
print (params)
random_state = 42
avg_score = 0.
n_folds = 3
predict = np.zeros(test.shape[0])
#dtest = xgb.DMatrix(test)
skf = StratifiedKFold(target, n_folds=n_folds, random_state=random_state)
for train_index, cv_index in skf:
# train
x_train, x_cv = train[train_index], train[cv_index]
y_train, y_cv = target[train_index], target[cv_index]
#dtrain = xgb.DMatrix(x_train, label=y_train)
#dvalid = xgb.DMatrix(x_cv, label=y_cv)
#watchlist = [(dtrain, 'train'), (dvalid, 'eval')]
clf = SGDClassifier(**params).fit(x_train, y_train)
#bst = xgb.train(params, dtrain, num_round, watchlist, early_stopping_rounds=early_stopping_rounds, maximize=True)
# test / score
predict_cv = clf.predict_proba(x_cv)[:,1]
avg_score += -log_loss(y_cv, predict_cv)
predict += clf.predict_proba(test)[:,1]#bst.predict(dtest, ntree_limit=bst.best_iteration)
predict /= n_folds
avg_score /= n_folds
# store
new_row = pd.DataFrame([np.append([avg_score], list(params.values()))],
columns=np.append(['score'], list(params.keys())))
df_results = df_results.append(new_row, ignore_index=True)
#np.savetxt('hyperopt_preds/pred' + str(df_results.index.max()) + '.txt', predict, fmt='%s')
df_results.to_csv('hyperopt_results_sgd.csv')
print ("\tScore {0}\n\n".format(avg_score))
return {'loss': - avg_score, 'status': STATUS_OK}
if __name__ == '__main__':
train, target, test, _, _, _, _ = load_preprocessing()
space = {'alpha' : hp.loguniform('alpha', -7, -1),
#'eta': hp.quniform('eta', 0.0001, 0.01, 0.0001),
'penalty':'elasticnet',
'loss': 'log',
#'class_weight': None,
'power_t': 0.5,
'n_jobs': -1,
'l1_ratio': hp.uniform('l1_ratio', 0, 0.5)
}
df_results = pd.DataFrame(columns=np.append(['score'], list(space.keys())))
best = fmin(eval_param, space, algo=tpe.suggest, max_evals=300)
print (best)
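    # Added sketch (not in the original script): `best` only contains the
    # searched values ('alpha', 'l1_ratio'), so refitting the winning model
    # means merging them back into the fixed params from `space`.
    #
    # best_params = {'penalty': 'elasticnet', 'loss': 'log', 'power_t': 0.5,
    #                'n_jobs': -1, 'alpha': best['alpha'],
    #                'l1_ratio': best['l1_ratio']}
    # final_clf = SGDClassifier(**best_params).fit(train, target)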
# Send a text message with twillio
#from my_phone import send_text
#send_text("Best params: " + str(best))
| {
"content_hash": "595045397f5c2d2306f8791e346cdd28",
"timestamp": "",
"source": "github",
"line_count": 78,
"max_line_length": 122,
"avg_line_length": 39.166666666666664,
"alnum_prop": 0.6271685761047463,
"repo_name": "AurelienGalicher/DStoolkit",
"id": "20e4dce94eebf5276ee5bdcf787383f6b707816a",
"size": "3055",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "optim_hyperopt_SGD.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "27727"
},
{
"name": "Python",
"bytes": "36432"
}
],
"symlink_target": ""
} |
from .. import db
from werkzeug.security import generate_password_hash, check_password_hash
import random, string
class User(db.Model):
__tablename__ = 'users'
id = db.Column(db.Integer, primary_key=True)
email = db.Column(db.String, unique=True)
first_name = db.Column(db.String)
last_name = db.Column(db.String)
pw_hash = db.Column(db.String)
editor = db.Column(db.String)
fontsize = db.Column(db.String)
confirmed = db.Column(db.Boolean, nullable=False, default=False)
def __init__(self, email, first_name=None, last_name=None,password=None,
confirmed=False):
self.email = email.lower()
self.first_name = first_name
self.last_name = last_name
self.set_password(password)
self.confirmed = confirmed
self.editor = "NONE"
self.fontsize = "12"
def set_password(self, password):
        # If no password was specified (login via Facebook/Google), create a
        # random 32-character password; otherwise users could log in by
        # entering the empty string as the password.
if password is None:
password = ''.join(random.choice(string.ascii_uppercase +
string.digits) for _ in range(32))
self.pw_hash = generate_password_hash(password)
def check_password(self, password):
return check_password_hash(self.pw_hash, password)
# These four methods are for Flask-Login
def is_authenticated(self):
return True
def is_active(self):
return True
def is_anonymous(self):
return False
def get_id(self):
        # Flask-Login expects get_id() to return a string.
        return str(self.id)
def get_editor(self):
return self.editor
def set_editor(self, ed):
self.editor = ed
def get_email(self):
return self.email
def get_first_name(self):
return self.first_name
def get_fontsize(self):
return self.fontsize
def set_fontsize(self, fs):
self.fontsize = fs
def is_email_confirmed(self):
return self.confirmed
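# Added sketch (not part of the original module): typical usage of the model
# above, assuming a Flask-SQLAlchemy `db` session is configured elsewhere.
#
#   user = User('Alice@Example.com', first_name='Alice', password='s3cret')
#   db.session.add(user)
#   db.session.commit()
#   assert user.get_email() == 'alice@example.com'  # emails are lowercased
#   assert user.check_password('s3cret')
#   assert not user.check_password('wrong')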
| {
"content_hash": "ab230e8f9f7bbc7c47756ef0df7fbd82",
"timestamp": "",
"source": "github",
"line_count": 71,
"max_line_length": 73,
"avg_line_length": 25.366197183098592,
"alnum_prop": 0.7207107162687396,
"repo_name": "mereckaj/CS4098-Group-E",
"id": "605c5bde631d9df58636f77a8c22330b864023e4",
"size": "1801",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "app/main/models.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "59058"
},
{
"name": "HTML",
"bytes": "15648"
},
{
"name": "JavaScript",
"bytes": "25116431"
},
{
"name": "Python",
"bytes": "23102"
},
{
"name": "Shell",
"bytes": "6655"
}
],
"symlink_target": ""
} |
import pymongo
from .model_base import Model
from girder import logprint
from girder.exceptions import ValidationException
from girder.settings import SettingDefault
from girder.utility import setting_utilities
from girder.utility._cache import cache
class Setting(Model):
"""
This model represents server-wide configuration settings as key/value pairs.
"""
def initialize(self):
self.name = 'setting'
# We had been asking for an index on key, like so:
# self.ensureIndices(['key'])
# We really want the index to be unique, which could be done:
# self.ensureIndices([('key', {'unique': True})])
# We can't do it here, as we have to update and correct older installs,
# so this is handled in the reconnect method.
def reconnect(self):
"""
Reconnect to the database and rebuild indices if necessary. If a
unique index on key does not exist, make one, first discarding any
extant index on key and removing duplicate keys if necessary.
"""
super().reconnect()
try:
indices = self.collection.index_information()
except pymongo.errors.OperationFailure:
indices = []
hasUniqueKeyIndex = False
presentKeyIndices = []
for index in indices:
if indices[index]['key'][0][0] == 'key':
if indices[index].get('unique'):
hasUniqueKeyIndex = True
break
presentKeyIndices.append(index)
if not hasUniqueKeyIndex:
for index in presentKeyIndices:
self.collection.drop_index(index)
duplicates = self.collection.aggregate([{
'$group': {'_id': '$key',
'key': {'$first': '$key'},
'ids': {'$addToSet': '$_id'},
'count': {'$sum': 1}}}, {
'$match': {'count': {'$gt': 1}}}])
for duplicate in duplicates:
logprint.warning(
'Removing duplicate setting with key %s.' % (
duplicate['key']))
# Remove all of the duplicates. Keep the item with the lowest
# id in Mongo.
for duplicateId in sorted(duplicate['ids'])[1:]:
self.collection.delete_one({'_id': duplicateId})
self.collection.create_index('key', unique=True)
def validate(self, doc):
"""
This method is in charge of validating that the setting key is a valid
key, and that for that key, the provided value is valid. It first
allows plugins to validate the setting, but if none of them can, it
assumes it is a core setting and does the validation here.
"""
key = doc['key']
validator = setting_utilities.getValidator(key)
if validator:
validator(doc)
else:
raise ValidationException('Invalid setting key "%s".' % key, 'key')
return doc
@cache.cache_on_arguments()
def _get(self, key):
"""
        This method exists so that built-in caching decorators can be used
        without custom logic for the default handling in self.get.
"""
return self.findOne({'key': key})
def get(self, key):
"""
Retrieve a setting by its key.
:param key: The key identifying the setting.
:type key: str
"""
setting = self._get(key)
if setting is None:
return self.getDefault(key)
else:
return setting['value']
def set(self, key, value):
"""
Save a setting. If a setting for this key already exists, this will
replace the existing value.
:param key: The key identifying the setting.
:type key: str
:param value: The object to store for this setting.
:returns: The document representing the saved Setting.
"""
setting = self.findOne({'key': key})
if setting is None:
setting = {
'key': key,
'value': value
}
else:
setting['value'] = value
        # Invalidate the cache so that events that listen for a change in the
        # setting will get the right value from a get. We don't set the cache
        # here, as the value could still fail to validate and save.
self._get.invalidate(self, key)
setting = self.save(setting)
self._get.set(setting, self, key)
return setting
def unset(self, key):
"""
Remove the setting for this key. If no such setting exists, this is
a no-op.
:param key: The key identifying the setting to be removed.
:type key: str
"""
self._get.invalidate(self, key)
for setting in self.find({'key': key}):
self.remove(setting)
def getDefault(self, key):
"""
Retrieve the system default for a value.
:param key: The key identifying the setting.
:type key: str
:returns: The default value if the key is present in both SettingKey
and referenced in SettingDefault; otherwise None.
"""
if key in SettingDefault.defaults:
return SettingDefault.defaults[key]
else:
fn = setting_utilities.getDefaultFunction(key)
if callable(fn):
return fn()
return None
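# Added sketch (not part of the original module): the round trip the methods
# above provide. The key is hypothetical and would need a validator
# registered via setting_utilities for set() to pass validation.
#
#   setting_model = Setting()
#   setting_model.set('my_plugin.some_key', 'value')   # validated, cached
#   assert setting_model.get('my_plugin.some_key') == 'value'
#   setting_model.unset('my_plugin.some_key')          # no-op if absent
#   setting_model.get('my_plugin.some_key')            # -> getDefault(...)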
| {
"content_hash": "4962bcf2d85903aaf0526ecac612f4d4",
"timestamp": "",
"source": "github",
"line_count": 158,
"max_line_length": 84,
"avg_line_length": 34.98101265822785,
"alnum_prop": 0.5657680477655147,
"repo_name": "Kitware/girder",
"id": "c5d0bbf49bf592621fb206962c819a8bad4c65d5",
"size": "5551",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "girder/models/setting.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CMake",
"bytes": "26244"
},
{
"name": "CSS",
"bytes": "6537"
},
{
"name": "Dockerfile",
"bytes": "1528"
},
{
"name": "HTML",
"bytes": "14"
},
{
"name": "JavaScript",
"bytes": "1176017"
},
{
"name": "Jinja",
"bytes": "322"
},
{
"name": "Mako",
"bytes": "7571"
},
{
"name": "Pug",
"bytes": "137980"
},
{
"name": "Python",
"bytes": "2018697"
},
{
"name": "Roff",
"bytes": "17"
},
{
"name": "Shell",
"bytes": "3354"
},
{
"name": "Stylus",
"bytes": "48706"
}
],
"symlink_target": ""
} |
from sqlalchemy import Column, DateTime, Float, String, text
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
metadata = Base.metadata
class Host(Base):
__tablename__ = 'host'
entityId = Column(String(64), primary_key=True, nullable=False)
region = Column(String(16), primary_key=True, nullable=False)
host_name = Column(String(32), nullable=False)
role = Column(String(16), nullable=False)
aggregationType = Column(String(8), primary_key=True, nullable=False)
timestampId = Column(DateTime, primary_key=True, nullable=False, server_default=text("CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP"))
avg_usedMemPct = Column(Float, nullable=False, server_default=text("'0'"))
avg_freeSpacePct = Column(Float, nullable=False, server_default=text("'0'"))
avg_cpuLoadPct = Column(Float, nullable=False, server_default=text("'0'"))
host_id = Column(String(16), nullable=False)
sysUptime = Column(Float, nullable=False, server_default=text("'0'"))
class HostService(Base):
__tablename__ = 'host_service'
entityId = Column(String(64), primary_key=True, nullable=False)
region = Column(String(32), primary_key=True, nullable=False)
entityType = Column(String(32), primary_key=True, nullable=False)
serviceType = Column(String(32), primary_key=True, nullable=False)
aggregationType = Column(String(8), primary_key=True, nullable=False)
timestampId = Column(DateTime, primary_key=True, nullable=False, server_default=text("CURRENT_TIMESTAMP"))
avg_Uptime = Column(Float, nullable=False, server_default=text("'0'"))
    def __eq__(self, other):
        # Check the type first so the del calls below cannot raise KeyError
        # when comparing against an unrelated object.
        if not isinstance(other, self.__class__):
            return False
        se = dict(self.__dict__)
        ot = dict(other.__dict__)
        del se["_sa_instance_state"]
        del se["avg_Uptime"]
        del ot["_sa_instance_state"]
        del ot["avg_Uptime"]
        return se == ot
    def __hash__(self):
        # Hash on the primary-key columns; `key_fields` avoids shadowing the
        # builtin `tuple`.
        key_fields = (self.entityId, self.region, self.entityType,
                      self.serviceType, self.aggregationType, self.timestampId)
        return hash(key_fields)
class Region(Base):
__tablename__ = 'region'
entityId = Column(String(16), primary_key=True, nullable=False)
entityType = Column(String(16), nullable=False)
aggregationType = Column(String(8), primary_key=True, nullable=False)
timestampId = Column(DateTime, primary_key=True, nullable=False, server_default=text("CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP"))
avg_ram_used = Column(Float, nullable=False, server_default=text("'0'"))
avg_ram_tot = Column(Float, nullable=False, server_default=text("'0'"))
avg_core_enabled = Column(Float, nullable=False, server_default=text("'0'"))
avg_core_used = Column(Float, nullable=False, server_default=text("'0'"))
avg_core_tot = Column(Float, nullable=False, server_default=text("'0'"))
avg_hd_used = Column(Float, nullable=False, server_default=text("'0'"))
avg_hd_tot = Column(Float, nullable=False, server_default=text("'0'"))
avg_vm_tot = Column(Float, nullable=False, server_default=text("'0'"))
class Vm(Base):
__tablename__ = 'vm'
entityId = Column(String(64), primary_key=True, nullable=False)
region = Column(String(16), primary_key=True, nullable=False)
entityType = Column(String(16), nullable=False)
aggregationType = Column(String(8), primary_key=True, nullable=False)
timestampId = Column(DateTime, primary_key=True, nullable=False, server_default=text("CURRENT_TIMESTAMP"))
avg_usedMemPct = Column(Float, nullable=False, server_default=text("'0'"))
avg_freeSpacePct = Column(Float, nullable=False, server_default=text("'0'"))
avg_cpuLoadPct = Column(Float, nullable=False, server_default=text("'0'"))
host_name = Column(String(32), nullable=False)
sysUptime = Column(Float, nullable=False, server_default=text("'0'"))
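# Added sketch (not part of the original module): creating the tables above.
# The server_default clauses (e.g. "ON UPDATE CURRENT_TIMESTAMP") are
# MySQL-specific, so a MySQL engine is assumed; the URL is a placeholder.
#
#   from sqlalchemy import create_engine
#   engine = create_engine('mysql+pymysql://user:password@localhost/monitoring')
#   metadata.create_all(engine)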
| {
"content_hash": "8622e2e971edc8d480c2dfb034ee90d9",
"timestamp": "",
"source": "github",
"line_count": 83,
"max_line_length": 138,
"avg_line_length": 48.144578313253014,
"alnum_prop": 0.688938938938939,
"repo_name": "attybro/FIWARELab-monitoringAPI",
"id": "b41d8276f17767ddfb57ecfcc24a1249e7f1e90c",
"size": "4012",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "monitoringProxy/collectors/model_mysql.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "124978"
}
],
"symlink_target": ""
} |
"""Support for Xiaomi Miio."""
from __future__ import annotations
from dataclasses import dataclass
from datetime import timedelta
import logging
import async_timeout
from miio import (
AirFresh,
AirHumidifier,
AirHumidifierMiot,
AirHumidifierMjjsq,
AirPurifier,
AirPurifierMB4,
AirPurifierMiot,
CleaningDetails,
CleaningSummary,
ConsumableStatus,
DeviceException,
DNDStatus,
Fan,
Fan1C,
FanP5,
FanP9,
FanP10,
FanP11,
FanZA5,
Timer,
Vacuum,
VacuumStatus,
)
from miio.gateway.gateway import GatewayException
from homeassistant import config_entries, core
from homeassistant.const import CONF_HOST, CONF_TOKEN
from homeassistant.core import callback
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers import device_registry as dr, entity_registry as er
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import (
ATTR_AVAILABLE,
CONF_DEVICE,
CONF_FLOW_TYPE,
CONF_GATEWAY,
CONF_MODEL,
DOMAIN,
KEY_COORDINATOR,
KEY_DEVICE,
MODEL_AIRPURIFIER_3C,
MODEL_FAN_1C,
MODEL_FAN_P5,
MODEL_FAN_P9,
MODEL_FAN_P10,
MODEL_FAN_P11,
MODEL_FAN_ZA5,
MODELS_AIR_MONITOR,
MODELS_FAN,
MODELS_FAN_MIIO,
MODELS_HUMIDIFIER,
MODELS_HUMIDIFIER_MIIO,
MODELS_HUMIDIFIER_MIOT,
MODELS_HUMIDIFIER_MJJSQ,
MODELS_LIGHT,
MODELS_PURIFIER_MIOT,
MODELS_SWITCH,
MODELS_VACUUM,
AuthException,
SetupException,
)
from .gateway import ConnectXiaomiGateway
_LOGGER = logging.getLogger(__name__)
POLLING_TIMEOUT_SEC = 10
UPDATE_INTERVAL = timedelta(seconds=15)
GATEWAY_PLATFORMS = ["alarm_control_panel", "light", "sensor", "switch"]
SWITCH_PLATFORMS = ["switch"]
FAN_PLATFORMS = ["binary_sensor", "fan", "number", "select", "sensor", "switch"]
HUMIDIFIER_PLATFORMS = [
"binary_sensor",
"humidifier",
"number",
"select",
"sensor",
"switch",
]
LIGHT_PLATFORMS = ["light"]
VACUUM_PLATFORMS = ["binary_sensor", "sensor", "vacuum"]
AIR_MONITOR_PLATFORMS = ["air_quality", "sensor"]
MODEL_TO_CLASS_MAP = {
MODEL_FAN_1C: Fan1C,
MODEL_FAN_P10: FanP10,
MODEL_FAN_P11: FanP11,
MODEL_FAN_P5: FanP5,
MODEL_FAN_P9: FanP9,
MODEL_FAN_ZA5: FanZA5,
}
async def async_setup_entry(
hass: core.HomeAssistant, entry: config_entries.ConfigEntry
):
"""Set up the Xiaomi Miio components from a config entry."""
hass.data.setdefault(DOMAIN, {})
if entry.data[CONF_FLOW_TYPE] == CONF_GATEWAY:
await async_setup_gateway_entry(hass, entry)
return True
return bool(
entry.data[CONF_FLOW_TYPE] != CONF_DEVICE
or await async_setup_device_entry(hass, entry)
)
@callback
def get_platforms(config_entry):
"""Return the platforms belonging to a config_entry."""
model = config_entry.data[CONF_MODEL]
flow_type = config_entry.data[CONF_FLOW_TYPE]
if flow_type == CONF_GATEWAY:
return GATEWAY_PLATFORMS
if flow_type == CONF_DEVICE:
if model in MODELS_SWITCH:
return SWITCH_PLATFORMS
if model in MODELS_HUMIDIFIER:
return HUMIDIFIER_PLATFORMS
if model in MODELS_FAN:
return FAN_PLATFORMS
if model in MODELS_LIGHT:
return LIGHT_PLATFORMS
for vacuum_model in MODELS_VACUUM:
if model.startswith(vacuum_model):
return VACUUM_PLATFORMS
for air_monitor_model in MODELS_AIR_MONITOR:
if model.startswith(air_monitor_model):
return AIR_MONITOR_PLATFORMS
_LOGGER.error(
"Unsupported device found! Please create an issue at "
"https://github.com/syssi/xiaomi_airpurifier/issues "
"and provide the following data: %s",
model,
)
return []
def _async_update_data_default(hass, device):
async def update():
"""Fetch data from the device using async_add_executor_job."""
async def _async_fetch_data():
"""Fetch data from the device."""
async with async_timeout.timeout(POLLING_TIMEOUT_SEC):
state = await hass.async_add_executor_job(device.status)
_LOGGER.debug("Got new state: %s", state)
return state
try:
return await _async_fetch_data()
except DeviceException as ex:
if getattr(ex, "code", None) != -9999:
raise UpdateFailed(ex) from ex
_LOGGER.info("Got exception while fetching the state, trying again: %s", ex)
# Try to fetch the data a second time after error code -9999
try:
return await _async_fetch_data()
except DeviceException as ex:
raise UpdateFailed(ex) from ex
return update
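# NOTE (added): _async_update_data_default is a factory; it closes over
# `hass` and `device` and returns the coroutine the DataUpdateCoordinator
# calls on every refresh. The single retry covers miio error code -9999,
# a transient failure these devices can report.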
@dataclass(frozen=True)
class VacuumCoordinatorData:
"""A class that holds the vacuum data retrieved by the coordinator."""
status: VacuumStatus
dnd_status: DNDStatus
last_clean_details: CleaningDetails
consumable_status: ConsumableStatus
clean_history_status: CleaningSummary
timers: list[Timer]
fan_speeds: dict[str, int]
fan_speeds_reverse: dict[int, str]
@dataclass(init=False, frozen=True)
class VacuumCoordinatorDataAttributes:
"""
A class that holds attribute names for VacuumCoordinatorData.
    These attributes can be used in methods like `getattr` when a generic
    solution is needed.
See homeassistant.components.xiaomi_miio.device.XiaomiCoordinatedMiioEntity
._extract_value_from_attribute for
an example.
"""
status: str = "status"
dnd_status: str = "dnd_status"
last_clean_details: str = "last_clean_details"
consumable_status: str = "consumable_status"
clean_history_status: str = "clean_history_status"
timer: str = "timer"
fan_speeds: str = "fan_speeds"
fan_speeds_reverse: str = "fan_speeds_reverse"
def _async_update_data_vacuum(hass, device: Vacuum):
def update() -> VacuumCoordinatorData:
timer = []
        # See https://github.com/home-assistant/core/issues/38285 for the
        # reason why timers must be fetched separately.
try:
timer = device.timer()
except DeviceException as ex:
_LOGGER.debug(
"Unable to fetch timers, this may happen on some devices: %s", ex
)
fan_speeds = device.fan_speed_presets()
data = VacuumCoordinatorData(
device.status(),
device.dnd_status(),
device.last_clean_details(),
device.consumable_status(),
device.clean_history(),
timer,
fan_speeds,
{v: k for k, v in fan_speeds.items()},
)
return data
async def update_async():
"""Fetch data from the device using async_add_executor_job."""
async def execute_update():
async with async_timeout.timeout(POLLING_TIMEOUT_SEC):
state = await hass.async_add_executor_job(update)
_LOGGER.debug("Got new vacuum state: %s", state)
return state
try:
return await execute_update()
except DeviceException as ex:
if getattr(ex, "code", None) != -9999:
raise UpdateFailed(ex) from ex
_LOGGER.info("Got exception while fetching the state, trying again: %s", ex)
# Try to fetch the data a second time after error code -9999
try:
return await execute_update()
except DeviceException as ex:
raise UpdateFailed(ex) from ex
return update_async
async def async_create_miio_device_and_coordinator(
hass: core.HomeAssistant, entry: config_entries.ConfigEntry
):
"""Set up a data coordinator and one miio device to service multiple entities."""
model = entry.data[CONF_MODEL]
host = entry.data[CONF_HOST]
token = entry.data[CONF_TOKEN]
name = entry.title
device = None
migrate = False
update_method = _async_update_data_default
coordinator_class = DataUpdateCoordinator
if (
model not in MODELS_HUMIDIFIER
and model not in MODELS_FAN
and model not in MODELS_VACUUM
):
return
_LOGGER.debug("Initializing with host %s (token %s...)", host, token[:5])
# Humidifiers
if model in MODELS_HUMIDIFIER_MIOT:
device = AirHumidifierMiot(host, token)
migrate = True
elif model in MODELS_HUMIDIFIER_MJJSQ:
device = AirHumidifierMjjsq(host, token, model=model)
migrate = True
elif model in MODELS_HUMIDIFIER_MIIO:
device = AirHumidifier(host, token, model=model)
migrate = True
# Airpurifiers and Airfresh
elif model in MODEL_AIRPURIFIER_3C:
device = AirPurifierMB4(host, token)
elif model in MODELS_PURIFIER_MIOT:
device = AirPurifierMiot(host, token)
elif model.startswith("zhimi.airpurifier."):
device = AirPurifier(host, token)
elif model.startswith("zhimi.airfresh."):
device = AirFresh(host, token)
elif model in MODELS_VACUUM:
device = Vacuum(host, token)
update_method = _async_update_data_vacuum
coordinator_class = DataUpdateCoordinator[VacuumCoordinatorData]
# Pedestal fans
elif model in MODEL_TO_CLASS_MAP:
device = MODEL_TO_CLASS_MAP[model](host, token)
elif model in MODELS_FAN_MIIO:
device = Fan(host, token, model=model)
else:
_LOGGER.error(
"Unsupported device found! Please create an issue at "
"https://github.com/syssi/xiaomi_airpurifier/issues "
"and provide the following data: %s",
model,
)
return
if migrate:
        # Remove the fan platform entity for humidifiers and migrate its name
        # to the config entry.
entity_registry = er.async_get(hass)
entity_id = entity_registry.async_get_entity_id("fan", DOMAIN, entry.unique_id)
if entity_id:
            # This check applies only to entities from a platform migration
            # and should be removed in the future
if migrate_entity_name := entity_registry.async_get(entity_id).name:
hass.config_entries.async_update_entry(entry, title=migrate_entity_name)
entity_registry.async_remove(entity_id)
    # Create the miio device and update coordinator
coordinator = coordinator_class(
hass,
_LOGGER,
name=name,
update_method=update_method(hass, device),
# Polling interval. Will only be polled if there are subscribers.
update_interval=UPDATE_INTERVAL,
)
hass.data[DOMAIN][entry.entry_id] = {
KEY_DEVICE: device,
KEY_COORDINATOR: coordinator,
}
# Trigger first data fetch
await coordinator.async_config_entry_first_refresh()
async def async_setup_gateway_entry(
hass: core.HomeAssistant, entry: config_entries.ConfigEntry
):
"""Set up the Xiaomi Gateway component from a config entry."""
host = entry.data[CONF_HOST]
token = entry.data[CONF_TOKEN]
name = entry.title
gateway_id = entry.unique_id
# For backwards compat
if entry.unique_id.endswith("-gateway"):
hass.config_entries.async_update_entry(entry, unique_id=entry.data["mac"])
entry.async_on_unload(entry.add_update_listener(update_listener))
# Connect to gateway
gateway = ConnectXiaomiGateway(hass, entry)
try:
await gateway.async_connect_gateway(host, token)
except AuthException as error:
raise ConfigEntryAuthFailed() from error
except SetupException as error:
raise ConfigEntryNotReady() from error
gateway_info = gateway.gateway_info
gateway_model = f"{gateway_info.model}-{gateway_info.hardware_version}"
device_registry = await dr.async_get_registry(hass)
device_registry.async_get_or_create(
config_entry_id=entry.entry_id,
connections={(dr.CONNECTION_NETWORK_MAC, gateway_info.mac_address)},
identifiers={(DOMAIN, gateway_id)},
manufacturer="Xiaomi",
name=name,
model=gateway_model,
sw_version=gateway_info.firmware_version,
)
def update_data():
"""Fetch data from the subdevice."""
data = {}
for sub_device in gateway.gateway_device.devices.values():
try:
sub_device.update()
except GatewayException as ex:
_LOGGER.error("Got exception while fetching the state: %s", ex)
data[sub_device.sid] = {ATTR_AVAILABLE: False}
else:
data[sub_device.sid] = {ATTR_AVAILABLE: True}
return data
async def async_update_data():
"""Fetch data from the subdevice using async_add_executor_job."""
return await hass.async_add_executor_job(update_data)
# Create update coordinator
coordinator = DataUpdateCoordinator(
hass,
_LOGGER,
name=name,
update_method=async_update_data,
# Polling interval. Will only be polled if there are subscribers.
update_interval=UPDATE_INTERVAL,
)
hass.data[DOMAIN][entry.entry_id] = {
CONF_GATEWAY: gateway.gateway_device,
KEY_COORDINATOR: coordinator,
}
for platform in GATEWAY_PLATFORMS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, platform)
)
async def async_setup_device_entry(
hass: core.HomeAssistant, entry: config_entries.ConfigEntry
):
"""Set up the Xiaomi Miio device component from a config entry."""
platforms = get_platforms(entry)
await async_create_miio_device_and_coordinator(hass, entry)
if not platforms:
return False
entry.async_on_unload(entry.add_update_listener(update_listener))
hass.config_entries.async_setup_platforms(entry, platforms)
return True
async def async_unload_entry(
hass: core.HomeAssistant, config_entry: config_entries.ConfigEntry
):
"""Unload a config entry."""
platforms = get_platforms(config_entry)
unload_ok = await hass.config_entries.async_unload_platforms(
config_entry, platforms
)
if unload_ok:
hass.data[DOMAIN].pop(config_entry.entry_id)
return unload_ok
async def update_listener(
hass: core.HomeAssistant, config_entry: config_entries.ConfigEntry
):
"""Handle options update."""
await hass.config_entries.async_reload(config_entry.entry_id)
| {
"content_hash": "cf5d1c28a62a80e7361ca370e0dffdae",
"timestamp": "",
"source": "github",
"line_count": 466,
"max_line_length": 109,
"avg_line_length": 31.255364806866954,
"alnum_prop": 0.6488843117061449,
"repo_name": "aronsky/home-assistant",
"id": "de5baf69683ab5fbcf6cab5dd78e151f8b6e1595",
"size": "14565",
"binary": false,
"copies": "2",
"ref": "refs/heads/dev",
"path": "homeassistant/components/xiaomi_miio/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2443"
},
{
"name": "Python",
"bytes": "38448521"
},
{
"name": "Shell",
"bytes": "4910"
}
],
"symlink_target": ""
} |
class MetaMixin(object):
"""Safe way to store meta information related to class object.
We are storing information in class object instead of the instance.
Information is stored in dict that is initialized only once during the
load of module, it means that all subclasses of this class will point to
the same dict object with the information.
Sample that explains why it's important to use MetaMixin:
>>> # Using direct fields
>>>
>>> class A(object):
>>> _meta = {}
>>>
>>> class B(A):
>>> pass
>>>
>>> B._meta["a"] = 10
>>> assert A._meta["a"] == 10 # We changed meta of base class, which
# is going to produce nasty bugs
>>> # MetaMixin in action
>>>
>>> class A(MetaMixin):
>>> pass
>>>
>>> class B(A):
>>> pass
>>>
>>> A._meta_set("a", 10) # Raises ReferenceError
>>> A._meta_init()
>>> A._meta_set("a", 10) # Set meta field "a"
>>>
>>> B._meta_get("a") # Raises ReferenceError
>>> B._meta_init()
>>> B._meta_set("a", 20) # Set meta field "a"
>>>
>>> assert A._meta_get("a") == 10
>>> assert B._meta_get("a") == 20
"""
@classmethod
def _meta_init(cls):
"""Initialize meta for this class."""
cls._meta = {}
@classmethod
def _meta_clear(cls):
cls._meta.clear() # NOTE(boris-42): make sure that meta is deleted
delattr(cls, "_meta")
@classmethod
def _meta_is_inited(cls, raise_exc=True):
"""Check if meta is initialized.
        It means that this class has its own cls._meta object (not a pointer
        to the parent's cls._meta).
        To do this we should check 2 things:
        1) cls has attribute _meta
        2) cls._meta is not a pointer to parent._meta
"""
if (not hasattr(cls, "_meta")
or cls._meta is getattr(super(cls, cls), "_meta", None)):
if raise_exc:
raise ReferenceError(
"Trying to use MetaMixin before initialization %s. "
"Call _meta_init() before using it" % cls)
return False
return True
@classmethod
def _meta_get(cls, key, default=None):
"""Get value corresponding to key in meta data."""
cls._meta_is_inited()
return cls._meta.get(key, default)
@classmethod
def _meta_set(cls, key, value):
"""Set value for key in meta."""
cls._meta_is_inited()
cls._meta[key] = value
@classmethod
def _meta_setdefault(cls, key, value):
"""Set default value for key in meta."""
cls._meta_is_inited()
cls._meta.setdefault(key, value)
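# Minimal runnable sketch (added for illustration; mirrors the class
# docstring above):
if __name__ == "__main__":
    class A(MetaMixin):
        pass
    class B(A):
        pass
    A._meta_init()
    A._meta_set("a", 10)
    B._meta_init()
    B._meta_set("a", 20)
    assert A._meta_get("a") == 10
    assert B._meta_get("a") == 20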
| {
"content_hash": "8ca4a4e41aa18f7c9404066645ea6274",
"timestamp": "",
"source": "github",
"line_count": 90,
"max_line_length": 77,
"avg_line_length": 31.61111111111111,
"alnum_prop": 0.5216168717047451,
"repo_name": "eonpatapon/rally",
"id": "36727ccc3d5999210e04e907cde02977a7dea780",
"size": "3476",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "rally/common/plugin/meta.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "46737"
},
{
"name": "Python",
"bytes": "2566072"
},
{
"name": "Shell",
"bytes": "43361"
}
],
"symlink_target": ""
} |
"""Flock platform for notify component."""
import asyncio
import logging
import async_timeout
import voluptuous as vol
from homeassistant.const import CONF_ACCESS_TOKEN
from homeassistant.helpers.aiohttp_client import async_get_clientsession
import homeassistant.helpers.config_validation as cv
from homeassistant.components.notify import (PLATFORM_SCHEMA,
BaseNotificationService)
_LOGGER = logging.getLogger(__name__)
_RESOURCE = 'https://api.flock.com/hooks/sendMessage/'
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_ACCESS_TOKEN): cv.string,
})
async def get_service(hass, config, discovery_info=None):
"""Get the Flock notification service."""
access_token = config.get(CONF_ACCESS_TOKEN)
url = '{}{}'.format(_RESOURCE, access_token)
session = async_get_clientsession(hass)
return FlockNotificationService(url, session, hass.loop)
class FlockNotificationService(BaseNotificationService):
"""Implement the notification service for Flock."""
def __init__(self, url, session, loop):
"""Initialize the Flock notification service."""
self._loop = loop
self._url = url
self._session = session
async def async_send_message(self, message, **kwargs):
"""Send the message to the user."""
payload = {'text': message}
_LOGGER.debug("Attempting to call Flock at %s", self._url)
try:
with async_timeout.timeout(10, loop=self._loop):
response = await self._session.post(self._url, json=payload)
result = await response.json()
if response.status != 200 or 'error' in result:
_LOGGER.error(
"Flock service returned HTTP status %d, response %s",
response.status, result)
except asyncio.TimeoutError:
_LOGGER.error("Timeout accessing Flock at %s", self._url)
| {
"content_hash": "c3e1b7df5b373e81d907bad5b7e22e0d",
"timestamp": "",
"source": "github",
"line_count": 57,
"max_line_length": 76,
"avg_line_length": 34.26315789473684,
"alnum_prop": 0.65284178187404,
"repo_name": "MartinHjelmare/home-assistant",
"id": "384bf26599ad221ebd78322af3e670dcdd6db374",
"size": "1953",
"binary": false,
"copies": "5",
"ref": "refs/heads/dev",
"path": "homeassistant/components/flock/notify.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1175"
},
{
"name": "Dockerfile",
"bytes": "1081"
},
{
"name": "Python",
"bytes": "15222591"
},
{
"name": "Ruby",
"bytes": "745"
},
{
"name": "Shell",
"bytes": "17609"
}
],
"symlink_target": ""
} |
# NOTE: `render_template` must be imported for these handlers; `app` is
# assumed to be the Flask application instance created elsewhere in this
# package.
from flask import render_template
@app.errorhandler(404)
def page_not_found(e):
return render_template('404.html'), 404
@app.errorhandler(500)
def internal_server_error(e):
    return render_template('500.html'), 500
| {
"content_hash": "c6b420853801fb2abe6e179fa3bd1fde",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 43,
"avg_line_length": 23.5,
"alnum_prop": 0.7180851063829787,
"repo_name": "Walk-ContryRoad/Lemon",
"id": "76eab7f5da586905254b346499098622d8ccbbaa",
"size": "188",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lemon_master/app/main/errors.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "372731"
},
{
"name": "HTML",
"bytes": "113593"
},
{
"name": "JavaScript",
"bytes": "50222"
},
{
"name": "Python",
"bytes": "72783"
},
{
"name": "Shell",
"bytes": "1227"
}
],
"symlink_target": ""
} |
"""Helpers to easily generate properly formatted remarkup."""
# =============================================================================
# CONTENTS
# -----------------------------------------------------------------------------
# phlcon_remarkup
#
# Public Functions:
# code_block
# dict_to_table
# encased
# bold
# italic
# monospaced
# deleted
# link
#
# -----------------------------------------------------------------------------
# (this contents block is generated, edits will be lost)
# =============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def code_block(message, lang=None, name=None, lines=None, isBad=False):
"""Return a string code block.
Note that the code block should start on a new line and will insert
a blank line at the start and end of itself. If there is already a
    preceding blank line then the block will not display correctly.
e.g. good:
"hello\\n" + code_block("goodbye")
e.g. bad:
"hello" + code_block("goodbye")
e.g. bad:
"hello\\n\\n" + code_block("goodbye")
>>> code_block("hello")
'\\n```hello```\\n\\n'
:message: the string to be formatted as a code block
:lang: language that the code block is in, e.g. 'text', 'html'
:name: name of the file the block represents
:lines: number of lines to limit the viewport to (will have scrollbars)
:isBad: whether to format the snippet as a 'counter example'
:returns: string of the new wrapped code block
"""
block = "\n```"
attributes = []
if lang is not None:
attributes.append("lang=" + lang)
if name is not None:
attributes.append("name=" + name)
if lines is not None:
attributes.append("lines=" + str(int(lines)))
if isBad:
attributes.append("counterexample")
if attributes:
block += ', '.join(attributes) + "\n"
block += message + "```\n\n"
return block
def dict_to_table(dictionary):
"""Return a string table from the supplied dictionary.
Note that the table should start on a new line and will insert a blank
line at the start of itself and at the end.
e.g. good:
"hello\n" + dict_to_table({"a": "b"})
e.g. bad:
"hello" + dict_to_table({"a": "b"})
>>> dict_to_table({"a": "b"})
'\\n| **a** | b |\\n\\n'
>>> dict_to_table({})
''
:dictionary: the string to be formatted as a code block
:returns: string of the new wrapped code block
"""
block = ""
if dictionary:
block = "\n"
        # dict.items() works on both Python 2 and 3; iteritems() is Py2-only.
        for (key, value) in dictionary.items():
block += "| **" + str(key) + "** | " + str(value) + " |\n"
block += "\n"
return block
def encased(message, marker):
"""Return string 'message' encased in specified marker at both ends.
>>> encased("hello", "**")
'**hello**'
:message: the string to be encased
:marker: the string to encase with
:returns: 'message' encased in markers
"""
return marker + message + marker
def bold(message):
"""Return string 'message' encased in bold markers.
>>> bold("hello")
'**hello**'
:message: the string to be encased
:returns: 'message' encased in bold markers
"""
return encased(message, "**")
def italic(message):
"""Return string 'message' encased in italic markers.
>>> italic("hello")
'//hello//'
:message: the string to be encased
:returns: 'message' encased in italic markers
"""
return encased(message, "//")
def monospaced(message):
"""Return string 'message' encased in monospace markers.
>>> monospaced("hello")
'`hello`'
:message: the string to be encased
:returns: 'message' encased in monospace markers
"""
return encased(message, "`")
def deleted(message):
"""Return string 'message' encased in deleted markers.
>>> deleted("hello")
'~~hello~~'
:message: the string to be encased
:returns: 'message' encased in deleted markers
"""
return encased(message, "~~")
def link(url):
"""Return 'url' explicitly remarked up as a link.
>>> link("http://server.test/")
'[[http://server.test/]]'
:url: the string to be remarked up
:returns: 'url' encased in appropriate remarkup
"""
return "[[{url}]]".format(url=url)
# -----------------------------------------------------------------------------
# Copyright (C) 2013-2014 Bloomberg Finance L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------ END-OF-FILE ----------------------------------
| {
"content_hash": "1cf04b0d6ce69587535f544f616aa622",
"timestamp": "",
"source": "github",
"line_count": 200,
"max_line_length": 79,
"avg_line_length": 26.11,
"alnum_prop": 0.5683646112600537,
"repo_name": "aevri/phabricator-tools",
"id": "d2b7659cdece92e705fda761bedb94c9edef51d6",
"size": "5222",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "py/phl/phlcon_remarkup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "342"
},
{
"name": "HTML",
"bytes": "471"
},
{
"name": "Puppet",
"bytes": "4016"
},
{
"name": "Python",
"bytes": "1069073"
},
{
"name": "Ruby",
"bytes": "1945"
},
{
"name": "Shell",
"bytes": "135331"
}
],
"symlink_target": ""
} |
from copy import copy
import warnings
import pytz
import pandas as pd
import numpy as np
from datetime import datetime
from itertools import groupby, chain
from six.moves import filter
from six import (
exec_,
iteritems,
string_types,
)
from operator import attrgetter
from zipline.errors import (
OrderDuringInitialize,
OverrideCommissionPostInit,
OverrideSlippagePostInit,
RegisterTradingControlPostInit,
RegisterAccountControlPostInit,
UnsupportedCommissionModel,
UnsupportedOrderParameters,
UnsupportedSlippageModel,
)
from zipline.finance.trading import TradingEnvironment
from zipline.finance.blotter import Blotter
from zipline.finance.commission import PerShare, PerTrade, PerDollar
from zipline.finance.controls import (
LongOnly,
MaxOrderCount,
MaxOrderSize,
MaxPositionSize,
MaxLeverage,
RestrictedListOrder
)
from zipline.finance.execution import (
LimitOrder,
MarketOrder,
StopLimitOrder,
StopOrder,
)
from zipline.finance.performance import PerformanceTracker
from zipline.finance.slippage import (
VolumeShareSlippage,
SlippageModel,
transact_partial
)
from zipline.assets import Asset, Future
from zipline.assets.futures import FutureChain
from zipline.gens.composites import date_sorted_sources
from zipline.gens.tradesimulation import AlgorithmSimulator
from zipline.sources import DataFrameSource, DataPanelSource
from zipline.utils.api_support import ZiplineAPI, api_method
import zipline.utils.events
from zipline.utils.events import (
EventManager,
make_eventrule,
DateRuleFactory,
TimeRuleFactory,
)
from zipline.utils.factory import create_simulation_parameters
from zipline.utils.math_utils import tolerant_equals
import zipline.protocol
from zipline.protocol import Event
from zipline.history import HistorySpec
from zipline.history.history_container import HistoryContainer
DEFAULT_CAPITAL_BASE = float("1.0e5")
class TradingAlgorithm(object):
"""
Base class for trading algorithms. Inherit and overload
initialize() and handle_data(data).
A new algorithm could look like this:
```
from zipline.api import order, symbol
def initialize(context):
context.sid = symbol('AAPL')
context.amount = 100
def handle_data(context, data):
sid = context.sid
amount = context.amount
order(sid, amount)
```
To then to run this algorithm pass these functions to
TradingAlgorithm:
my_algo = TradingAlgorithm(initialize, handle_data)
stats = my_algo.run(data)
"""
def __init__(self, *args, **kwargs):
"""Initialize sids and other state variables.
:Arguments:
:Optional:
initialize : function
Function that is called with a single
            argument at the beginning of the simulation.
handle_data : function
Function that is called with 2 arguments
(context and data) on every bar.
script : str
Algoscript that contains initialize and
handle_data function definition.
data_frequency : {'daily', 'minute'}
The duration of the bars.
capital_base : float <default: 1.0e5>
How much capital to start with.
instant_fill : bool <default: False>
Whether to fill orders immediately or on next bar.
asset_finder : An AssetFinder object
A new AssetFinder object to be used in this TradingEnvironment
asset_metadata: can be either:
- dict
- pandas.DataFrame
- object with 'read' property
If dict is provided, it must have the following structure:
* keys are the identifiers
* values are dicts containing the metadata, with the metadata
field name as the key
If pandas.DataFrame is provided, it must have the
following structure:
* column names must be the metadata fields
* index must be the different asset identifiers
* array contents should be the metadata value
If an object with a 'read' property is provided, 'read' must
return rows containing at least one of 'sid' or 'symbol' along
with the other metadata fields.
identifiers : List
Any asset identifiers that are not provided in the
asset_metadata, but will be traded by this TradingAlgorithm
"""
self.sources = []
# List of trading controls to be used to validate orders.
self.trading_controls = []
# List of account controls to be checked on each bar.
self.account_controls = []
self._recorded_vars = {}
self.namespace = kwargs.get('namespace', {})
self._platform = kwargs.pop('platform', 'zipline')
self.logger = None
self.benchmark_return_source = None
# default components for transact
self.slippage = VolumeShareSlippage()
self.commission = PerShare()
self.instant_fill = kwargs.pop('instant_fill', False)
# set the capital base
self.capital_base = kwargs.pop('capital_base', DEFAULT_CAPITAL_BASE)
self.sim_params = kwargs.pop('sim_params', None)
if self.sim_params is None:
self.sim_params = create_simulation_parameters(
capital_base=self.capital_base,
start=kwargs.pop('start', None),
end=kwargs.pop('end', None)
)
self.perf_tracker = PerformanceTracker(self.sim_params)
# Update the TradingEnvironment with the provided asset metadata
self.trading_environment = kwargs.pop('env',
TradingEnvironment.instance())
self.trading_environment.update_asset_finder(
asset_finder=kwargs.pop('asset_finder', None),
asset_metadata=kwargs.pop('asset_metadata', None),
identifiers=kwargs.pop('identifiers', None)
)
# Pull in the environment's new AssetFinder for quick reference
self.asset_finder = self.trading_environment.asset_finder
self.blotter = kwargs.pop('blotter', None)
if not self.blotter:
self.blotter = Blotter()
        # Set the dt initially to the period start by forcing it to change
self.on_dt_changed(self.sim_params.period_start)
self.portfolio_needs_update = True
self.account_needs_update = True
self.performance_needs_update = True
self._portfolio = None
self._account = None
self.history_container_class = kwargs.pop(
'history_container_class', HistoryContainer,
)
self.history_container = None
self.history_specs = {}
# If string is passed in, execute and get reference to
# functions.
self.algoscript = kwargs.pop('script', None)
self._initialize = None
self._before_trading_start = None
self._analyze = None
self.event_manager = EventManager()
if self.algoscript is not None:
filename = kwargs.pop('algo_filename', None)
if filename is None:
filename = '<string>'
code = compile(self.algoscript, filename, 'exec')
exec_(code, self.namespace)
self._initialize = self.namespace.get('initialize')
if 'handle_data' not in self.namespace:
raise ValueError('You must define a handle_data function.')
else:
self._handle_data = self.namespace['handle_data']
self._before_trading_start = \
self.namespace.get('before_trading_start')
# Optional analyze function, gets called after run
self._analyze = self.namespace.get('analyze')
elif kwargs.get('initialize') and kwargs.get('handle_data'):
if self.algoscript is not None:
raise ValueError('You can not set script and \
initialize/handle_data.')
self._initialize = kwargs.pop('initialize')
self._handle_data = kwargs.pop('handle_data')
self._before_trading_start = kwargs.pop('before_trading_start',
None)
self.event_manager.add_event(
zipline.utils.events.Event(
zipline.utils.events.Always(),
# We pass handle_data.__func__ to get the unbound method.
# We will explicitly pass the algorithm to bind it again.
self.handle_data.__func__,
),
prepend=True,
)
# If method not defined, NOOP
if self._initialize is None:
self._initialize = lambda x: None
# Alternative way of setting data_frequency for backwards
# compatibility.
if 'data_frequency' in kwargs:
self.data_frequency = kwargs.pop('data_frequency')
self._most_recent_data = None
# Prepare the algo for initialization
self.initialized = False
self.initialize_args = args
self.initialize_kwargs = kwargs
def initialize(self, *args, **kwargs):
"""
Call self._initialize with `self` made available to Zipline API
functions.
"""
with ZiplineAPI(self):
self._initialize(self)
def before_trading_start(self):
if self._before_trading_start is None:
return
self._before_trading_start(self)
def handle_data(self, data):
self._most_recent_data = data
if self.history_container:
self.history_container.update(data, self.datetime)
self._handle_data(self, data)
# Unlike trading controls which remain constant unless placing an
# order, account controls can change each bar. Thus, must check
# every bar no matter if the algorithm places an order or not.
self.validate_account_controls()
def analyze(self, perf):
if self._analyze is None:
return
with ZiplineAPI(self):
self._analyze(self, perf)
def __repr__(self):
"""
N.B. this does not yet represent a string that can be used
to instantiate an exact copy of an algorithm.
However, it is getting close, and provides some value as something
that can be inspected interactively.
"""
return """
{class_name}(
capital_base={capital_base}
sim_params={sim_params},
initialized={initialized},
slippage={slippage},
commission={commission},
blotter={blotter},
recorded_vars={recorded_vars})
""".strip().format(class_name=self.__class__.__name__,
capital_base=self.capital_base,
sim_params=repr(self.sim_params),
initialized=self.initialized,
slippage=repr(self.slippage),
commission=repr(self.commission),
blotter=repr(self.blotter),
recorded_vars=repr(self.recorded_vars))
def _create_data_generator(self, source_filter, sim_params=None):
"""
Create a merged data generator using the sources attached to this
algorithm.
::source_filter:: is a method that receives events in date
sorted order, and returns True for those events that should be
processed by the zipline, and False for those that should be
skipped.
"""
if sim_params is None:
sim_params = self.sim_params
if self.benchmark_return_source is None:
if sim_params.data_frequency == 'minute' or \
sim_params.emission_rate == 'minute':
def update_time(date):
return self.trading_environment.get_open_and_close(date)[1]
else:
def update_time(date):
return date
benchmark_return_source = [
Event({'dt': update_time(dt),
'returns': ret,
'type': zipline.protocol.DATASOURCE_TYPE.BENCHMARK,
'source_id': 'benchmarks'})
for dt, ret in
self.trading_environment.benchmark_returns.iteritems()
if dt.date() >= sim_params.period_start.date() and
dt.date() <= sim_params.period_end.date()
]
else:
benchmark_return_source = self.benchmark_return_source
date_sorted = date_sorted_sources(*self.sources)
if source_filter:
date_sorted = filter(source_filter, date_sorted)
with_benchmarks = date_sorted_sources(benchmark_return_source,
date_sorted)
# Group together events with the same dt field. This depends on the
# events already being sorted.
return groupby(with_benchmarks, attrgetter('dt'))
def _create_generator(self, sim_params, source_filter=None):
"""
Create a basic generator setup using the sources to this algorithm.
::source_filter:: is a method that receives events in date
sorted order, and returns True for those events that should be
processed by the zipline, and False for those that should be
skipped.
"""
if not self.initialized:
self.initialize(*self.initialize_args, **self.initialize_kwargs)
self.initialized = True
if self.perf_tracker is None:
# HACK: When running with the `run` method, we set perf_tracker to
# None so that it will be overwritten here.
self.perf_tracker = PerformanceTracker(sim_params)
self.portfolio_needs_update = True
self.account_needs_update = True
self.performance_needs_update = True
self.data_gen = self._create_data_generator(source_filter, sim_params)
self.trading_client = AlgorithmSimulator(self, sim_params)
transact_method = transact_partial(self.slippage, self.commission)
self.set_transact(transact_method)
return self.trading_client.transform(self.data_gen)
def get_generator(self):
"""
Override this method to add new logic to the construction
of the generator. Overrides can use the _create_generator
method to get a standard construction generator.
"""
return self._create_generator(self.sim_params)
# TODO: make a new subclass, e.g. BatchAlgorithm, and move
# the run method to the subclass, and refactor to put the
# generator creation logic into get_generator.
def run(self, source, overwrite_sim_params=True,
benchmark_return_source=None):
"""Run the algorithm.
:Arguments:
source : can be either:
- pandas.DataFrame
- zipline source
- list of sources
If pandas.DataFrame is provided, it must have the
following structure:
* column names must be the different asset identifiers
* index must be DatetimeIndex
* array contents should be price info.
:Returns:
daily_stats : pandas.DataFrame
Daily performance metrics such as returns, alpha etc.
"""
# Ensure that source is a DataSource object
if isinstance(source, list):
if overwrite_sim_params:
warnings.warn("""List of sources passed, will not attempt to extract start and end
dates. Make sure to set the correct fields in sim_params passed to
__init__().""", UserWarning)
overwrite_sim_params = False
elif isinstance(source, pd.DataFrame):
# if DataFrame provided, map columns to sids and wrap
# in DataFrameSource
copy_frame = source.copy()
copy_frame.columns = \
self.asset_finder.map_identifier_index_to_sids(
source.columns, source.index[0]
)
source = DataFrameSource(copy_frame)
elif isinstance(source, pd.Panel):
# If Panel provided, map items to sids and wrap
# in DataPanelSource
copy_panel = source.copy()
copy_panel.items = self.asset_finder.map_identifier_index_to_sids(
source.items, source.major_axis[0]
)
source = DataPanelSource(copy_panel)
if isinstance(source, list):
self.set_sources(source)
else:
self.set_sources([source])
# Override sim_params if params are provided by the source.
if overwrite_sim_params:
if hasattr(source, 'start'):
self.sim_params.period_start = source.start
if hasattr(source, 'end'):
self.sim_params.period_end = source.end
# Changing period_start and period_close might require updating
# of first_open and last_close.
self.sim_params._update_internal()
# The sids field of the source is the reference for the universe at
# the start of the run
self._current_universe = set()
for source in self.sources:
for sid in source.sids:
self._current_universe.add(sid)
# Check that all sids from the source are accounted for in
# the AssetFinder. This retrieve call will raise an exception if the
# sid is not found.
for sid in self._current_universe:
self.asset_finder.retrieve_asset(sid)
# force a reset of the performance tracker, in case
# this is a repeat run of the algorithm.
self.perf_tracker = None
# create zipline
self.gen = self._create_generator(self.sim_params)
# Create history containers
if self.history_specs:
self.history_container = self.history_container_class(
self.history_specs,
self.current_universe(),
self.sim_params.first_open,
self.sim_params.data_frequency,
)
# loop through simulated_trading, each iteration returns a
# perf dictionary
perfs = []
for perf in self.gen:
perfs.append(perf)
# convert perf dict to pandas dataframe
daily_stats = self._create_daily_stats(perfs)
self.analyze(daily_stats)
return daily_stats
def _create_daily_stats(self, perfs):
# create daily and cumulative stats dataframe
daily_perfs = []
# TODO: the loop here could overwrite expected properties
# of daily_perf. Could potentially raise or log a
# warning.
for perf in perfs:
if 'daily_perf' in perf:
perf['daily_perf'].update(
perf['daily_perf'].pop('recorded_vars')
)
perf['daily_perf'].update(perf['cumulative_risk_metrics'])
daily_perfs.append(perf['daily_perf'])
else:
self.risk_report = perf
daily_dts = [np.datetime64(perf['period_close'], utc=True)
for perf in daily_perfs]
daily_stats = pd.DataFrame(daily_perfs, index=daily_dts)
return daily_stats
@api_method
def add_transform(self, transform, days=None):
"""
Ensures that the history container will have enough size to service
a simple transform.
:Arguments:
transform : string
The transform to add. must be an element of:
{'mavg', 'stddev', 'vwap', 'returns'}.
days : int <default=None>
The maximum amount of days you will want for this transform.
This is not needed for 'returns'.
"""
if transform not in {'mavg', 'stddev', 'vwap', 'returns'}:
raise ValueError('Invalid transform')
if transform == 'returns':
if days is not None:
                raise ValueError("'returns' does not take a days argument")
self.add_history(2, '1d', 'price')
return
elif days is None:
raise ValueError('no number of days specified')
if self.sim_params.data_frequency == 'daily':
mult = 1
freq = '1d'
else:
mult = 390
freq = '1m'
bars = mult * days
self.add_history(bars, freq, 'price')
if transform == 'vwap':
self.add_history(bars, freq, 'volume')
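    # Illustrative note (not from the original source): in daily mode,
    # add_transform('mavg', days=5) registers add_history(5, '1d', 'price');
    # in minute mode the same call requests 5 * 390 = 1950 one-minute bars,
    # and a 'vwap' transform additionally requests the matching 'volume'
    # history.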
@api_method
def get_environment(self, field='platform'):
env = {
'arena': self.sim_params.arena,
'data_frequency': self.sim_params.data_frequency,
'start': self.sim_params.first_open,
'end': self.sim_params.last_close,
'capital_base': self.sim_params.capital_base,
'platform': self._platform
}
if field == '*':
return env
else:
return env[field]
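    # Usage sketch (illustrative): get_environment() defaults to the
    # 'platform' field, get_environment('data_frequency') returns a single
    # value, and get_environment('*') returns the whole six-field dict.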
def add_event(self, rule=None, callback=None):
"""
Adds an event to the algorithm's EventManager.
"""
self.event_manager.add_event(
zipline.utils.events.Event(rule, callback),
)
@api_method
def schedule_function(self,
func,
date_rule=None,
time_rule=None,
half_days=True):
"""
Schedules a function to be called with some timed rules.
"""
date_rule = date_rule or DateRuleFactory.every_day()
time_rule = ((time_rule or TimeRuleFactory.market_open())
if self.sim_params.data_frequency == 'minute' else
# If we are in daily mode the time_rule is ignored.
zipline.utils.events.Always())
self.add_event(
make_eventrule(date_rule, time_rule, half_days),
func,
)
@api_method
def record(self, *args, **kwargs):
"""
        Track and record local variables (i.e. attributes) each day.
"""
# Make 2 objects both referencing the same iterator
args = [iter(args)] * 2
# Zip generates list entries by calling `next` on each iterator it
# receives. In this case the two iterators are the same object, so the
# call to next on args[0] will also advance args[1], resulting in zip
# returning (a,b) (c,d) (e,f) rather than (a,a) (b,b) (c,c) etc.
positionals = zip(*args)
for name, value in chain(positionals, iteritems(kwargs)):
self._recorded_vars[name] = value
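    # Illustrative sketch (not from the original source): thanks to the
    # paired-iterator trick above, both call styles below are equivalent and
    # leave _recorded_vars == {'mavg': 10.5, 'vol': 1200}:
    #   algo.record('mavg', 10.5, 'vol', 1200)
    #   algo.record(mavg=10.5, vol=1200)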
@api_method
def symbol(self, symbol_str):
"""
Default symbol lookup for any source that directly maps the
symbol to the Asset (e.g. yahoo finance).
"""
return self.asset_finder.lookup_symbol_resolve_multiple(
symbol_str,
as_of_date=self.datetime)
@api_method
def symbols(self, *args):
"""
Default symbols lookup for any source that directly maps the
symbol to the Asset (e.g. yahoo finance).
"""
return [self.symbol(identifier) for identifier in args]
@api_method
def sid(self, a_sid):
"""
Default sid lookup for any source that directly maps the integer sid
to the Asset.
"""
return self.asset_finder.retrieve_asset(a_sid)
@api_method
def future_chain(self, root_symbol, as_of_date=None):
""" Look up a future chain with the specified parameters.
Parameters
----------
root_symbol : str
The root symbol of a future chain.
as_of_date : datetime.datetime or pandas.Timestamp or str, optional
Date at which the chain determination is rooted. I.e. the
existing contract whose notice date is first after this date is
the primary contract, etc.
Returns
-------
FutureChain
The future chain matching the specified parameters.
Raises
------
RootSymbolNotFound
If a future chain could not be found for the given root symbol.
"""
return FutureChain(
asset_finder=self.asset_finder,
get_datetime=self.get_datetime,
root_symbol=root_symbol.upper(),
as_of_date=as_of_date
)
def _calculate_order_value_amount(self, asset, value):
"""
Calculates how many shares/contracts to order based on the type of
asset being ordered.
"""
last_price = self.trading_client.current_data[asset].price
if tolerant_equals(last_price, 0):
zero_message = "Price of 0 for {psid}; can't infer value".format(
psid=asset
)
if self.logger:
self.logger.debug(zero_message)
# Don't place any order
return 0
if isinstance(asset, Future):
value_multiplier = asset.contract_multiplier
else:
value_multiplier = 1
return value / (last_price * value_multiplier)
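    # Worked example (illustrative): ordering $10,000 of an equity trading at
    # $50 yields 10000 / (50 * 1) = 200 shares; for a future with a contract
    # multiplier of 1000 at the same price it yields 10000 / (50 * 1000) = 0.2
    # contracts, which order() subsequently truncates toward zero.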
@api_method
def order(self, sid, amount,
limit_price=None,
stop_price=None,
style=None):
"""
Place an order using the specified parameters.
"""
def round_if_near_integer(a, epsilon=1e-4):
"""
Round a to the nearest integer if that integer is within an epsilon
of a.
"""
if abs(a - round(a)) <= epsilon:
return round(a)
else:
return a
# Truncate to the integer share count that's either within .0001 of
# amount or closer to zero.
# E.g. 3.9999 -> 4.0; 5.5 -> 5.0; -5.5 -> -5.0
amount = int(round_if_near_integer(amount))
# Raises a ZiplineError if invalid parameters are detected.
self.validate_order_params(sid,
amount,
limit_price,
stop_price,
style)
# Convert deprecated limit_price and stop_price parameters to use
# ExecutionStyle objects.
style = self.__convert_order_params_for_blotter(limit_price,
stop_price,
style)
return self.blotter.order(sid, amount, style)
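    # Hedged usage sketch: the deprecated price parameters are mapped onto
    # ExecutionStyle objects, so these two calls are equivalent:
    #   algo.order(asset, 100, limit_price=10.0)
    #   algo.order(asset, 100, style=LimitOrder(10.0))
    # Passing both a price argument and a style raises
    # UnsupportedOrderParameters (see validate_order_params below).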
def validate_order_params(self,
asset,
amount,
limit_price,
stop_price,
style):
"""
Helper method for validating parameters to the order API function.
Raises an UnsupportedOrderParameters if invalid arguments are found.
"""
if not self.initialized:
raise OrderDuringInitialize(
msg="order() can only be called from within handle_data()"
)
if style:
if limit_price:
raise UnsupportedOrderParameters(
msg="Passing both limit_price and style is not supported."
)
if stop_price:
raise UnsupportedOrderParameters(
msg="Passing both stop_price and style is not supported."
)
if not isinstance(asset, Asset):
raise UnsupportedOrderParameters(
msg="Passing non-Asset argument to 'order()' is not supported."
" Use 'sid()' or 'symbol()' methods to look up an Asset."
)
for control in self.trading_controls:
control.validate(asset,
amount,
self.updated_portfolio(),
self.get_datetime(),
self.trading_client.current_data)
@staticmethod
def __convert_order_params_for_blotter(limit_price, stop_price, style):
"""
Helper method for converting deprecated limit_price and stop_price
arguments into ExecutionStyle instances.
This function assumes that either style == None or (limit_price,
stop_price) == (None, None).
"""
# TODO_SS: DeprecationWarning for usage of limit_price and stop_price.
if style:
assert (limit_price, stop_price) == (None, None)
return style
if limit_price and stop_price:
return StopLimitOrder(limit_price, stop_price)
if limit_price:
return LimitOrder(limit_price)
if stop_price:
return StopOrder(stop_price)
else:
return MarketOrder()
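    # Summary of the conversion above (illustrative):
    #   (limit, stop) -> StopLimitOrder(limit, stop)
    #   (limit, None) -> LimitOrder(limit)
    #   (None,  stop) -> StopOrder(stop)
    #   (None,  None) -> MarketOrder()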
@api_method
def order_value(self, sid, value,
limit_price=None, stop_price=None, style=None):
"""
Place an order by desired value rather than desired number of shares.
If the requested sid is found in the universe, the requested value is
divided by its price to imply the number of shares to transact.
If the Asset being ordered is a Future, the 'value' calculated
is actually the exposure, as Futures have no 'value'.
value > 0 :: Buy/Cover
value < 0 :: Sell/Short
Market order: order(sid, value)
Limit order: order(sid, value, limit_price)
Stop order: order(sid, value, None, stop_price)
StopLimit order: order(sid, value, limit_price, stop_price)
"""
amount = self._calculate_order_value_amount(sid, value)
return self.order(sid, amount,
limit_price=limit_price,
stop_price=stop_price,
style=style)
@property
def recorded_vars(self):
return copy(self._recorded_vars)
@property
def portfolio(self):
return self.updated_portfolio()
def updated_portfolio(self):
if self.portfolio_needs_update:
self._portfolio = \
self.perf_tracker.get_portfolio(self.performance_needs_update)
self.portfolio_needs_update = False
self.performance_needs_update = False
return self._portfolio
@property
def account(self):
return self.updated_account()
def updated_account(self):
if self.account_needs_update:
self._account = \
self.perf_tracker.get_account(self.performance_needs_update)
self.account_needs_update = False
self.performance_needs_update = False
return self._account
def set_logger(self, logger):
self.logger = logger
def on_dt_changed(self, dt):
"""
Callback triggered by the simulation loop whenever the current dt
changes.
Any logic that should happen exactly once at the start of each datetime
group should happen here.
"""
assert isinstance(dt, datetime), \
"Attempt to set algorithm's current time with non-datetime"
assert dt.tzinfo == pytz.utc, \
"Algorithm expects a utc datetime"
self.datetime = dt
self.perf_tracker.set_date(dt)
self.blotter.set_date(dt)
@api_method
def get_datetime(self, tz=None):
"""
Returns the simulation datetime.
"""
dt = self.datetime
assert dt.tzinfo == pytz.utc, "Algorithm should have a utc datetime"
if tz is not None:
# Convert to the given timezone passed as a string or tzinfo.
if isinstance(tz, string_types):
tz = pytz.timezone(tz)
dt = dt.astimezone(tz)
return dt # datetime.datetime objects are immutable.
def set_transact(self, transact):
"""
Set the method that will be called to create a
transaction from open orders and trade events.
"""
self.blotter.transact = transact
def update_dividends(self, dividend_frame):
"""
Set DataFrame used to process dividends. DataFrame columns should
contain at least the entries in zp.DIVIDEND_FIELDS.
"""
self.perf_tracker.update_dividends(dividend_frame)
@api_method
def set_slippage(self, slippage):
if not isinstance(slippage, SlippageModel):
raise UnsupportedSlippageModel()
if self.initialized:
raise OverrideSlippagePostInit()
self.slippage = slippage
@api_method
def set_commission(self, commission):
if not isinstance(commission, (PerShare, PerTrade, PerDollar)):
raise UnsupportedCommissionModel()
if self.initialized:
raise OverrideCommissionPostInit()
self.commission = commission
def set_sources(self, sources):
assert isinstance(sources, list)
self.sources = sources
    # Retained for backwards compatibility
@property
def data_frequency(self):
return self.sim_params.data_frequency
@data_frequency.setter
def data_frequency(self, value):
assert value in ('daily', 'minute')
self.sim_params.data_frequency = value
@api_method
def order_percent(self, sid, percent,
limit_price=None, stop_price=None, style=None):
"""
Place an order in the specified asset corresponding to the given
percent of the current portfolio value.
        Note that percent must be expressed as a decimal (0.50 means 50%).
"""
value = self.portfolio.portfolio_value * percent
return self.order_value(sid, value,
limit_price=limit_price,
stop_price=stop_price,
style=style)
@api_method
def order_target(self, sid, target,
limit_price=None, stop_price=None, style=None):
"""
Place an order to adjust a position to a target number of shares. If
the position doesn't already exist, this is equivalent to placing a new
order. If the position does exist, this is equivalent to placing an
order for the difference between the target number of shares and the
current number of shares.
"""
if sid in self.portfolio.positions:
current_position = self.portfolio.positions[sid].amount
req_shares = target - current_position
return self.order(sid, req_shares,
limit_price=limit_price,
stop_price=stop_price,
style=style)
else:
return self.order(sid, target,
limit_price=limit_price,
stop_price=stop_price,
style=style)
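    # Worked example (illustrative): holding 100 shares of an asset,
    # order_target(asset, 150) orders +50 shares and order_target(asset, 0)
    # orders -100 shares; with no existing position, order_target(asset, 150)
    # orders the full 150.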
@api_method
def order_target_value(self, sid, target,
limit_price=None, stop_price=None, style=None):
"""
Place an order to adjust a position to a target value. If
the position doesn't already exist, this is equivalent to placing a new
order. If the position does exist, this is equivalent to placing an
order for the difference between the target value and the
current value.
If the Asset being ordered is a Future, the 'target value' calculated
is actually the target exposure, as Futures have no 'value'.
"""
target_amount = self._calculate_order_value_amount(sid, target)
return self.order_target(sid, target_amount,
limit_price=limit_price,
stop_price=stop_price,
style=style)
@api_method
def order_target_percent(self, sid, target,
limit_price=None, stop_price=None, style=None):
"""
Place an order to adjust a position to a target percent of the
current portfolio value. If the position doesn't already exist, this is
equivalent to placing a new order. If the position does exist, this is
equivalent to placing an order for the difference between the target
percent and the current percent.
        Note that target must be expressed as a decimal (0.50 means 50%).
"""
target_value = self.portfolio.portfolio_value * target
return self.order_target_value(sid, target_value,
limit_price=limit_price,
stop_price=stop_price,
style=style)
@api_method
def get_open_orders(self, sid=None):
if sid is None:
return {
key: [order.to_api_obj() for order in orders]
for key, orders in iteritems(self.blotter.open_orders)
if orders
}
if sid in self.blotter.open_orders:
orders = self.blotter.open_orders[sid]
return [order.to_api_obj() for order in orders]
return []
@api_method
def get_order(self, order_id):
if order_id in self.blotter.orders:
return self.blotter.orders[order_id].to_api_obj()
@api_method
def cancel_order(self, order_param):
order_id = order_param
if isinstance(order_param, zipline.protocol.Order):
order_id = order_param.id
self.blotter.cancel(order_id)
@api_method
def add_history(self, bar_count, frequency, field, ffill=True):
data_frequency = self.sim_params.data_frequency
history_spec = HistorySpec(bar_count, frequency, field, ffill,
data_frequency=data_frequency)
self.history_specs[history_spec.key_str] = history_spec
if self.initialized:
if self.history_container:
self.history_container.ensure_spec(
history_spec, self.datetime, self._most_recent_data,
)
else:
self.history_container = self.history_container_class(
self.history_specs,
self.current_universe(),
self.sim_params.first_open,
self.sim_params.data_frequency,
)
def get_history_spec(self, bar_count, frequency, field, ffill):
spec_key = HistorySpec.spec_key(bar_count, frequency, field, ffill)
if spec_key not in self.history_specs:
data_freq = self.sim_params.data_frequency
spec = HistorySpec(
bar_count,
frequency,
field,
ffill,
data_frequency=data_freq,
)
self.history_specs[spec_key] = spec
if not self.history_container:
self.history_container = self.history_container_class(
self.history_specs,
self.current_universe(),
self.datetime,
self.sim_params.data_frequency,
bar_data=self._most_recent_data,
)
self.history_container.ensure_spec(
spec, self.datetime, self._most_recent_data,
)
return self.history_specs[spec_key]
@api_method
def history(self, bar_count, frequency, field, ffill=True):
history_spec = self.get_history_spec(
bar_count,
frequency,
field,
ffill,
)
return self.history_container.get_history(history_spec, self.datetime)
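    # Hedged usage sketch: history(5, '1d', 'price') returns (in zipline's
    # convention) a pandas DataFrame of the last five daily prices, indexed
    # by date with one column per sid in the current universe; the spec is
    # created lazily on first use via get_history_spec() above.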
####################
# Account Controls #
####################
def register_account_control(self, control):
"""
Register a new AccountControl to be checked on each bar.
"""
if self.initialized:
raise RegisterAccountControlPostInit()
self.account_controls.append(control)
def validate_account_controls(self):
for control in self.account_controls:
control.validate(self.updated_portfolio(),
self.updated_account(),
self.get_datetime(),
self.trading_client.current_data)
@api_method
def set_max_leverage(self, max_leverage=None):
"""
Set a limit on the maximum leverage of the algorithm.
"""
control = MaxLeverage(max_leverage)
self.register_account_control(control)
####################
# Trading Controls #
####################
def register_trading_control(self, control):
"""
Register a new TradingControl to be checked prior to order calls.
"""
if self.initialized:
raise RegisterTradingControlPostInit()
self.trading_controls.append(control)
@api_method
def set_max_position_size(self,
sid=None,
max_shares=None,
max_notional=None):
"""
Set a limit on the number of shares and/or dollar value held for the
given sid. Limits are treated as absolute values and are enforced at
the time that the algo attempts to place an order for sid. This means
that it's possible to end up with more than the max number of shares
due to splits/dividends, and more than the max notional due to price
improvement.
If an algorithm attempts to place an order that would result in
increasing the absolute value of shares/dollar value exceeding one of
these limits, raise a TradingControlException.
"""
control = MaxPositionSize(asset=sid,
max_shares=max_shares,
max_notional=max_notional)
self.register_trading_control(control)
@api_method
def set_max_order_size(self, sid=None, max_shares=None, max_notional=None):
"""
Set a limit on the number of shares and/or dollar value of any single
order placed for sid. Limits are treated as absolute values and are
enforced at the time that the algo attempts to place an order for sid.
If an algorithm attempts to place an order that would result in
exceeding one of these limits, raise a TradingControlException.
"""
control = MaxOrderSize(asset=sid,
max_shares=max_shares,
max_notional=max_notional)
self.register_trading_control(control)
@api_method
def set_max_order_count(self, max_count):
"""
Set a limit on the number of orders that can be placed within the given
time interval.
"""
control = MaxOrderCount(max_count)
self.register_trading_control(control)
@api_method
def set_do_not_order_list(self, restricted_list):
"""
Set a restriction on which sids can be ordered.
"""
control = RestrictedListOrder(restricted_list)
self.register_trading_control(control)
@api_method
def set_long_only(self):
"""
Set a rule specifying that this algorithm cannot take short positions.
"""
self.register_trading_control(LongOnly())
def current_universe(self):
return self._current_universe
@classmethod
def all_api_methods(cls):
"""
Return a list of all the TradingAlgorithm API methods.
"""
return [fn for fn in cls.__dict__.itervalues()
if getattr(fn, 'is_api_method', False)]
| {
"content_hash": "8efbd86ca936ca7ded9d29e80abf8bb4",
"timestamp": "",
"source": "github",
"line_count": 1221,
"max_line_length": 98,
"avg_line_length": 36.202293202293205,
"alnum_prop": 0.5786258851209194,
"repo_name": "euri10/zipline",
"id": "43aea515e8f814557cc681ffad446eec905e5386",
"size": "44785",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "zipline/algorithm.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "564"
},
{
"name": "Emacs Lisp",
"bytes": "138"
},
{
"name": "Python",
"bytes": "1051979"
},
{
"name": "Shell",
"bytes": "4065"
}
],
"symlink_target": ""
} |
"""
tests.components.automation.test_mqtt
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Tests mqtt automation.
"""
import unittest
import homeassistant.core as ha
import homeassistant.components.automation as automation
from tests.common import mock_mqtt_component, fire_mqtt_message
class TestAutomationMQTT(unittest.TestCase):
""" Test the event automation. """
def setUp(self): # pylint: disable=invalid-name
self.hass = ha.HomeAssistant()
mock_mqtt_component(self.hass)
self.calls = []
def record_call(service):
self.calls.append(service)
self.hass.services.register('test', 'automation', record_call)
def tearDown(self): # pylint: disable=invalid-name
""" Stop down stuff we started. """
self.hass.stop()
def test_old_config_if_fires_on_topic_match(self):
self.assertTrue(automation.setup(self.hass, {
automation.DOMAIN: {
'platform': 'mqtt',
'mqtt_topic': 'test-topic',
'execute_service': 'test.automation'
}
}))
fire_mqtt_message(self.hass, 'test-topic', '')
self.hass.pool.block_till_done()
self.assertEqual(1, len(self.calls))
def test_old_config_if_fires_on_topic_and_payload_match(self):
self.assertTrue(automation.setup(self.hass, {
automation.DOMAIN: {
'platform': 'mqtt',
'mqtt_topic': 'test-topic',
'mqtt_payload': 'hello',
'execute_service': 'test.automation'
}
}))
fire_mqtt_message(self.hass, 'test-topic', 'hello')
self.hass.pool.block_till_done()
self.assertEqual(1, len(self.calls))
def test_old_config_if_not_fires_on_topic_but_no_payload_match(self):
self.assertTrue(automation.setup(self.hass, {
automation.DOMAIN: {
'platform': 'mqtt',
'mqtt_topic': 'test-topic',
'mqtt_payload': 'hello',
'execute_service': 'test.automation'
}
}))
fire_mqtt_message(self.hass, 'test-topic', 'no-hello')
self.hass.pool.block_till_done()
self.assertEqual(0, len(self.calls))
def test_if_fires_on_topic_match(self):
self.assertTrue(automation.setup(self.hass, {
automation.DOMAIN: {
'trigger': {
'platform': 'mqtt',
'topic': 'test-topic'
},
'action': {
'service': 'test.automation'
}
}
}))
fire_mqtt_message(self.hass, 'test-topic', '')
self.hass.pool.block_till_done()
self.assertEqual(1, len(self.calls))
def test_if_fires_on_topic_and_payload_match(self):
self.assertTrue(automation.setup(self.hass, {
automation.DOMAIN: {
'trigger': {
'platform': 'mqtt',
'topic': 'test-topic',
'payload': 'hello'
},
'action': {
'service': 'test.automation'
}
}
}))
fire_mqtt_message(self.hass, 'test-topic', 'hello')
self.hass.pool.block_till_done()
self.assertEqual(1, len(self.calls))
def test_if_not_fires_on_topic_but_no_payload_match(self):
self.assertTrue(automation.setup(self.hass, {
automation.DOMAIN: {
'trigger': {
'platform': 'mqtt',
'topic': 'test-topic',
'payload': 'hello'
},
'action': {
'service': 'test.automation'
}
}
}))
fire_mqtt_message(self.hass, 'test-topic', 'no-hello')
self.hass.pool.block_till_done()
self.assertEqual(0, len(self.calls))
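# Illustrative note (not part of the original tests): the new-style trigger
# configuration exercised above corresponds to YAML along these lines:
#
#   automation:
#     trigger:
#       platform: mqtt
#       topic: test-topic
#       payload: hello
#     action:
#       service: test.automation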
| {
"content_hash": "66572faf0174179ca50c6831df2e172f",
"timestamp": "",
"source": "github",
"line_count": 123,
"max_line_length": 73,
"avg_line_length": 32.10569105691057,
"alnum_prop": 0.5148138769308686,
"repo_name": "pottzer/home-assistant",
"id": "516eda539472c0686e275fe7bc9e27688c27e52e",
"size": "3949",
"binary": false,
"copies": "8",
"ref": "refs/heads/dev",
"path": "tests/components/automation/test_mqtt.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1017265"
},
{
"name": "Python",
"bytes": "1044045"
},
{
"name": "Shell",
"bytes": "3946"
}
],
"symlink_target": ""
} |
"""Utilities for OAuth.
Utilities for making it easier to work with OAuth 2.0
credentials.
"""
__author__ = 'jcgregorio@google.com (Joe Gregorio)'
import pickle
import threading
from client import Storage as BaseStorage
class Storage(BaseStorage):
"""Store and retrieve a single credential to and from a file."""
def __init__(self, filename):
self._filename = filename
self._lock = threading.Lock()
def get(self):
"""Retrieve Credential from file.
Returns:
apiclient.oauth2client.client.Credentials
"""
self._lock.acquire()
try:
f = open(self._filename, 'r')
credentials = pickle.loads(f.read())
f.close()
credentials.set_store(self.put)
except:
credentials = None
self._lock.release()
return credentials
def put(self, credentials):
"""Write a pickled Credentials to file.
Args:
credentials: Credentials, the credentials to store.
"""
self._lock.acquire()
f = open(self._filename, 'w')
f.write(pickle.dumps(credentials))
f.close()
self._lock.release()
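# Hedged usage sketch (the filename is a hypothetical example):
#
#   storage = Storage('credentials.dat')
#   credentials = storage.get()   # None if the file is missing or unreadable
#   if credentials is None:
#       pass  # run the OAuth flow, then storage.put(new_credentials)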
| {
"content_hash": "9f97bc4542d3222e80e350756381780c",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 66,
"avg_line_length": 21.72,
"alnum_prop": 0.6528545119705341,
"repo_name": "karalan/google-tasks-porter",
"id": "9a52ee44495f84ea7ea21a2f49f58bb03fcd93cb",
"size": "1667",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "apiclient/oauth2client/file.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "1953"
},
{
"name": "HTML",
"bytes": "16630"
},
{
"name": "Python",
"bytes": "623967"
}
],
"symlink_target": ""
} |
"""Python vCloud API Interface Test - Memchange
Desired behaviour:
We begin by getting the memory settings on a VM.
We then shut down the machine remotely and wait for this process to finish.
We then change the memory settings and wait for a signal that the change has completed.
We then activate the machine again.
We then check that the machine is reporting its memory correctly (i.e. the new value).
"""
from pytest import raises
from vcloud_settings import settings
import vcloud
from time import sleep
class storeout:
def __init__(self, key):
self.actions = {}
self.key = key
def save(self, value):
self.actions[self.key] = value
def test_main():
"""Test routines for dev. Here temporarily"""
vm_id = 'vm-b8e95c38-b899-496e-bd6b-bcfec39fc52e'
kvstore = storeout(vm_id)
boost_thread = vcloud.boost("Boost Thread", vm_id, settings, kvstore)
boost_thread.start()
kvstore.actions[vm_id] = ""
while 1:
print kvstore.actions[vm_id]
sleep(0)
    assert 1 == 1 | {
"content_hash": "e01c883de02ac139673e2cb928d1f007",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 87,
"avg_line_length": 30.470588235294116,
"alnum_prop": 0.6959459459459459,
"repo_name": "bmcollier/vctools",
"id": "a08ce3713885ecb41a8853d678e86991e2eef821",
"size": "1036",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "vctools/test/test_memchange.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "11510"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import sys
import tensorflow as tf
class ImageRecognizer:
def __init__(self, path, graph, labels, input_layer_name, output_layer_name, num_top_predictions):
self.path = path
self.graph = graph
self.labels = labels
self.input_layer_name = input_layer_name
self.output_layer_name = output_layer_name
self.num_top_predictions = num_top_predictions
def load_image(self, filename):
"""Read in the image_data to be classified."""
return tf.gfile.FastGFile(filename, 'rb').read()
def load_labels(self, filename):
"""Read in labels, one label per line."""
return [line.rstrip() for line in tf.gfile.GFile(filename)]
def load_graph(self, filename):
"""Unpersists graph from file as default graph."""
with tf.gfile.FastGFile(filename, 'rb') as f:
graph_def = tf.GraphDef()
graph_def.ParseFromString(f.read())
tf.import_graph_def(graph_def, name='')
def run_graph(self, image_data, labels, input_layer_name, output_layer_name,
num_top_predictions):
with tf.Session() as sess:
# Feed the image_data as input to the graph.
# predictions will contain a two-dimensional array, where one
# dimension represents the input image count, and the other has
# predictions per class
softmax_tensor = sess.graph.get_tensor_by_name(output_layer_name)
predictions, = sess.run(softmax_tensor, {input_layer_name: image_data})
# Sort to show labels in order of confidence
            top_k = predictions.argsort()[-num_top_predictions:][::-1]
values = {}
for node_id in top_k:
human_string = labels[node_id]
score = predictions[node_id].item()
print('%s (score = %.5f)' % (human_string, score))
values[human_string] = score
return values
def recognize(self, image):
image_data = self.load_image(image)
labels = self.load_labels(self.path + self.labels)
        self.load_graph(self.path + self.graph)
return self.run_graph(image_data, labels, self.input_layer_name, self.output_layer_name,
self.num_top_predictions)
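# Hedged usage sketch (the paths and layer names below are hypothetical
# examples for a retrained Inception-style graph; adjust them to your model):
#
#   recognizer = ImageRecognizer('/models/', 'graph.pb', 'labels.txt',
#                                'DecodeJpeg/contents:0', 'final_result:0', 5)
#   scores = recognizer.recognize('/images/cat.jpg')  # {label: score, ...}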
| {
"content_hash": "3f2af123792c16d693bbcf100ca33f52",
"timestamp": "",
"source": "github",
"line_count": 61,
"max_line_length": 102,
"avg_line_length": 38.47540983606557,
"alnum_prop": 0.6365573072006817,
"repo_name": "mmiranda96/chilaiquil-api",
"id": "ec61f97332b9a51a18339475055ca310e1a2ab25",
"size": "2347",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "image_recognizer.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "13525"
},
{
"name": "Python",
"bytes": "4181"
},
{
"name": "Shell",
"bytes": "867"
}
],
"symlink_target": ""
} |
import argparse
import string
import Image, ImageOps
import logging
import urllib
import csv
import time
import calendar
from datetime import datetime
from urlparse import urlparse
# Main ########################################################################
if __name__ == '__main__':
parser = argparse.ArgumentParser()
    parser.add_argument('csv_read', help='path of the source csv')
    parser.add_argument('dataPresentazione', help='column number of the presentation date', type=int)
    parser.add_argument('dataApprovazione', help='column number of the approval date', type=int)
    parser.add_argument('tsv_write', help='path of the tsv to produce')
args = parser.parse_args()
with open(args.csv_read, 'rb') as csvfile: #open the csv file
reader = csv.reader(csvfile)
header = next(reader) #skip the header
age = [] # init age array
for row in reader: # for each row
t0 = datetime.strptime(row[args.dataPresentazione], '%Y-%m-%d') # calculate the t0 TS
t1 = datetime.strptime(row[args.dataApprovazione], '%Y-%m-%d') # calculate the t1 TS
diff = t1 - t0 # calculate the difference between the two TS
age.append(diff.days) # append the difference in 'days' to the end of the age array
maxAge = float(max(age)) # calculate the max of all the ages
        print('max age: ' + str(maxAge))
with open(args.csv_read, 'rb') as csvfile: # open again the csv file
reader = csv.reader(csvfile)
c = csv.writer(open(args.tsv_write, 'wb'),delimiter='\t') # open the tsv file
header = next(reader) # skip the header
printheader = [] # init the header for the tsv file
printheader.append(header[0]) # take the first column (with the ID)
printheader.append('age') # append the age
printheader.append('natura') # append the kind of law
c.writerow(printheader) # write on the tsv the whole header row
for row in reader:
t0 = datetime.strptime(row[args.dataPresentazione], '%Y-%m-%d') # calculate the t0 TS
t1 = datetime.strptime(row[args.dataApprovazione], '%Y-%m-%d') # calculate the t1 TS
diff = t1 - t0 # calculate the difference between the two TS
printrow = [] # init row to print
printrow.append(row[0]) # append the ID column
rateAge = (diff.days)/maxAge # calculate rate age
printrow.append(rateAge) # append to the tsv
natura = str(row[4]) # get kind of law
printrow.append(natura.replace(' ', '').replace('-', '')) # cleanup name
c.writerow(printrow) # append row to the tsv file
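# Worked example (illustrative): a law presented on 2013-01-01 and approved on
# 2013-03-02 gives diff.days == 60; if the slowest law in the file took 600
# days, its rate age is 60 / 600.0 = 0.1, which is written to the tsv next to
# the ID and the cleaned-up 'natura' string.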
| {
"content_hash": "81ca6ec6e52018ec221a55abd1ed69da",
"timestamp": "",
"source": "github",
"line_count": 57,
"max_line_length": 98,
"avg_line_length": 45.280701754385966,
"alnum_prop": 0.644711352189074,
"repo_name": "obujor/PalMaS",
"id": "9932862a8865b6d8271f062a4ef9367a97b98da2",
"size": "2604",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/iddl_age_tsv.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "55732"
},
{
"name": "CoffeeScript",
"bytes": "498"
},
{
"name": "JavaScript",
"bytes": "414071"
},
{
"name": "PHP",
"bytes": "36"
},
{
"name": "Python",
"bytes": "26779"
}
],
"symlink_target": ""
} |
from tempest.api.volume import base
from tempest.common.utils import data_utils
from tempest import config
from tempest.lib import exceptions as lib_exc
from tempest import test
CONF = config.CONF
class VolumesV2SnapshotNegativeTestJSON(base.BaseVolumeTest):
@classmethod
def skip_checks(cls):
super(VolumesV2SnapshotNegativeTestJSON, cls).skip_checks()
if not CONF.volume_feature_enabled.snapshot:
raise cls.skipException("Cinder volume snapshots are disabled")
@test.attr(type=['negative'])
@test.idempotent_id('e3e466af-70ab-4f4b-a967-ab04e3532ea7')
def test_create_snapshot_with_nonexistent_volume_id(self):
# Create a snapshot with nonexistent volume id
s_name = data_utils.rand_name(self.__class__.__name__ + '-snap')
self.assertRaises(lib_exc.NotFound,
self.snapshots_client.create_snapshot,
volume_id=data_utils.rand_uuid(),
display_name=s_name)
@test.attr(type=['negative'])
@test.idempotent_id('bb9da53e-d335-4309-9c15-7e76fd5e4d6d')
def test_create_snapshot_without_passing_volume_id(self):
# Create a snapshot without passing volume id
s_name = data_utils.rand_name(self.__class__.__name__ + '-snap')
self.assertRaises(lib_exc.NotFound,
self.snapshots_client.create_snapshot,
volume_id=None, display_name=s_name)
@test.idempotent_id('677863d1-34f9-456d-b6ac-9924f667a7f4')
def test_volume_from_snapshot_decreasing_size(self):
# Creates a volume a snapshot passing a size different from the source
src_size = CONF.volume.volume_size + 1
src_vol = self.create_volume(size=src_size)
src_snap = self.create_snapshot(src_vol['id'])
# Destination volume smaller than source
self.assertRaises(lib_exc.BadRequest,
self.volumes_client.create_volume,
size=src_size - 1,
snapshot_id=src_snap['id'])
class VolumesV1SnapshotNegativeTestJSON(VolumesV2SnapshotNegativeTestJSON):
_api_version = 1
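# Note (illustrative): the subclass above inherits every test method, so
# setting _api_version = 1 re-runs the same negative scenarios against the
# v1 volume API without duplicating the test bodies.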
| {
"content_hash": "98585a67703ad128363b564f71692e3c",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 78,
"avg_line_length": 41.132075471698116,
"alnum_prop": 0.6495412844036698,
"repo_name": "Tesora/tesora-tempest",
"id": "1f5bb0de7c1b1f3bcf7dab4116f07ea44e7993d7",
"size": "2753",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tempest/api/volume/test_volumes_snapshots_negative.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "3633718"
},
{
"name": "Shell",
"bytes": "9310"
}
],
"symlink_target": ""
} |
from __future__ import print_function
from time import sleep
from boto.ec2 import autoscale
from boto import exception
from paaws.config import Config, attributes
from paaws.userdata import Userdata
from paaws import iam
from paaws import subnets
from paaws import securitygroups
from paaws.sns import get_sns_topic_arn
class Scale(object):
old_method = False
scale_name = None
name = None
process = None
platform = None
env = None
region = None
availability_zones = []
instance_class = None
desired_capacity = None
minimum = None
maximum = None
connection = None
elb = None
destroy_confirmation = True
launch_config_list = []
userdata = None
autoscale_update = False
launchconfig_update = False
def __init__(
self, old_method=False, destroy_confirmation=True, autoscale_update=False,
launchconfig_update=False, scale_name=None, name=None, process=None, platform=None,
env=None, region=None, minimum=None, maximum=None, instance_class=None, desired_capacity=None,
elb=None, ami_id=None, public=False
):
config = Config()
self.old_method = old_method
self.autoscale_update = autoscale_update
self.launchconfig_update = launchconfig_update
self.name = name
self.process = process
self.platform = platform
self.env = env
self.region = region
self.instance_class = instance_class
self.desired_capacity = desired_capacity
self.minimum = minimum
self.maximum = maximum
self.connection = autoscale.connect_to_region(self.region)
self.availability_zones = attributes.availability_zones[self.region]
self.ami_id = ami_id if ami_id is not None else config.ami[self.region]
self.public = public
if scale_name is not None:
self.scale_name = "asg-%s" % scale_name
hostname = self.scale_name.replace("asg-", "").split("-")
self.name = hostname[0]
self.platform = hostname[1]
self.env = hostname[3]
for lc in self.get_all_launch_configs():
if scale_name in lc.name:
self.launch_config_list.append(lc)
if len(self.launch_config_list) > 0:
self.launch_config_name = sorted(self.launch_config_list, key=lambda lc: lc.created_time)[-1].name
else:
self.launch_config_name = "alc1-%s" % scale_name
else:
for lc in self.get_all_launch_configs():
if "%s-%s-%s-%s" % (self.process, self.name, self.platform, self.env) in lc.name:
self.launch_config_list.append(lc)
if len(self.launch_config_list) == 0:
lc_number = 1
elif len(self.launch_config_list) > 0:
lc_number = len(self.launch_config_list)
self.launch_config_name = "config%s-%s-%s-%s-%s" % (
lc_number, self.process, self.name, self.platform, self.env
)
self.scale_name = "scale-%s-%s-%s-%s" % (self.process, self.name, self.platform, self.env)
self.elb = elb
def get_all_launch_configs(self, token=None):
alc = self.connection.get_all_launch_configurations(next_token=token)
if alc.next_token is None:
return alc
else:
return alc + self.get_all_launch_configs(token=alc.next_token)
def get_launch_config(self):
launch_configs = self.connection.get_all_launch_configurations(names=[self.launch_config_name])
if len(launch_configs) > 0:
return launch_configs[0]
return None
def create_launch_config(self):
config = Config()
if self.old_method:
userdata = Userdata(
old_method=self.old_method, name=self.name, platform=self.platform, env=self.env, region=self.region
)
securitygroup = securitygroups.get_or_create(
region=self.region,
raw_mode=True,
fullname="%s-%s-%s-sg" % (self.name, self.platform, self.env)
)
securitygroup_ids = [securitygroup.id]
else:
userdata = Userdata(
name=self.name, app_type=self.process, platform=self.platform, env=self.env, region=self.region
)
securitygroup = securitygroups.get_or_create(
region=self.region,
name=self.name,
process=self.process,
platform=self.platform,
env=self.env
)
securitygroup_ids = [securitygroup.id]
self.userdata = userdata.create()
securitygroup = securitygroups.get(
region=self.region,
raw_mode=True,
fullname="general-management-sg"
)
securitygroup_ids.append(securitygroup.id)
launch_config = autoscale.LaunchConfiguration(
name=self.launch_config_name,
image_id=self.ami_id,
key_name=config.key[self.region],
instance_type=self.instance_class,
security_groups=securitygroup_ids,
instance_profile_name=iam.get(self.name, self.platform, self.env),
user_data=self.userdata,
block_device_mappings=[attributes.bdm],
associate_public_ip_address=self.public
)
self.connection.create_launch_configuration(launch_config)
return launch_config
def get_or_create_launch_config(self):
launch_config = self.get_launch_config()
if launch_config is not None:
return launch_config
launch_config = self.create_launch_config()
return launch_config
def create_notifications(self):
sns_topics = get_sns_topic_arn(platform=self.platform, region=self.region)
for sns_topic in sns_topics:
self.connection.put_notification_configuration(
autoscale_group=self.scale_name,
topic=sns_topic,
notification_types=[
'autoscaling:EC2_INSTANCE_LAUNCH',
'autoscaling:EC2_INSTANCE_LAUNCH_ERROR',
'autoscaling:EC2_INSTANCE_TERMINATE',
'autoscaling:EC2_INSTANCE_TERMINATE_ERROR',
'autoscaling:TEST_NOTIFICATION',
]
)
return True
def create(self):
launch_config = self.get_or_create_launch_config()
subnet_ids = [subnet for subnet in subnets.get(platform="public" if self.public else self.platform, region=self.region)]
if self.public:
try:
subnet_ids.remove('subnet-0c9eea65') # RIDICULOUS HARDCODED FIX (3 SUBNETS PUBLIC)
except ValueError:
pass
autoscale_group = autoscale.AutoScalingGroup(
name=self.scale_name,
launch_config=launch_config,
availability_zones=self.availability_zones,
desired_capacity=self.desired_capacity,
min_size=self.minimum,
max_size=self.maximum,
termination_policies=['OldestInstance'],
load_balancers=[self.elb],
vpc_zone_identifier=','.join(subnet_ids),
connection=self.connection
)
self.connection.create_auto_scaling_group(autoscale_group)
self.create_notifications()
return autoscale_group
def get_scale(self):
autoscale_group = self.connection.get_all_groups(names=[self.scale_name])
if len(autoscale_group) > 0:
return autoscale_group[0]
return None
def destroy(self):
autoscale_group = self.get_scale()
if autoscale_group is not None:
if self.destroy_confirmation:
if raw_input('Are you sure to delete %s group? [y/N] ' % self.scale_name) != 'y':
return 'Aborted'
autoscale_group.delete(force_delete=True)
if len(self.launch_config_list) > 0:
for launch_config in self.launch_config_list:
launch_config.delete()
return "Deleted the %s group and %s Launch Configurations. [ %s ]" % (
self.scale_name, self.launch_config_name, ', '.join(self.launch_config_list)
)
return "The specified group %s dont exist." % self.scale_name
def update(self):
if self.autoscale_update:
autoscale_group = self.get_scale()
if autoscale_group is not None:
autoscale_group.desired_capacity = self.desired_capacity
autoscale_group.update()
return "Updated the desired capacity to %s for group %s." % (
str(self.desired_capacity), self.scale_name
)
return "Autoscale not found"
elif self.launchconfig_update:
launch_config = self.get_launch_config()
if self.instance_class is None:
self.instance_class = launch_config.instance_type
if launch_config is not None:
lc_number = len(self.launch_config_list)+1
if "alc" in launch_config.name:
self.launch_config_name = "alc%s-%s" % (lc_number, self.scale_name.replace("asg-", ""))
elif "config"in launch_config.name:
self.launch_config_name = "config%s-%s-%s-%s-%s" % (
lc_number, self.process, self.name, self.platform, self.env
)
new_launch_config = self.create_launch_config()
autoscale_group = self.get_scale()
autoscale_group.launch_config_name = new_launch_config.name
autoscale_group.update()
return "Updated the launch config %s of %s." % (self.launch_config_name, self.scale_name)
return "Launchconfig not found"
return "Nothing updated, autoscale or launchconfig must be True"
def list_all_groups(region):
header = ["Name", "LaunchConfig", "Instances", "Instance Class", "Desired", "Min", "Max", "Region"]
asg_data = []
asconn = autoscale.connect_to_region(region)
autoscaling_groups = [asg for asg in asconn.get_all_groups()]
for asg in autoscaling_groups:
try:
asg_instance_class = asconn.get_all_launch_configurations(names=[asg.launch_config_name])[0].instance_type
except exception.BotoServerError as err:
if err.message == "Rate exceeded":
sleep(3)
asg_instance_class = asconn.get_all_launch_configurations(names=[asg.launch_config_name])
asg_instance_class = asg_instance_class[0].instance_type
asg_name = asg.name.replace("asg-", "")
asg_lc = asg.launch_config_name
asg_instances = len(asg.instances)
asg_desired = asg.desired_capacity
asg_min = asg.min_size
asg_max = asg.max_size
asg_region = region
asg_data.append([
asg_name,
asg_lc,
asg_instances,
asg_instance_class,
asg_desired,
asg_min,
asg_max,
asg_region
])
asg_data.insert(0, header)
asg_data = {col[0]: col[1:] for col in zip(*asg_data)}
return asg_data
def describe_group(name, old_method=False):
if old_method:
asg_name = "asg-%s" % name
else:
asg_name = name
for region in [region.name for region in attributes.regions]:
asconn = autoscale.connect_to_region(region)
asg = asconn.get_all_groups(names=[asg_name])
if len(asg) > 0:
if asg[0].name == asg_name:
break
try:
autoscaling_group = asg[0]
header = [autoscaling_group.name, "Information"]
asg_data = []
except IndexError as e:
raise ValueError("Invalid autoscaling group name %s. Error: %s" % (asg_name, e))
instances = ["%s - %s" % (instance.instance_id, instance.health_status) for instance in autoscaling_group.instances]
asg_data.append(["ARN", autoscaling_group.autoscaling_group_arn])
asg_data.append(["Desired Capacity", autoscaling_group.desired_capacity])
asg_data.append(["LaunchConfig", autoscaling_group.launch_config_name])
asg_data.append(["Min Size", autoscaling_group.min_size])
asg_data.append(["Max Size", autoscaling_group.max_size])
asg_data.append(["Termination Policy", str(autoscaling_group.termination_policies[0])])
asg_data.append(["Instances", ', '.join(instances)])
asg_data.insert(0, header)
asg_data = {col[0]: col[1:] for col in zip(*asg_data)}
return asg_data
def describe_group_instances(name):
asg_name = "asg-%s" % name
for region in [region.name for region in attributes.regions]:
asconn = autoscale.connect_to_region(region)
asg = asconn.get_all_groups(names=[asg_name])
if len(asg) > 0:
if asg[0].name == asg_name:
break
try:
autoscaling_group = asg[0]
except IndexError as e:
raise ValueError("Invalid autoscaling group name %s. Error: %s" % (asg_name, e))
instances = [instance.instance_id for instance in autoscaling_group.instances]
return instances
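# Hedged usage sketch (all argument values below are hypothetical):
#
#   scale = Scale(name='api', process='web', platform='prod', env='live',
#                 region='us-east-1', minimum=2, maximum=8, desired_capacity=4,
#                 instance_class='m3.medium', elb='elb-api-prod')
#   scale.create()  # builds the launch config, the ASG and SNS notifications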
| {
"content_hash": "c3593c40b1cbe36dcec4a54d59b366f8",
"timestamp": "",
"source": "github",
"line_count": 360,
"max_line_length": 128,
"avg_line_length": 37.28611111111111,
"alnum_prop": 0.5907025255159055,
"repo_name": "phspagiari/paaws",
"id": "b4cace18735093640be88d94577833d9d51dc695",
"size": "13445",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "paaws/resources/autoscale.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "82946"
}
],
"symlink_target": ""
} |
from unittest import TestCase
try:
from unittest import SkipTest
except:
from unittest2 import SkipTest
from random import randint
from threading import Thread
from socket import socket, AF_INET, SOCK_STREAM
from subprocess import Popen,PIPE,STDOUT
import sys, os, string, subprocess
from proton import Connection, Transport, SASL, Endpoint, Delivery, SSL
from proton.reactor import Container
from proton.handlers import CHandshaker, CFlowController
from string import Template
if sys.version_info[0] == 2 and sys.version_info[1] < 6:
# this is for compatibility, apparently the version of jython we
# use doesn't have the next() builtin.
# we should remove this when we upgrade to a python 2.6+ compatible version
# of jython
#_DEF = object() This causes the test loader to fail (why?)
class _dummy(): pass
_DEF = _dummy
def next(iter, default=_DEF):
try:
return iter.next()
except StopIteration:
if default is _DEF:
raise
else:
return default
# I may goto hell for this:
import __builtin__
__builtin__.__dict__['next'] = next
def free_tcp_ports(count=1):
""" return a list of 'count' TCP ports that are free to used (ie. unbound)
"""
retry = 0
ports = []
sockets = []
while len(ports) != count:
port = randint(49152, 65535)
sockets.append( socket( AF_INET, SOCK_STREAM ) )
try:
sockets[-1].bind( ("0.0.0.0", port ) )
ports.append( port )
retry = 0
except:
retry += 1
assert retry != 100, "No free sockets available for test!"
for s in sockets:
s.close()
return ports
def free_tcp_port():
return free_tcp_ports(1)[0]
def pump_uni(src, dst, buffer_size=1024):
p = src.pending()
c = dst.capacity()
if c < 0:
if p < 0:
return False
else:
src.close_head()
return True
if p < 0:
dst.close_tail()
elif p == 0 or c == 0:
return False
else:
binary = src.peek(min(c, buffer_size))
dst.push(binary)
src.pop(len(binary))
return True
def pump(transport1, transport2, buffer_size=1024):
""" Transfer all pending bytes between two Proton engines
by repeatedly calling peek/pop and push.
Asserts that each engine accepts some bytes every time
(unless it's already closed).
"""
while (pump_uni(transport1, transport2, buffer_size) or
pump_uni(transport2, transport1, buffer_size)):
pass
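# Hedged usage sketch (the connection objects are assumed to exist): bind two
# in-memory transports, then shuttle bytes until both sides go idle:
#
#   t1 = Transport(); t1.bind(client_connection)
#   t2 = Transport(); t2.bind(server_connection)
#   pump(t1, t2)  # call again after each local state change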
def isSSLPresent():
return SSL.present()
createdSASLDb = False
def _cyrusSetup(conf_dir):
"""Write out simple SASL config.
"""
saslpasswd = ""
if 'SASLPASSWD' in os.environ:
saslpasswd = os.environ['SASLPASSWD']
if os.path.exists(saslpasswd):
t = Template("""sasldb_path: ${db}
mech_list: EXTERNAL DIGEST-MD5 SCRAM-SHA-1 CRAM-MD5 PLAIN ANONYMOUS
""")
abs_conf_dir = os.path.abspath(conf_dir)
subprocess.call(args=['rm','-rf',abs_conf_dir])
os.mkdir(abs_conf_dir)
db = os.path.join(abs_conf_dir,'proton.sasldb')
conf = os.path.join(abs_conf_dir,'proton-server.conf')
f = open(conf, 'w')
f.write(t.substitute(db=db))
f.close()
cmd_template = Template("echo password | ${saslpasswd} -c -p -f ${db} -u proton user")
cmd = cmd_template.substitute(db=db, saslpasswd=saslpasswd)
subprocess.call(args=cmd, shell=True)
os.environ['PN_SASL_CONFIG_PATH'] = abs_conf_dir
global createdSASLDb
createdSASLDb = True
# Globally initialize Cyrus SASL configuration
if SASL.extended():
_cyrusSetup('sasl_conf')
def ensureCanTestExtendedSASL():
if not SASL.extended():
raise Skipped('Extended SASL not supported')
if not createdSASLDb:
raise Skipped("Can't Test Extended SASL: Couldn't create auth db")
class DefaultConfig:
defines = {}
class Test(TestCase):
config = DefaultConfig()
def __init__(self, name):
super(Test, self).__init__(name)
self.name = name
def configure(self, config):
self.config = config
def default(self, name, value, **profiles):
default = value
profile = self.config.defines.get("profile")
if profile:
default = profiles.get(profile, default)
return self.config.defines.get(name, default)
@property
def delay(self):
return float(self.default("delay", "1", fast="0.1"))
@property
def timeout(self):
return float(self.default("timeout", "60", fast="10"))
@property
def verbose(self):
return int(self.default("verbose", 0))
class Skipped(SkipTest):
skipped = True
class TestServer(object):
""" Base class for creating test-specific message servers.
"""
def __init__(self, **kwargs):
self.args = kwargs
self.reactor = Container(self)
self.host = "127.0.0.1"
self.port = 0
if "host" in kwargs:
self.host = kwargs["host"]
if "port" in kwargs:
self.port = kwargs["port"]
self.handlers = [CFlowController(10), CHandshaker()]
self.thread = Thread(name="server-thread", target=self.run)
self.thread.daemon = True
self.running = True
self.conditions = []
def start(self):
self.reactor.start()
retry = 0
if self.port == 0:
self.port = str(randint(49152, 65535))
retry = 10
while retry > 0:
try:
self.acceptor = self.reactor.acceptor(self.host, self.port)
break
except IOError:
self.port = str(randint(49152, 65535))
retry -= 1
assert retry > 0, "No free port for server to listen on!"
self.thread.start()
def stop(self):
self.running = False
self.reactor.wakeup()
self.thread.join()
# Note: all following methods all run under the thread:
def run(self):
self.reactor.timeout = 3.14159265359
while self.reactor.process():
if not self.running:
self.acceptor.close()
self.reactor.stop()
break
def on_connection_bound(self, event):
if "idle_timeout" in self.args:
event.transport.idle_timeout = self.args["idle_timeout"]
def on_connection_local_close(self, event):
self.conditions.append(event.connection.condition)
def on_delivery(self, event):
event.delivery.settle()
#
# Classes that wrap the messenger applications msgr-send and msgr-recv.
# These applications reside in the tests/tools/apps directory
#
class MessengerApp(object):
""" Interface to control a MessengerApp """
def __init__(self):
self._cmdline = None
# options common to Receivers and Senders:
self.ca_db = None
self.certificate = None
self.privatekey = None
self.password = None
self._output = None
def findfile(self, filename, searchpath):
"""Find filename in the searchpath
return absolute path to the file or None
"""
paths = string.split(searchpath, os.pathsep)
for path in paths:
if os.path.exists(os.path.join(path, filename)):
return os.path.abspath(os.path.join(path, filename))
return None
def start(self, verbose=False):
""" Begin executing the test """
cmd = self.cmdline()
self._verbose = verbose
if self._verbose:
print("COMMAND='%s'" % str(cmd))
#print("ENV='%s'" % str(os.environ.copy()))
try:
if os.name=="nt":
# Windows handles python launch by replacing script 'filename' with
# 'python abspath-to-filename' in cmdline arg list.
if cmd[0].endswith('.py'):
foundfile = self.findfile(cmd[0], os.environ['PATH'])
if foundfile is None:
                        foundfile = self.findfile(cmd[0], os.environ['PYTHONPATH'])
                        if foundfile is None:
                            msg = "Unable to locate file '%s' in PATH or PYTHONPATH" % cmd[0]
                            raise Skipped("Skipping test - %s" % msg)
del cmd[0:1]
cmd.insert(0, foundfile)
cmd.insert(0, sys.executable)
self._process = Popen(cmd, stdout=PIPE, stderr=STDOUT,
bufsize=4096, universal_newlines=True)
except OSError:
e = sys.exc_info()[1]
print("ERROR: '%s'" % e)
msg = "Unable to execute command '%s', is it in your PATH?" % cmd[0]
# NOTE(flaper87): Skip the test if the command is not found.
if e.errno == 2:
raise Skipped("Skipping test - %s" % msg)
assert False, msg
self._ready() # wait for it to initialize
def stop(self):
""" Signal the client to start clean shutdown """
pass
def wait(self):
""" Wait for client to complete """
self._output = self._process.communicate()
if self._verbose:
print("OUTPUT='%s'" % self.stdout())
def status(self):
""" Return status from client process """
return self._process.returncode
def stdout(self):
#self._process.communicate()[0]
if not self._output or not self._output[0]:
return "*** NO STDOUT ***"
return self._output[0]
def stderr(self):
if not self._output or not self._output[1]:
return "*** NO STDERR ***"
return self._output[1]
def cmdline(self):
if not self._cmdline:
self._build_command()
return self._cmdline
def _build_command(self):
assert False, "_build_command() needs override"
def _ready(self):
assert False, "_ready() needs override"
def _do_common_options(self):
""" Common option handling """
if self.ca_db is not None:
self._cmdline.append("-T")
self._cmdline.append(str(self.ca_db))
if self.certificate is not None:
self._cmdline.append("-C")
self._cmdline.append(str(self.certificate))
if self.privatekey is not None:
self._cmdline.append("-K")
self._cmdline.append(str(self.privatekey))
if self.password is not None:
self._cmdline.append("-P")
self._cmdline.append("pass:" + str(self.password))
class MessengerSender(MessengerApp):
""" Interface to configure a sending MessengerApp """
def __init__(self):
MessengerApp.__init__(self)
self._command = None
# @todo make these properties
self.targets = []
self.send_count = None
self.msg_size = None
self.send_batch = None
self.outgoing_window = None
self.report_interval = None
self.get_reply = False
self.timeout = None
self.incoming_window = None
self.recv_count = None
self.name = None
# command string?
def _build_command(self):
self._cmdline = self._command
self._do_common_options()
assert self.targets, "Missing targets, required for sender!"
self._cmdline.append("-a")
self._cmdline.append(",".join(self.targets))
if self.send_count is not None:
self._cmdline.append("-c")
self._cmdline.append(str(self.send_count))
if self.msg_size is not None:
self._cmdline.append("-b")
self._cmdline.append(str(self.msg_size))
if self.send_batch is not None:
self._cmdline.append("-p")
self._cmdline.append(str(self.send_batch))
if self.outgoing_window is not None:
self._cmdline.append("-w")
self._cmdline.append(str(self.outgoing_window))
if self.report_interval is not None:
self._cmdline.append("-e")
self._cmdline.append(str(self.report_interval))
if self.get_reply:
self._cmdline.append("-R")
if self.timeout is not None:
self._cmdline.append("-t")
self._cmdline.append(str(self.timeout))
if self.incoming_window is not None:
self._cmdline.append("-W")
self._cmdline.append(str(self.incoming_window))
if self.recv_count is not None:
self._cmdline.append("-B")
self._cmdline.append(str(self.recv_count))
if self.name is not None:
self._cmdline.append("-N")
self._cmdline.append(str(self.name))
def _ready(self):
pass
class MessengerReceiver(MessengerApp):
""" Interface to configure a receiving MessengerApp """
def __init__(self):
MessengerApp.__init__(self)
self._command = None
# @todo make these properties
self.subscriptions = []
self.receive_count = None
self.recv_count = None
self.incoming_window = None
self.timeout = None
self.report_interval = None
self.send_reply = False
self.outgoing_window = None
self.forwards = []
self.name = None
# command string?
def _build_command(self):
self._cmdline = self._command
self._do_common_options()
self._cmdline += ["-X", "READY"]
assert self.subscriptions, "Missing subscriptions, required for receiver!"
self._cmdline.append("-a")
self._cmdline.append(",".join(self.subscriptions))
if self.receive_count is not None:
self._cmdline.append("-c")
self._cmdline.append(str(self.receive_count))
if self.recv_count is not None:
self._cmdline.append("-b")
self._cmdline.append(str(self.recv_count))
if self.incoming_window is not None:
self._cmdline.append("-w")
self._cmdline.append(str(self.incoming_window))
if self.timeout is not None:
self._cmdline.append("-t")
self._cmdline.append(str(self.timeout))
if self.report_interval is not None:
self._cmdline.append("-e")
self._cmdline.append(str(self.report_interval))
if self.send_reply:
self._cmdline.append("-R")
if self.outgoing_window is not None:
self._cmdline.append("-W")
self._cmdline.append(str(self.outgoing_window))
if self.forwards:
self._cmdline.append("-F")
self._cmdline.append(",".join(self.forwards))
if self.name is not None:
self._cmdline.append("-N")
self._cmdline.append(str(self.name))
def _ready(self):
""" wait for subscriptions to complete setup. """
r = self._process.stdout.readline()
assert r.strip() == "READY", "Unexpected input while waiting for receiver to initialize: %s" % r
class MessengerSenderC(MessengerSender):
def __init__(self):
MessengerSender.__init__(self)
self._command = ["msgr-send"]
class MessengerSenderValgrind(MessengerSenderC):
""" Run the C sender under Valgrind
"""
def __init__(self, suppressions=None):
if "VALGRIND" not in os.environ:
raise Skipped("Skipping test - $VALGRIND not set.")
MessengerSenderC.__init__(self)
if not suppressions:
suppressions = os.path.join(os.path.dirname(__file__),
"valgrind.supp" )
self._command = [os.environ["VALGRIND"], "--error-exitcode=1", "--quiet",
"--trace-children=yes", "--leak-check=full",
"--suppressions=%s" % suppressions] + self._command
class MessengerReceiverC(MessengerReceiver):
def __init__(self):
MessengerReceiver.__init__(self)
self._command = ["msgr-recv"]
class MessengerReceiverValgrind(MessengerReceiverC):
""" Run the C receiver under Valgrind
"""
def __init__(self, suppressions=None):
if "VALGRIND" not in os.environ:
raise Skipped("Skipping test - $VALGRIND not set.")
MessengerReceiverC.__init__(self)
if not suppressions:
suppressions = os.path.join(os.path.dirname(__file__),
"valgrind.supp" )
self._command = [os.environ["VALGRIND"], "--error-exitcode=1", "--quiet",
"--trace-children=yes", "--leak-check=full",
"--suppressions=%s" % suppressions] + self._command
class MessengerSenderPython(MessengerSender):
def __init__(self):
MessengerSender.__init__(self)
self._command = ["msgr-send.py"]
class MessengerReceiverPython(MessengerReceiver):
def __init__(self):
MessengerReceiver.__init__(self)
self._command = ["msgr-recv.py"]
class ReactorSenderC(MessengerSender):
def __init__(self):
MessengerSender.__init__(self)
self._command = ["reactor-send"]
class ReactorSenderValgrind(ReactorSenderC):
""" Run the C sender under Valgrind
"""
def __init__(self, suppressions=None):
if "VALGRIND" not in os.environ:
raise Skipped("Skipping test - $VALGRIND not set.")
ReactorSenderC.__init__(self)
if not suppressions:
suppressions = os.path.join(os.path.dirname(__file__),
"valgrind.supp" )
self._command = [os.environ["VALGRIND"], "--error-exitcode=1", "--quiet",
"--trace-children=yes", "--leak-check=full",
"--suppressions=%s" % suppressions] + self._command
class ReactorReceiverC(MessengerReceiver):
def __init__(self):
MessengerReceiver.__init__(self)
self._command = ["reactor-recv"]
class ReactorReceiverValgrind(ReactorReceiverC):
""" Run the C receiver under Valgrind
"""
def __init__(self, suppressions=None):
if "VALGRIND" not in os.environ:
raise Skipped("Skipping test - $VALGRIND not set.")
ReactorReceiverC.__init__(self)
if not suppressions:
suppressions = os.path.join(os.path.dirname(__file__),
"valgrind.supp" )
self._command = [os.environ["VALGRIND"], "--error-exitcode=1", "--quiet",
"--trace-children=yes", "--leak-check=full",
"--suppressions=%s" % suppressions] + self._command
| {
"content_hash": "ab64ca17560c78d58f975df511bb0940",
"timestamp": "",
"source": "github",
"line_count": 548,
"max_line_length": 104,
"avg_line_length": 33.14598540145985,
"alnum_prop": 0.5914996696762828,
"repo_name": "Azure/qpid-proton",
"id": "c819d4ed13f729ea809e61e8a779bb849b2e5d05",
"size": "18954",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/python/proton_tests/common.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "1232773"
},
{
"name": "C++",
"bytes": "309079"
},
{
"name": "CMake",
"bytes": "86420"
},
{
"name": "Groff",
"bytes": "420"
},
{
"name": "HTML",
"bytes": "8169"
},
{
"name": "Java",
"bytes": "1789552"
},
{
"name": "JavaScript",
"bytes": "244212"
},
{
"name": "PHP",
"bytes": "31076"
},
{
"name": "Perl",
"bytes": "100876"
},
{
"name": "Perl6",
"bytes": "878"
},
{
"name": "Python",
"bytes": "624143"
},
{
"name": "Ruby",
"bytes": "335237"
},
{
"name": "Shell",
"bytes": "10889"
}
],
"symlink_target": ""
} |
import socket
import os
## \brief Maximum number of bytes to read from a socket in one go
BUF_SIZE = 4096
## \brief Maximum length of the contents of a TLV object
LEN_MAX = 65536
# Error codes
ERR_OK = 0
ERR_SOCK_CREATE = 1
SOCK_ERR_BIND = 2
SOCK_ERR_ACCEPT = 3
ERR_REMOVE_PATH = 4
ERR_SOCK_READ = 5
ERR_SOCK_WRITE = 6
ERR_SOCK_PATH_LEN = 7
ERR_DATA_LEN = 8
ERR_ERROR = 42
## \brief A TLV integer is a 32 bit signed integer
TAG_INT = 0
## \brief A TLV string is a UTF-8 encoded string
TAG_STRING = 1
## \brief A TLV byte array contains arbitrary binary data as an array of bytes
TAG_BYTE_ARRAY = 2
## \brief A TLV sequence is a container for other TLV objects
TAG_SEQUENCE = 3
## \brief A TLV double is a floating point number
TAG_DOUBLE = 4
## \brief A TLV NULL object is an empty object that has no contents bytes
TAG_NULL = 5
## \brief A TLV result code is a 32 bit unsigned integer
TAG_RESULT_CODE = 6
## \brief An exception class that is used for constructing exception objects in this module.
#
class TlvException(Exception):
    ## \brief Constructs an exception object for this module.
    #
    # \param [error_message] Is a string. It has to contain an error message that is to be conveyed to the
    # receiver of the corresponding exception.
#
def __init__(self, error_message):
Exception.__init__(self, 'tlvobject: ' + error_message)
## \brief A class that implements the concept of a TLV (Tag Length Value) encoded data object.
#
# The encoded form of a TLV object starts with a single byte (the tag) that specifies the type of the
# data that it contains. The tag is followed by two length bytes which specify the length of the following
# contents bytes. Objects of this type are used to talk to the C++ side of the TLV infrastructure.
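# For example, the string "AB" encodes to the five bytes b'\x01\x00\x02AB':
# the tag TAG_STRING (0x01), the big endian length 0x0002 and the two
# contents bytes.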
#
class TlvEntry:
## \brief Constructor.
#
    # The default tag is TAG_NULL and therefore there are no contents bytes.
#
def __init__(self):
## \brief Holds the tag of the value currently represented by this TlvEntry instance.
self.tag = TAG_NULL
## \brief Holds the contents bytes of the value currently represented by this TlvEntry instance.
self.value = bytes()
    ## \brief This method transforms a signed integer into an array of four bytes that represents
    # the integer in two's complement form. Big endian byte ordering is used.
#
# \param [val] Is an integer. It contains the value that is to be transformed
#
    # \returns A byte array that represents the value specified in parameter val.
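    # For example, int_to_bytes(1) returns b'\x00\x00\x00\x01' and
    # int_to_bytes(-1) returns b'\xff\xff\xff\xff'.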
#
@staticmethod
def int_to_bytes(val):
if val < 0:
val = -val
val = val ^ 0xFFFFFFFF
val = val + 1
return val.to_bytes(4, byteorder='big')
## \brief This method sets this TlvEntry instance up to represent a signed integer.
#
# \param [int_val] Is an integer. It contains the value that is to be represented.
#
# \returns self.
#
def to_int(self, int_val):
self.tag = TAG_INT
self.value = TlvEntry.int_to_bytes(int_val)
return self
## \brief This method sets this TlvEntry instance up to represent an unsigned integer.
#
# \param [result_val] Is an integer. It contains the value that is to be represented.
#
# \returns self.
#
def to_result(self, result_val):
self.tag = TAG_RESULT_CODE
result_val = result_val & 0xFFFFFFFF
self.value = result_val.to_bytes(4, byteorder='big')
return self
## \brief This method sets this TlvEntry instance up to represent a string.
#
# \param [str_val] Is a string. It contains the value that is to be represented.
#
# \returns self.
#
def to_string(self, str_val):
self.tag = TAG_STRING
self.value = str_val.encode()
return self
## \brief This method sets this TlvEntry instance up to represent a NULL object.
#
# \returns self.
#
def to_null(self):
self.tag = TAG_NULL
self.value = b''
return self
## \brief This method sets this TlvEntry instance up to represent a sequence of TlvEntry objects.
#
# \param [obj_sequence] Is a sequence of TlvEntry objects. It contains the values that are to be represented.
#
# \returns self.
#
def to_sequence(self, obj_sequence):
self.tag = TAG_SEQUENCE
self.value = TlvStream.to_bytes(obj_sequence)
return self
## \brief This method sets this TlvEntry instance up to represent a floating point double value.
#
# \param [double_val] Is a floating point number. It contains the value that is to be represented.
#
# \returns self.
#
def to_double(self, double_val):
self.tag = TAG_DOUBLE
self.value = str(double_val).encode()
return self
## \brief This method sets this TlvEntry instance up to represent a byte array.
#
# \param [byte_vector] Is a byte array. It contains the value that is to be represented.
#
# \returns self.
#
def to_byte_array(self, byte_vector):
self.tag = TAG_BYTE_ARRAY
self.value = byte_vector
return self
## \brief This method converts this TlvEntry instance into a regular python3 value.
#
# \returns Either a signed integer, an unsigned integer, a string, a byte array, a floating point number,
# the None value or a sequence holding values of the aforementioned types. The specific type returned
    # depends on the value of self.tag. If an unknown tag value is encountered, a dict with the keys "tag" and
    # "value" is returned.
#
def tlv_convert(self):
result = None
if self.tag == TAG_INT:
if len(self.value) == 4:
result = int.from_bytes(self.value, byteorder='big')
if result & 0x80000000:
result = result ^ 0xFFFFFFFF
result = result + 1
result = -result
else:
raise TlvException('Format Error')
elif self.tag == TAG_RESULT_CODE:
if len(self.value) == 4:
result = int.from_bytes(self.value, byteorder='big')
else:
raise TlvException('Format Error')
elif self.tag == TAG_STRING:
result = self.value.decode()
elif self.tag == TAG_NULL:
result = None
        elif self.tag == TAG_DOUBLE:
            # float() parses the decimal string written by to_double() and,
            # unlike eval(), cannot execute arbitrary input
            result = float(self.value.decode())
elif self.tag == TAG_SEQUENCE:
result = []
res = TlvStream.parse_bytes(self.value)
if res.err_code != ERR_OK:
raise TlvException('Unable to parse')
# Beware: Recursion happens here ;-)!
for i in res.data:
result.append(i.tlv_convert())
elif self.tag == TAG_BYTE_ARRAY:
result = self.value
else:
result = {'tag':self.tag, 'value':self.value}
return result
## \brief A class that is intended to represent a generic return value.
#
class TlvResult:
## \brief Constructor.
#
    # \param [err_code] Is an integer. It represents an error code. A value of ERR_OK signals successful
    # completion of the operation.
#
# \param [data] The type of this parameter is generic. It contains the data returned by the
# operation.
#
def __init__(self, err_code, data):
self.err_code = err_code
self.data = data
## \brief A class that binds together a collection of static methods that deal with sending
# and receiving TLV encoded objects via UNIX domain sockets.
#
class TlvStream:
## \brief Constructor.
#
def __init__(self):
pass
## \brief This method performs a transaction with the server
#
# \param [sock] Is a socket object
#
# \param [tlv_param] A TlvEntry object that specifies the parameter to use in the call
#
# \returns A sequence of objects that depends on the values returned by the server.
#
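    # A minimal client-side usage sketch (the socket path is hypothetical):
    #   sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    #   sock.connect('/tmp/tlv_server.sock')
    #   reply = TlvStream.transact_client(sock, TlvEntry().to_string('ping'))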
@staticmethod
def transact_client(sock, tlv_param):
if TlvStream.write_tlv(sock, tlv_param) != ERR_OK:
raise TlvException('Sending data failed')
res = TlvStream.read_tlv(sock)
if res.err_code != ERR_OK:
raise TlvException('Receiving data failed')
return res.data.tlv_convert()
## \brief This method performs a transaction with a client
#
    # \param [sock] A socket object. This object is used to talk to the client.
#
    # \param [processor] Is an object with a process method that receives a TlvEntry object
    # and returns a TlvEntry object.
#
# \returns An integer specifying an error code
#
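    # A minimal processor sketch (class and socket names are illustrative):
    #   class EchoProcessor:
    #       def process(self, tlv_entry):
    #           return tlv_entry
    #   TlvStream.transact_server(connected_sock, EchoProcessor())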
@staticmethod
def transact_server(sock, processor):
result = ERR_OK
res = TlvStream.read_tlv(sock)
if res.err_code != ERR_OK:
raise TlvException('Error receiving data from client')
try:
tlv_for_client = processor.process(res.data)
result = TlvStream.write_tlv(sock, tlv_for_client)
        except Exception:
result = ERR_ERROR
return result
    ## \brief This method reads a specific number of bytes from a socket. It does not return until
# the desired number of bytes has been read.
#
# \param [sock] Is a socket object. This socket is used for reading data.
#
# \param [bytes_to_read] Is an integer. It specifies the number of bytes to read from the socket.
#
# \returns A TlvResult object. In case of success the data member contains the bytes read in the form of a
# byte array.
#
@staticmethod
def _read_defined(sock, bytes_to_read):
result = TlvResult(ERR_OK, bytes())
try:
while (bytes_to_read > 0) and (result.err_code == ERR_OK):
data = sock.recv(min(BUF_SIZE, bytes_to_read))
if not data:
result.err_code = ERR_SOCK_READ
continue
else:
bytes_to_read -= len(data)
result.data = result.data + data
        except Exception:
result.err_code = ERR_SOCK_READ
return result
    ## \brief This method reads a single TlvEntry object from a socket. It does not return until a TLV
    # object has been read or an error is encountered.
#
# \param [sock] Is a socket object. This socket is used for reading data.
#
# \returns A TlvResult object. In case of success the data member contains the TlvEntry object read from
# the socket.
#
@staticmethod
def read_tlv(sock):
result = TlvResult(ERR_OK, TlvEntry())
header_res = TlvStream._read_defined(sock, 3)
if header_res.err_code != ERR_OK:
result.err_code = header_res.err_code
else:
result.data.tag = int(header_res.data[0])
data_len = (int(header_res.data[1]) << 8) + int(header_res.data[2])
data_res = TlvStream._read_defined(sock, data_len)
if data_res.err_code == ERR_OK:
result.data.value = data_res.data
else:
result.err_code = data_res.err_code
return result
    ## \brief This method constructs the encoded header of a TLV object. The header consists of
# the single byte representing the tag followed by the two bytes which specify the length of the
# contents bytes. The length bytes use big endian byte order.
#
    # \param [tag] Is an integer. It designates the tag.
#
    # \param [data_len] Is an integer. It designates the number of contents bytes.
#
# \returns A three element byte array that represents the encoded TLV header.
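    # For example, make_header(TAG_STRING, 2) returns b'\x01\x00\x02'.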
#
@staticmethod
def make_header(tag, data_len):
return tag.to_bytes(1, byteorder='big') + data_len.to_bytes(2, byteorder='big')
## \brief This method parses a byte array into a sequence of TlvEntry objects.
#
# \param [encoded_bytes] Is a byte array. It contains a number of encoded TLV objects.
#
# \returns A TlvResult object. In case of success the data member holds a sequence of TlvEntry objects
# that were parsed from the value given in parameter encoded_bytes.
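    # For example, parse_bytes(b'\x01\x00\x02AB') yields a single TAG_STRING
    # entry whose value is b'AB'.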
#
@staticmethod
def parse_bytes(encoded_bytes):
result = TlvResult(ERR_OK, [])
end_position = len(encoded_bytes)
read_position = 0
while (result.err_code == ERR_OK) and (read_position < end_position):
if (end_position - read_position) >= 3:
entry = TlvEntry()
# Parse tag and length bytes
entry.tag = int(encoded_bytes[read_position])
entry_len = (int(encoded_bytes[read_position + 1]) << 8) + int(encoded_bytes[read_position + 2])
read_position += 3
# Parse contents bytes
if (end_position - read_position) >= entry_len:
entry.value = encoded_bytes[read_position:read_position + entry_len]
read_position += entry_len
result.data.append(entry)
else:
result.err_code = ERR_DATA_LEN
else:
result.err_code = ERR_DATA_LEN
return result
## \brief This method encodes a sequence of TlvEntry objects into a byte array.
#
# \param [tlv_objects] Is a sequence of TlvEntry objects.
#
# \returns A byte array that consists of the concatenated encodings of the TlvEntry objects specified by
# parameter tlv_objects.
#
@staticmethod
def to_bytes(tlv_objects):
result = bytes()
for i in tlv_objects:
result = result + TlvStream.make_header(i.tag, len(i.value))
result = result + i.value
return result
## \brief This method transforms a sequence of TlvEntry objects into a corresponding sequence of python3 values.
#
# \param [tlv_obj_sequence] Is a sequence of TlvEntry objects.
#
# \returns A sequence of generic python3 values. The types of the sequence components depend on the tags of the
# TlvEntry objects specified by parameter tlv_obj_sequence.
#
@staticmethod
def convert_all(tlv_obj_sequence):
return list(map(lambda x: x.tlv_convert(), tlv_obj_sequence))
    ## \brief This method writes a single TlvEntry object to a socket. It does not return until the TLV
    # object has been sent or an error is encountered.
#
# \param [sock] Is a socket object. This socket is used for sending data.
#
# \param [tlv_object] Is a TlvEntry object. This object is encoded and sent through the socket.
#
    # \returns An integer. This integer represents an error code. A value of ERR_OK signifies successful completion
# of the send operation.
#
@staticmethod
def write_tlv(sock, tlv_object):
result = ERR_OK
        # Strict comparison: two length bytes can encode at most LEN_MAX - 1 contents bytes.
        if len(tlv_object.value) < LEN_MAX:
try:
data = TlvStream.make_header(tlv_object.tag, len(tlv_object.value))
data = data + tlv_object.value
sock.sendall(data)
            except Exception:
result = ERR_SOCK_WRITE
else:
result = ERR_DATA_LEN
return result
| {
"content_hash": "eb4d8671f6e26cf06847dd08ffd3a417",
"timestamp": "",
"source": "github",
"line_count": 434,
"max_line_length": 119,
"avg_line_length": 37.13133640552996,
"alnum_prop": 0.5929878994725412,
"repo_name": "rmsk2/Das-grosse-Quiz",
"id": "adacc3e650cdf1f679f1479de82d8be900c6f0bd",
"size": "17092",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "server/tlvobject.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "110758"
}
],
"symlink_target": ""
} |
"""
Example DAG demonstrating setting up inter-DAG dependencies using ExternalTaskSensor and
ExternalTaskMarker.
In this example, child_task1 in example_external_task_marker_child depends on parent_task in
example_external_task_marker_parent. When parent_task is cleared with 'Recursive' selected,
the presence of ExternalTaskMarker tells Airflow to clear child_task1 and its downstream tasks.
ExternalTaskSensor will keep poking for the status of the remote ExternalTaskMarker task at a regular
interval until one of the following happens:
ExternalTaskMarker reaches the states mentioned in the allowed_states list.
In this case, ExternalTaskSensor will exit with a success status code
ExternalTaskMarker reaches the states mentioned in the failed_states list
In this case, ExternalTaskSensor will raise an AirflowException and the user needs to handle this
with multiple downstream tasks
ExternalTaskSensor times out. In this case, ExternalTaskSensor will raise AirflowSkipException
or AirflowSensorTimeout exception
"""
import pendulum
from airflow import DAG
from airflow.operators.empty import EmptyOperator
from airflow.sensors.external_task import ExternalTaskMarker, ExternalTaskSensor
start_date = pendulum.datetime(2021, 1, 1, tz="UTC")
with DAG(
dag_id="example_external_task_marker_parent",
start_date=start_date,
catchup=False,
schedule_interval=None,
tags=['example2'],
) as parent_dag:
# [START howto_operator_external_task_marker]
parent_task = ExternalTaskMarker(
task_id="parent_task",
external_dag_id="example_external_task_marker_child",
external_task_id="child_task1",
)
# [END howto_operator_external_task_marker]
with DAG(
dag_id="example_external_task_marker_child",
start_date=start_date,
schedule_interval=None,
catchup=False,
tags=['example2'],
) as child_dag:
# [START howto_operator_external_task_sensor]
child_task1 = ExternalTaskSensor(
task_id="child_task1",
external_dag_id=parent_dag.dag_id,
external_task_id=parent_task.task_id,
timeout=600,
allowed_states=['success'],
failed_states=['failed', 'skipped'],
mode="reschedule",
)
# [END howto_operator_external_task_sensor]
child_task2 = EmptyOperator(task_id="child_task2")
child_task1 >> child_task2
| {
"content_hash": "7473bcde381c9a78f311bbd980b41503",
"timestamp": "",
"source": "github",
"line_count": 66,
"max_line_length": 97,
"avg_line_length": 35.60606060606061,
"alnum_prop": 0.7459574468085106,
"repo_name": "danielvdende/incubator-airflow",
"id": "733b7327566338dabc1dd53494f4d3db7d9686e0",
"size": "3138",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "airflow/example_dags/example_external_task_marker_dag.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "25785"
},
{
"name": "Dockerfile",
"bytes": "76693"
},
{
"name": "HCL",
"bytes": "3786"
},
{
"name": "HTML",
"bytes": "164512"
},
{
"name": "JavaScript",
"bytes": "236992"
},
{
"name": "Jinja",
"bytes": "37155"
},
{
"name": "Jupyter Notebook",
"bytes": "2929"
},
{
"name": "Mako",
"bytes": "1339"
},
{
"name": "Python",
"bytes": "21824455"
},
{
"name": "R",
"bytes": "313"
},
{
"name": "Shell",
"bytes": "495567"
},
{
"name": "TypeScript",
"bytes": "326556"
}
],
"symlink_target": ""
} |
"""Control switches."""
from datetime import timedelta
import logging
from typing import Any
from ProgettiHWSW.relay import Relay
import async_timeout
from homeassistant.components.switch import SwitchEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.update_coordinator import (
CoordinatorEntity,
DataUpdateCoordinator,
)
from . import setup_switch
from .const import DEFAULT_POLLING_INTERVAL_SEC, DOMAIN
_LOGGER = logging.getLogger(DOMAIN)
async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up the switches from a config entry."""
board_api = hass.data[DOMAIN][config_entry.entry_id]
relay_count = config_entry.data["relay_count"]
switches = []
async def async_update_data():
"""Fetch data from API endpoint of board."""
async with async_timeout.timeout(5):
return await board_api.get_switches()
coordinator = DataUpdateCoordinator(
hass,
_LOGGER,
name="switch",
update_method=async_update_data,
update_interval=timedelta(seconds=DEFAULT_POLLING_INTERVAL_SEC),
)
await coordinator.async_refresh()
for i in range(1, int(relay_count) + 1):
switches.append(
ProgettihwswSwitch(
coordinator,
f"Relay #{i}",
setup_switch(board_api, i, config_entry.data[f"relay_{str(i)}"]),
)
)
async_add_entities(switches)
class ProgettihwswSwitch(CoordinatorEntity, SwitchEntity):
"""Represent a switch entity."""
def __init__(self, coordinator, name, switch: Relay):
"""Initialize the values."""
super().__init__(coordinator)
self._switch = switch
self._name = name
async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn the switch on."""
await self._switch.control(True)
await self.coordinator.async_request_refresh()
async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn the switch off."""
await self._switch.control(False)
await self.coordinator.async_request_refresh()
async def async_toggle(self, **kwargs: Any) -> None:
"""Toggle the state of switch."""
await self._switch.toggle()
await self.coordinator.async_request_refresh()
@property
def name(self):
"""Return the switch name."""
return self._name
@property
def is_on(self):
"""Get switch state."""
return self.coordinator.data[self._switch.id]
| {
"content_hash": "b52cf4dff2e7bc753052ddf5c248b9ab",
"timestamp": "",
"source": "github",
"line_count": 92,
"max_line_length": 81,
"avg_line_length": 29.869565217391305,
"alnum_prop": 0.6528384279475983,
"repo_name": "mezz64/home-assistant",
"id": "0aab943e3858f4e64380dc9991ab2ebe2f9c07e4",
"size": "2748",
"binary": false,
"copies": "3",
"ref": "refs/heads/dev",
"path": "homeassistant/components/progettihwsw/switch.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2963"
},
{
"name": "PLSQL",
"bytes": "840"
},
{
"name": "Python",
"bytes": "52481895"
},
{
"name": "Shell",
"bytes": "6252"
}
],
"symlink_target": ""
} |
import sys
import inspect
from py4j.protocol import Py4JJavaError
__all__ = []
def _exception_message(excp):
"""Return the message from an exception as either a str or unicode object. Supports both
Python 2 and Python 3.
>>> msg = "Exception message"
>>> excp = Exception(msg)
>>> msg == _exception_message(excp)
True
>>> msg = u"unicöde"
>>> excp = Exception(msg)
>>> msg == _exception_message(excp)
True
"""
if isinstance(excp, Py4JJavaError):
# 'Py4JJavaError' doesn't contain the stack trace available on the Java side in 'message'
# attribute in Python 2. We should call 'str' function on this exception in general but
# 'Py4JJavaError' has an issue about addressing non-ascii strings. So, here we work
# around by the direct call, '__str__()'. Please see SPARK-23517.
return excp.__str__()
if hasattr(excp, "message"):
return excp.message
return str(excp)
def _get_argspec(f):
"""
Get argspec of a function. Supports both Python 2 and Python 3.
"""
# `getargspec` is deprecated since python3.0 (incompatible with function annotations).
# See SPARK-23569.
if sys.version_info[0] < 3:
argspec = inspect.getargspec(f)
else:
argspec = inspect.getfullargspec(f)
return argspec
if __name__ == "__main__":
import doctest
(failure_count, test_count) = doctest.testmod()
if failure_count:
sys.exit(-1)
| {
"content_hash": "05d81ca180e2b08cf4a0da3fb69e7d36",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 97,
"avg_line_length": 29.62,
"alnum_prop": 0.6347062795408508,
"repo_name": "joseph-torres/spark",
"id": "49afc13640332dc538eaeadfe85c91335c92cc7d",
"size": "2291",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "python/pyspark/util.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "33781"
},
{
"name": "Batchfile",
"bytes": "30285"
},
{
"name": "C",
"bytes": "1493"
},
{
"name": "CSS",
"bytes": "23957"
},
{
"name": "HTML",
"bytes": "10056"
},
{
"name": "Java",
"bytes": "3169157"
},
{
"name": "JavaScript",
"bytes": "141585"
},
{
"name": "Makefile",
"bytes": "7774"
},
{
"name": "PLpgSQL",
"bytes": "83773"
},
{
"name": "PowerShell",
"bytes": "3756"
},
{
"name": "Python",
"bytes": "2436389"
},
{
"name": "R",
"bytes": "1089584"
},
{
"name": "Roff",
"bytes": "14714"
},
{
"name": "SQLPL",
"bytes": "27170"
},
{
"name": "Scala",
"bytes": "24595570"
},
{
"name": "Shell",
"bytes": "158689"
},
{
"name": "Thrift",
"bytes": "33605"
}
],
"symlink_target": ""
} |
from unittest import mock
from monasca_notification import notification as m_notification
from monasca_notification.plugins import jira_notifier
import queue
from tests import base
def alarm():
return {'tenantId': '0',
'alarmId': '0',
'alarmDefinitionId': 0,
'alarmName': 'test Alarm',
'alarmDescription': 'test Alarm description',
'oldState': 'OK',
'newState': 'ALARM',
'severity': 'CRITICAL',
'link': 'some-link',
'lifecycleState': 'OPEN',
'stateChangeReason': 'I am alarming!',
'timestamp': 1429023453632,
'metrics': {'dimension': {'hostname': 'foo1', 'service': 'bar1'}}}
def issue(component=True, custom_config=False):
issue = {'fields': {'description': 'Monasca alarm',
'issuetype': {'name': 'Bug'},
'project': {'key': 'MyProject'},
'summary': 'Monasca alarm for alarm_defintion test Alarm status '
'changed to ALARM for the alarm_id 0'}}
if component:
issue['fields'].update({'components': [{'name': 'MyComponent'}]})
if custom_config:
alarm_value = alarm()
issue['fields'].update({'description': alarm_value.get('alarmName')})
summary_format_string = 'Alarm created for {0} with severity {1} for {2}'
summary = summary_format_string.format(alarm_value.get('alarmName'),
alarm_value.get('newState'),
alarm_value.get('alarmId'))
issue['fields'].update({'summary': summary})
return issue
class RequestsResponse(object):
def __init__(self, status):
self.status_code = status
class TestJira(base.PluginTestCase):
default_address = 'http://test.jira:3333/?project=MyProject' \
'&component=MyComponent'
default_transitions_value = [[{'id': 100, 'name': 'reopen'}]]
issue_status_resolved = 'resolved'
def setUp(self):
super(TestJira, self).setUp(
jira_notifier.register_opts
)
self.conf_override(
group='jira_notifier',
user='username',
password='password'
)
self._trap = queue.Queue()
mock_log = mock.Mock()
mock_log.info = self._trap.put
mock_log.debug = self._trap.put
mock_log.warn = self._trap.put
mock_log.error = self._trap.put
mock_log.exception = self._trap.put
self._jr = jira_notifier.JiraNotifier(mock_log)
@mock.patch('monasca_notification.plugins.jira_notifier.jira')
def _notify(self,
transitions_value,
issue_status,
address,
mock_jira):
alarm_dict = alarm()
mock_jira_obj = mock.Mock()
mock_jira.JIRA.return_value = mock_jira_obj
mock_jira_issue = mock.Mock()
if issue_status:
mock_jira_obj.search_issues.return_value = [mock_jira_issue]
mock_jira_issue.fields.status.name = issue_status
else:
mock_jira_obj.search_issues.return_value = []
mock_jira_obj.transitions.side_effect = transitions_value
notification = m_notification.Notification(0, 'jira',
'jira notification',
address, 0, 0, alarm_dict)
return mock_jira, mock_jira_obj, self._jr.send_notification(notification)
def test_send_notification_issue_status_resolved(self):
mock_jira, mock_jira_obj, result = self._notify(TestJira.default_transitions_value,
TestJira.issue_status_resolved,
TestJira.default_address)
self.assertTrue(result)
mock_jira_obj.create_issue.assert_not_called()
self.assertEqual(
"project=MyProject and reporter='username' and summary ~ '0'",
mock_jira_obj.search_issues.call_args[0][0])
self.assertEqual(100, mock_jira_obj.transition_issue.call_args[0][1])
def test_send_notification_issue_status_closed(self):
issue_status = 'closed'
mock_jira, mock_jira_obj, result = self._notify(TestJira.default_transitions_value,
issue_status,
TestJira.default_address)
self.assertTrue(result)
mock_jira_obj.create_issue.assert_not_called()
self.assertEqual(
"project=MyProject and reporter='username' and summary ~ '0'",
mock_jira_obj.search_issues.call_args[0][0])
self.assertEqual(100, mock_jira_obj.transition_issue.call_args[0][1])
def test_send_notification_issue_status_progress(self):
issue_status = 'progress'
mock_jira, mock_jira_obj, result = self._notify(TestJira.default_transitions_value,
issue_status,
TestJira.default_address)
self.assertTrue(result)
mock_jira_obj.create_issue.assert_not_called()
mock_jira_obj.transitions.assert_not_called()
def test_send_notification_not_allow_transitions(self):
transitions_value = [[{'id': 100, 'name': 'not open'}]]
mock_jira, mock_jira_obj, result = self._notify(transitions_value,
TestJira.issue_status_resolved,
TestJira.default_address)
self.assertTrue(result)
mock_jira_obj.create_issue.assert_not_called()
mock_jira_obj.transition_issue.assert_not_called()
def test_send_notification_without_issue_list(self):
issue_status = None
mock_jira, mock_jira_obj, result = self._notify(TestJira.default_transitions_value,
issue_status,
TestJira.default_address)
self.assertTrue(result)
mock_jira_obj.transitions.assert_not_called()
self.assertEqual(issue(), mock_jira_obj.create_issue.call_args[1])
    def test_send_notification_without_component(self):
issue_status = None
address = 'http://test.jira:3333/?project=MyProject'
mock_jira, mock_jira_obj, result = self._notify(TestJira.default_transitions_value,
issue_status,
address)
self.assertTrue(result)
self.assertEqual(issue(component=False), mock_jira_obj.create_issue.call_args[1])
mock_jira_obj.transitions.assert_not_called()
def test_send_notification_create_jira_object_args(self):
mock_jira, mock_jira_obj, result = self._notify(TestJira.default_transitions_value,
TestJira.issue_status_resolved,
TestJira.default_address)
self.assertEqual('http://test.jira:3333/', mock_jira.JIRA.call_args[0][0])
self.assertEqual(('username', 'password'), mock_jira.JIRA.call_args[1].get('basic_auth'))
self.assertEqual(None, mock_jira.JIRA.call_args[1].get('proxies'))
def test_send_notification_with_proxy(self):
self.conf_override(
proxy='http://yourid:password@proxyserver:8080',
group='jira_notifier'
)
mock_jira, mock_jira_obj, result = self._notify(TestJira.default_transitions_value,
TestJira.issue_status_resolved,
TestJira.default_address)
self.assertTrue(result)
self.assertEqual({'https': 'http://yourid:password@proxyserver:8080'},
mock_jira.JIRA.call_args[1].get('proxies'))
def test_send_notification_custom_config_success(self):
issue_status = None
self.conf_override(
custom_formatter='tests/resources/test_jiraformat.yml',
group='jira_notifier'
)
mock_jira, mock_jira_obj, result = self._notify(TestJira.default_transitions_value,
issue_status,
TestJira.default_address)
self.assertTrue(result)
mock_jira_obj.transitions.assert_not_called()
self.assertEqual(issue(custom_config=True), mock_jira_obj.create_issue.call_args[1])
def test_send_notification_custom_config_failed(self):
self.conf_override(
custom_formatter='tests/resources/test_jiraformat_without_summary.yml',
group='jira_notifier'
)
mock_jira, mock_jira_obj, result = self._notify(TestJira.default_transitions_value,
TestJira.issue_status_resolved,
TestJira.default_address)
self.assertFalse(result)
def test_send_notification_custom_config_without_comments(self):
self.conf_override(
custom_formatter='tests/resources/test_jiraformat_without_comments.yml',
group='jira_notifier'
)
mock_jira, mock_jira_obj, result = self._notify(TestJira.default_transitions_value,
TestJira.issue_status_resolved,
TestJira.default_address)
self.assertTrue(result)
mock_jira_obj.add_comment.assert_not_called()
def test_send_notification_custom_config_exception(self):
self.conf_override(
custom_formatter='tests/resources/not_exist_file.yml',
group='jira_notifier'
)
self.assertRaises(Exception, self._notify,
TestJira.default_transitions_value,
TestJira.issue_status_resolved,
TestJira.default_address)
def test_type(self):
self.assertEqual('jira', self._jr.type)
def test_statsd_name(self):
self.assertEqual('jira_notifier', self._jr.statsd_name)
| {
"content_hash": "1b4f045bcacb078f1010aa27639ec2c6",
"timestamp": "",
"source": "github",
"line_count": 230,
"max_line_length": 97,
"avg_line_length": 45.55652173913043,
"alnum_prop": 0.5499141057453713,
"repo_name": "openstack/monasca-notification",
"id": "a0750f036008664bb9405536fc477a6bcf27c873",
"size": "11050",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_jira_notification.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1309"
},
{
"name": "Jinja",
"bytes": "14391"
},
{
"name": "Python",
"bytes": "246245"
},
{
"name": "Shell",
"bytes": "7529"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Todo',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('text', models.CharField(max_length=300)),
('marked', models.BooleanField(default=False)),
],
),
]
| {
"content_hash": "9d49ffc4fe360c2f1b7d544011ae1cde",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 114,
"avg_line_length": 24.545454545454547,
"alnum_prop": 0.5666666666666667,
"repo_name": "eswar2016/drf-redux",
"id": "d54dd21e86b9e30460a7beb2037698fa9f60d0b7",
"size": "612",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "todomvc/server/todo/migrations/0001_initial.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "352"
},
{
"name": "JavaScript",
"bytes": "22083"
},
{
"name": "Python",
"bytes": "8089"
}
],
"symlink_target": ""
} |
import django
from django import forms
from django.conf.urls import url
from django.contrib import admin
# from django.core.urlresolvers import reverse
from django.http.response import JsonResponse
from filer import settings as filer_settings
from ..models import File, Image
from ..utils import file_is_image_by_name
# FIXME get it from settings
from ..widgets import FILE_TYPE_CHOICES, THUMBNAIL_SIZE
# compat thing!
if django.VERSION[:2] < (1, 10):
from django.core.urlresolvers import reverse
else:
from django.urls import reverse
class FilerGuiFileSelectForm(forms.Form):
"""
Simple form to check if file exists
"""
filer_file = forms.ModelChoiceField(
queryset=File.objects.all()
)
class FilerGuiUploadForm(forms.Form):
"""
Simple form to check if the upload is ok
"""
file = forms.FileField()
file_type = forms.ChoiceField(
choices=FILE_TYPE_CHOICES,
initial=FILE_TYPE_CHOICES[0][0]
)
class FilerFileForm(forms.ModelForm):
"""
Simple form to create a filer file
"""
class Meta:
model = File
fields = [
'file',
'is_public',
'original_filename',
'owner',
]
class FilerImageForm(forms.ModelForm):
"""
Simple form to create a filer file
"""
class Meta:
model = Image
fields = [
'file',
'is_public',
'original_filename',
'owner',
]
class FilerGuiAdmin(admin.ModelAdmin):
def get_urls(self):
urls = [
url(
r'^file-detail-json/$',
self.admin_site.admin_view(self.file_detail_json_view),
name='file_detail_json_for_id'
),
url(
r'^file-upload/$',
self.admin_site.admin_view(self.file_upload_view),
name='file_upload'
),
]
return urls
def get_file_detail_dict(self, obj):
if obj.file_type == 'Image':
th = obj.easy_thumbnails_thumbnailer
thumb = th.get_thumbnail({'size': THUMBNAIL_SIZE})
thumb_url = thumb.url
edit_url = reverse('admin:filer_image_change', args=[obj.id])
else:
thumb_url = obj.icons['48']
edit_url = reverse('admin:filer_file_change', args=[obj.id])
data = {
'label': obj.label,
'file_id': obj.id,
'file_url': obj.url,
'icon_url': obj.icons['48'],
'thumb_url': thumb_url,
'edit_url': edit_url,
'file_type': obj.file_type.lower(),
}
return data
def file_detail_json_view(self, request):
form = FilerGuiFileSelectForm(request.POST)
if form.is_valid():
obj = form.cleaned_data.get('filer_file')
data = {
'message': 'ok',
'file': self.get_file_detail_dict(obj),
}
else:
data = {
'message': 'error',
'error': 'no valid file id',
}
return JsonResponse(data=data)
def file_upload_view(self, request):
data = {}
files = getattr(request, 'FILES')
if not files:
data = {
'message': 'error',
'error': 'no file'
}
else:
form = FilerGuiUploadForm(request.POST, request.FILES)
if form.is_valid():
# TODO: .values()[0] was for python2, is this correct?
# upload = request.FILES.values()[0]
upload = request.FILES.get('file')
# TODO: get rid of this distinction and find a proper way
# to get the correct model form
form_class = None
if file_is_image_by_name(upload.name):
form_class = FilerImageForm
else:
form_class = FilerFileForm
filer_form = form_class(
{
'owner': request.user.pk,
'original_filename': upload.name,
'is_public': filer_settings.FILER_IS_PUBLIC_DEFAULT,
},
{
'file': upload
}
)
if filer_form.is_valid():
# TODO check why settings.FILER_IS_PUBLIC_DEFAULT
# needs to be set to True for the following to work
obj = filer_form.save() # commit=False
data = {
'message': 'ok',
'file': self.get_file_detail_dict(obj),
}
return JsonResponse(data=data)
def get_model_perms(self, *args, **kwargs):
"""
It seems this is the only way to hide this admin in the app_index :(
"""
return {
'add': False,
'change': False,
'delete': False,
}
| {
"content_hash": "4881cb5508f18904dca08d49fa17d345",
"timestamp": "",
"source": "github",
"line_count": 174,
"max_line_length": 76,
"avg_line_length": 29.270114942528735,
"alnum_prop": 0.49970547810720595,
"repo_name": "rouxcode/django-filer-addons",
"id": "34eaac2092a0af36f3f9463101677c5fac8db7ed",
"size": "5093",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "filer_addons/filer_gui/admin/api.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2447"
},
{
"name": "HTML",
"bytes": "23218"
},
{
"name": "JavaScript",
"bytes": "22292"
},
{
"name": "Python",
"bytes": "106959"
},
{
"name": "SCSS",
"bytes": "16692"
},
{
"name": "Shell",
"bytes": "103"
}
],
"symlink_target": ""
} |
'''
Created on Oct 19, 2015
@author: wirkert
'''
import unittest
import numpy as np
import mc.camera as cam
class TestCamera(unittest.TestCase):
def setUp(self):
        # set up an imaging system that does almost nothing
        # (a camera with 8 identical bands, each weighting all
        # wavelengths equally).
        # The light source is completely homogeneous w.r.t. wavelength.
self.nr_bands = 8
self.nr_wavelengths = 200
wavelengths = np.linspace(400, 700, self.nr_wavelengths) * 10**-9 # nm
filters = np.ones((self.nr_bands, self.nr_wavelengths))
self.imaging_system = cam.ImagingSystem(wavelengths=wavelengths,
F=filters)
def test_reflectance_transform(self):
reflectance = 0.5 * np.ones(self.nr_wavelengths)
# we expect a normalized result with all values equal.
expected_measurement = np.ones(self.nr_bands) / self.nr_bands
calc_cam_measurement = cam.transform_reflectance(self.imaging_system,
reflectance)
np.testing.assert_almost_equal(np.squeeze(calc_cam_measurement),
np.squeeze(expected_measurement),
err_msg="test if " +
"camera measurement is " +
"correctly calculated from " +
"given reflectance")
def test_spectrometer_transform(self):
# measurement made by a spectrometer:
spectrometer = 0.5 * np.ones(self.nr_wavelengths)
# we expect a normalized result with all values equal.
expected_measurement = np.ones(self.nr_bands) / self.nr_bands
calc_cam_measurement = cam.transform_color(self.imaging_system,
spectrometer)
np.testing.assert_almost_equal(np.squeeze(calc_cam_measurement),
np.squeeze(expected_measurement),
err_msg="test if " +
"camera measurement is " +
"correctly calculated from " +
"given spectrometer measurement")
def test_difference_transformations(self):
        # first set non-uniform lighting
self.imaging_system.w = _n_random_numbers(self.nr_wavelengths)
# also, let one filter differ from the rest
self.imaging_system.F[0, :] = _n_random_numbers(self.nr_wavelengths)
measurement = 0.5 * np.ones(self.nr_wavelengths)
r_transformed = cam.transform_color(self.imaging_system, measurement)
        s_transformed = cam.transform_reflectance(self.imaging_system, measurement)
# the two vectors should be different because one transformation is
# taking lighting into account and the other one is not.
self.assertFalse(r_transformed[0] == s_transformed[0], "test if " +
"color and reflectance transformations differ")
def _n_random_numbers(n):
    return np.array([np.random.uniform(0.1, 1) for _ in range(n)])
if __name__ == "__main__":
# import sys;sys.argv = ['', 'Test.testName']
unittest.main()
| {
"content_hash": "71154149bdf93a5bef7de38789680658",
"timestamp": "",
"source": "github",
"line_count": 75,
"max_line_length": 84,
"avg_line_length": 45.586666666666666,
"alnum_prop": 0.5598128107633811,
"repo_name": "swirkert/ipcai2016",
"id": "b930bccdcef5bbb4ee8ee95fd1d5b3cef280c403",
"size": "3419",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mc/test/test_camera.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "2603174"
},
{
"name": "Python",
"bytes": "197361"
}
],
"symlink_target": ""
} |
from micropython import const
# Bluetooth Status/Error Codes
# Vendor-specific error codes
# Error codes defined by ST related to BlueNRG stack
# The command cannot be executed due to the current state of the device.
BLE_STATUS_FAILED = const(0x41)
# Some parameters are invalid.
BLE_STATUS_INVALID_PARAMS = const(0x42)
# It is not allowed to start the procedure (e.g. another the procedure is
# ongoing or cannot be started on the given handle).
BLE_STATUS_NOT_ALLOWED = const(0x46)
# Unexpected error.
BLE_STATUS_ERROR = const(0x47)
BLE_STATUS_ADDR_NOT_RESOLVED = const(0x48)
FLASH_READ_FAILED = const(0x49)
FLASH_WRITE_FAILED = const(0x4A)
FLASH_ERASE_FAILED = const(0x4B)
BLE_STATUS_INVALID_CID = const(0x50)
TIMER_NOT_VALID_LAYER = const(0x54)
TIMER_INSUFFICIENT_RESOURCES = const(0x55)
BLE_STATUS_CSRK_NOT_FOUND = const(0x5A)
BLE_STATUS_IRK_NOT_FOUND = const(0x5B)
BLE_STATUS_DEV_NOT_FOUND_IN_DB = const(0x5C)
BLE_STATUS_SEC_DB_FULL = const(0x5D)
BLE_STATUS_DEV_NOT_BONDED = const(0x5E)
BLE_STATUS_DEV_IN_BLACKLIST = const(0x5F)
BLE_STATUS_INVALID_HANDLE = const(0x60)
BLE_STATUS_INVALID_PARAMETER = const(0x61)
BLE_STATUS_OUT_OF_HANDLE = const(0x62)
BLE_STATUS_INVALID_OPERATION = const(0x63)
BLE_STATUS_INSUFFICIENT_RESOURCES = const(0x64)
BLE_INSUFFICIENT_ENC_KEYSIZE = const(0x65)
BLE_STATUS_CHARAC_ALREADY_EXISTS = const(0x66)
# Returned when no valid slots are available
# (e.g. when there are no available state machines).
BLE_STATUS_NO_VALID_SLOT = const(0x82)
# Returned when a scan window shorter than the minimum allowed value (i.e. 2 ms)
# has been requested.
BLE_STATUS_SCAN_WINDOW_SHORT = const(0x83)
# Returned when the maximum requested interval to be allocated is shorter than
# the current anchor period and there is no submultiple for the current anchor
# period that is between the minimum and the maximum requested intervals.
BLE_STATUS_NEW_INTERVAL_FAILED = const(0x84)
# Returned when the maximum requested interval to be allocated is greater than
# the current anchor period and there is no multiple of the anchor period that
# is between the minimum and the maximum requested intervals.
BLE_STATUS_INTERVAL_TOO_LARGE = const(0x85)
# Returned when the current anchor period or a new one can be found that is
# compatible with the interval range requested by the new slot but the maximum
# available length that can be allocated is less than the minimum requested
# slot length.
BLE_STATUS_LENGTH_FAILED = const(0x86)
# Library Error Codes
# Error codes defined by ST related to MCU library.
BLE_STATUS_TIMEOUT = const(0xFF)
BLE_STATUS_PROFILE_ALREADY_INITIALIZED = const(0xF0)
BLE_STATUS_NULL_PARAM = const(0xF1)
# Hardware error event codes
# See EVT_HARDWARE_ERROR.
# Error on the SPI bus has been detected, most likely caused by incorrect SPI
# configuration on the external micro-controller.
SPI_FRAMING_ERROR = const(0)
# Caused by a slow crystal startup; an indication that the
# HS_STARTUP_TIME in the device configuration needs to be tuned.
# After this event it is recommended to hardware-reset the device.
RADIO_STATE_ERROR = const(1)
# Caused by a slow crystal startup; an indication that the
# HS_STARTUP_TIME in the device configuration needs to be tuned.
# After this event it is recommended to hardware-reset the device.
TIMER_OVERRUN_ERROR = const(2)
| {
"content_hash": "a16a48456888750ef7d26e8028850f54",
"timestamp": "",
"source": "github",
"line_count": 91,
"max_line_length": 78,
"avg_line_length": 36.57142857142857,
"alnum_prop": 0.7743389423076923,
"repo_name": "dmazzella/uble",
"id": "5cc7fdfd2399b2d2bcc13792fdddd3be11bd096b",
"size": "3376",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bluetooth_low_energy/protocols/hci/vendor_specifics/st_microelectronics/bluenrg_ms/status.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "257521"
}
],
"symlink_target": ""
} |
from base64 import b64decode
from flask import Response, Blueprint, abort, request
from analyticpi import app
from analyticpi.db import database
from analyticpi.models import PageView
from analyticpi.views import tracking_view
# 1 pixel GIF, base64-encoded.
BEACON = b64decode(
'R0lGODlhAQABAIAAANvf7wAAACH5BAEAAAAALAAAAAABAAEAAAICRAEAOw==')
JAVASCRIPT = """(function(){
var d=document,i=new Image,e=encodeURIComponent;
i.src='%s/track.gif?site=%s&url='+e(d.location.href)+'&ref='+e(d.referrer)+'&t='+e(d.title);
})()""".replace('\n', '')
@tracking_view.route('/track.gif')
def analyze():
if not request.args.get('url'):
abort(404)
    print(request.remote_addr)
with database.transaction():
PageView.create_from_request()
response = Response(BEACON, mimetype='image/gif')
response.headers['Cache-Control'] = 'private, no-cache'
return response
@tracking_view.route('/track.js')
def script():
site_id = request.args.get("site")
if site_id is None:
abort(400)
return Response(JAVASCRIPT % (app.config['ROOT_URL'], site_id),
mimetype='text/javascript')
| {
"content_hash": "35bb2ff4f81de861fbd5c27d4dbab225",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 96,
"avg_line_length": 28.8,
"alnum_prop": 0.6814236111111112,
"repo_name": "analyticpi/analyticpi",
"id": "5c82101f442469a2c15247cf800bfa5e9f37c0ba",
"size": "1152",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "analyticpi/views/tracking.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "354306"
},
{
"name": "HTML",
"bytes": "17035"
},
{
"name": "JavaScript",
"bytes": "691224"
},
{
"name": "Python",
"bytes": "25264"
}
],
"symlink_target": ""
} |
import logging
if __name__ == '__main__':
logging.basicConfig()
_log = logging.getLogger(__name__)
# Undeclared XML namespace
import pyxb.binding.generate
import pyxb.utils.domutils
from xml.dom import Node
import os.path
# <xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema" xmlns:xml="http://www.w3.org/XML/1998/namespace">
xsd='''<?xml version="1.0" encoding="UTF-8"?>
<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema">
<xs:complexType name="structure">
<xs:attributeGroup ref="xml:specialAttrs"/>
</xs:complexType>
<xs:element name="instance" type="structure"/>
</xs:schema>'''
code = pyxb.binding.generate.GeneratePython(schema_text=xsd)
#open('code.py', 'w').write(code)
rv = compile(code, 'test', 'exec')
eval(rv)
from pyxb.exceptions_ import *
import unittest
class TestTrac0023 (unittest.TestCase):
def testBasic (self):
self.assertEqual(4, len(structure._AttributeMap))
self.assertEqual(pyxb.binding.xml_.STD_ANON_lang, structure._AttributeMap[pyxb.namespace.XML.createExpandedName('lang')].dataType())
self.assertEqual(pyxb.binding.xml_.STD_ANON_space, structure._AttributeMap[pyxb.namespace.XML.createExpandedName('space')].dataType())
self.assertEqual(pyxb.binding.datatypes.anyURI, structure._AttributeMap[pyxb.namespace.XML.createExpandedName('base')].dataType())
self.assertEqual(pyxb.binding.datatypes.ID, structure._AttributeMap[pyxb.namespace.XML.createExpandedName('id')].dataType())
if __name__ == '__main__':
unittest.main()
| {
"content_hash": "45eaab8cd03ab0768b35a6047cbbb1cc",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 142,
"avg_line_length": 36.357142857142854,
"alnum_prop": 0.7177472167648985,
"repo_name": "jonfoster/pyxb-upstream-mirror",
"id": "55e0b16783992ecf025e66917237967e814ee989",
"size": "1551",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "tests/trac/test-trac-0023.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Perl",
"bytes": "6286"
},
{
"name": "Python",
"bytes": "1854695"
},
{
"name": "Shell",
"bytes": "37524"
}
],
"symlink_target": ""
} |
from core.domain import exp_domain
from core.domain import stats_domain
from core.tests import test_utils
import feconf
import utils
class ExplorationStatsTests(test_utils.GenericTestBase):
"""Tests the ExplorationStats domain object."""
def _get_exploration_stats_from_dict(self, exploration_stats_dict):
state_stats_mapping = {}
for state_name in exploration_stats_dict['state_stats_mapping']:
state_stats_mapping[state_name] = stats_domain.StateStats.from_dict(
exploration_stats_dict['state_stats_mapping'][state_name])
return stats_domain.ExplorationStats(
exploration_stats_dict['exp_id'],
exploration_stats_dict['exp_version'],
exploration_stats_dict['num_starts_v1'],
exploration_stats_dict['num_starts_v2'],
exploration_stats_dict['num_actual_starts_v1'],
exploration_stats_dict['num_actual_starts_v2'],
exploration_stats_dict['num_completions_v1'],
exploration_stats_dict['num_completions_v2'],
state_stats_mapping)
def test_to_dict(self):
state_stats_dict = {
'total_answers_count_v1': 0,
'total_answers_count_v2': 10,
'useful_feedback_count_v1': 0,
'useful_feedback_count_v2': 4,
'total_hit_count_v1': 0,
'total_hit_count_v2': 18,
'first_hit_count_v1': 0,
'first_hit_count_v2': 7,
'num_times_solution_viewed_v2': 2,
'num_completions_v1': 0,
'num_completions_v2': 2
}
expected_exploration_stats_dict = {
'exp_id': 'exp_id1',
'exp_version': 1,
'num_starts_v1': 0,
'num_starts_v2': 30,
'num_actual_starts_v1': 0,
'num_actual_starts_v2': 10,
'num_completions_v1': 0,
'num_completions_v2': 5,
'state_stats_mapping': {
'Home': state_stats_dict
}
}
observed_exploration_stats = self._get_exploration_stats_from_dict(
expected_exploration_stats_dict)
self.assertDictEqual(
expected_exploration_stats_dict,
observed_exploration_stats.to_dict())
def test_validate(self):
state_stats_dict = {
'total_answers_count_v1': 0,
'total_answers_count_v2': 10,
'useful_feedback_count_v1': 0,
'useful_feedback_count_v2': 4,
'total_hit_count_v1': 0,
'total_hit_count_v2': 18,
'first_hit_count_v1': 0,
'first_hit_count_v2': 7,
'num_times_solution_viewed_v2': 2,
'num_completions_v1': 0,
'num_completions_v2': 2
}
exploration_stats_dict = {
'exp_id': 'exp_id1',
'exp_version': 1,
'num_starts_v1': 0,
'num_starts_v2': 30,
'num_actual_starts_v1': 0,
'num_actual_starts_v2': 10,
'num_completions_v1': 0,
'num_completions_v2': 5,
'state_stats_mapping': {
'Home': state_stats_dict
}
}
exploration_stats = self._get_exploration_stats_from_dict(
exploration_stats_dict)
exploration_stats.validate()
# Make the exp_id integer.
exploration_stats.exp_id = 10
with self.assertRaisesRegexp(utils.ValidationError, (
'Expected exp_id to be a string')):
exploration_stats.validate()
# Make the num_actual_starts string.
exploration_stats.exp_id = 'exp_id1'
exploration_stats.num_actual_starts_v2 = '0'
with self.assertRaisesRegexp(utils.ValidationError, (
'Expected num_actual_starts_v2 to be an int')):
exploration_stats.validate()
# Make the state_stats_mapping list.
exploration_stats.num_actual_starts_v2 = 10
exploration_stats.state_stats_mapping = []
with self.assertRaisesRegexp(utils.ValidationError, (
'Expected state_stats_mapping to be a dict')):
exploration_stats.validate()
# Make the num_completions negative.
exploration_stats.state_stats_mapping = {}
exploration_stats.num_completions_v2 = -5
with self.assertRaisesRegexp(utils.ValidationError, (
'%s cannot have negative values' % ('num_completions_v2'))):
exploration_stats.validate()
class StateStatsTests(test_utils.GenericTestBase):
"""Tests the StateStats domain object."""
def test_from_dict(self):
state_stats_dict = {
'total_answers_count_v1': 0,
'total_answers_count_v2': 10,
'useful_feedback_count_v1': 0,
'useful_feedback_count_v2': 4,
'total_hit_count_v1': 0,
'total_hit_count_v2': 18,
'first_hit_count_v1': 0,
'first_hit_count_v2': 7,
'num_times_solution_viewed_v2': 2,
'num_completions_v1': 0,
'num_completions_v2': 2
}
state_stats = stats_domain.StateStats(0, 10, 0, 4, 0, 18, 0, 7, 2, 0, 2)
expected_state_stats = stats_domain.StateStats.from_dict(
state_stats_dict)
self.assertEqual(
state_stats.total_answers_count_v1,
expected_state_stats.total_answers_count_v1)
self.assertEqual(
state_stats.total_answers_count_v2,
expected_state_stats.total_answers_count_v2)
self.assertEqual(
state_stats.useful_feedback_count_v1,
expected_state_stats.useful_feedback_count_v1)
self.assertEqual(
state_stats.useful_feedback_count_v2,
expected_state_stats.useful_feedback_count_v2)
self.assertEqual(
state_stats.total_hit_count_v1,
expected_state_stats.total_hit_count_v1)
self.assertEqual(
state_stats.total_hit_count_v2,
expected_state_stats.total_hit_count_v2)
self.assertEqual(
state_stats.first_hit_count_v1,
expected_state_stats.first_hit_count_v1)
self.assertEqual(
state_stats.first_hit_count_v2,
expected_state_stats.first_hit_count_v2)
self.assertEqual(
state_stats.num_times_solution_viewed_v2,
expected_state_stats.num_times_solution_viewed_v2)
self.assertEqual(
state_stats.num_completions_v1,
expected_state_stats.num_completions_v1)
self.assertEqual(
state_stats.num_completions_v2,
expected_state_stats.num_completions_v2)
def test_create_default(self):
state_stats = stats_domain.StateStats.create_default()
self.assertEqual(state_stats.total_answers_count_v1, 0)
self.assertEqual(state_stats.total_answers_count_v2, 0)
self.assertEqual(state_stats.useful_feedback_count_v1, 0)
self.assertEqual(state_stats.useful_feedback_count_v2, 0)
self.assertEqual(state_stats.total_hit_count_v1, 0)
self.assertEqual(state_stats.total_hit_count_v2, 0)
self.assertEqual(state_stats.total_answers_count_v1, 0)
self.assertEqual(state_stats.total_answers_count_v2, 0)
self.assertEqual(state_stats.num_times_solution_viewed_v2, 0)
self.assertEqual(state_stats.num_completions_v1, 0)
self.assertEqual(state_stats.num_completions_v2, 0)
def test_to_dict(self):
state_stats_dict = {
'total_answers_count_v1': 0,
'total_answers_count_v2': 10,
'useful_feedback_count_v1': 0,
'useful_feedback_count_v2': 4,
'total_hit_count_v1': 0,
'total_hit_count_v2': 18,
'first_hit_count_v1': 0,
'first_hit_count_v2': 7,
'num_times_solution_viewed_v2': 2,
'num_completions_v1': 0,
'num_completions_v2': 2
}
state_stats = stats_domain.StateStats(0, 10, 0, 4, 0, 18, 0, 7, 2, 0, 2)
self.assertEqual(state_stats_dict, state_stats.to_dict())
def test_validation(self):
state_stats = stats_domain.StateStats(0, 10, 0, 4, 0, 18, 0, 7, 2, 0, 2)
state_stats.validate()
# Change total_answers_count to string.
state_stats.total_answers_count_v2 = '10'
with self.assertRaisesRegexp(utils.ValidationError, (
'Expected total_answers_count_v2 to be an int')):
state_stats.validate()
# Make the total_answers_count negative.
state_stats.total_answers_count_v2 = -5
with self.assertRaisesRegexp(utils.ValidationError, (
'%s cannot have negative values' % ('total_answers_count_v2'))):
state_stats.validate()
class StateAnswersTests(test_utils.GenericTestBase):
"""Tests the StateAnswers domain object."""
def test_can_retrieve_properly_constructed_submitted_answer_dict_list(self):
state_answers = stats_domain.StateAnswers(
'exp_id', 1, 'initial_state', 'TextInput', [
stats_domain.SubmittedAnswer(
'Text', 'TextInput', 0, 1,
exp_domain.EXPLICIT_CLASSIFICATION, {}, 'sess', 10.5,
rule_spec_str='rule spec str1', answer_str='answer str1'),
stats_domain.SubmittedAnswer(
'Other text', 'TextInput', 1, 0,
exp_domain.DEFAULT_OUTCOME_CLASSIFICATION, {}, 'sess', 7.5,
rule_spec_str='rule spec str2', answer_str='answer str2')])
submitted_answer_dict_list = (
state_answers.get_submitted_answer_dict_list())
self.assertEqual(submitted_answer_dict_list, [{
'answer': 'Text',
'interaction_id': 'TextInput',
'answer_group_index': 0,
'rule_spec_index': 1,
'classification_categorization': exp_domain.EXPLICIT_CLASSIFICATION,
'params': {},
'session_id': 'sess',
'time_spent_in_sec': 10.5,
'rule_spec_str': 'rule spec str1',
'answer_str': 'answer str1'
}, {
'answer': 'Other text',
'interaction_id': 'TextInput',
'answer_group_index': 1,
'rule_spec_index': 0,
'classification_categorization': (
exp_domain.DEFAULT_OUTCOME_CLASSIFICATION),
'params': {},
'session_id': 'sess',
'time_spent_in_sec': 7.5,
'rule_spec_str': 'rule spec str2',
'answer_str': 'answer str2'
}])
class StateAnswersValidationTests(test_utils.GenericTestBase):
"""Tests the StateAnswers domain object for validation."""
def setUp(self):
super(StateAnswersValidationTests, self).setUp()
self.state_answers = stats_domain.StateAnswers(
'exp_id', 1, 'initial_state', 'TextInput', [])
# The canonical object should have no validation problems
self.state_answers.validate()
def test_exploration_id_must_be_string(self):
self.state_answers.exploration_id = 0
self._assert_validation_error(
self.state_answers, 'Expected exploration_id to be a string')
def test_state_name_must_be_string(self):
self.state_answers.state_name = ['state']
self._assert_validation_error(
self.state_answers, 'Expected state_name to be a string')
def test_interaction_id_can_be_none(self):
self.state_answers.interaction_id = None
self.state_answers.validate()
def test_interaction_id_must_otherwise_be_string(self):
self.state_answers.interaction_id = 10
self._assert_validation_error(
self.state_answers, 'Expected interaction_id to be a string')
def test_interaction_id_must_refer_to_existing_interaction(self):
self.state_answers.interaction_id = 'FakeInteraction'
self._assert_validation_error(
self.state_answers, 'Unknown interaction_id: FakeInteraction')
def test_submitted_answer_list_must_be_list(self):
self.state_answers.submitted_answer_list = {}
self._assert_validation_error(
self.state_answers, 'Expected submitted_answer_list to be a list')
def test_schema_version_must_be_integer(self):
self.state_answers.schema_version = '1'
self._assert_validation_error(
self.state_answers, 'Expected schema_version to be an integer')
def test_schema_version_must_be_between_one_and_current_version(self):
self.state_answers.schema_version = 0
self._assert_validation_error(
self.state_answers, 'schema_version < 1: 0')
self.state_answers.schema_version = (
feconf.CURRENT_STATE_ANSWERS_SCHEMA_VERSION + 1)
self._assert_validation_error(
self.state_answers,
'schema_version > feconf\\.CURRENT_STATE_ANSWERS_SCHEMA_VERSION')
self.state_answers.schema_version = 1
self.state_answers.validate()
class SubmittedAnswerTests(test_utils.GenericTestBase):
"""Tests the SubmittedAnswer domain object."""
def test_can_be_converted_to_from_full_dict(self):
submitted_answer = stats_domain.SubmittedAnswer(
'Text', 'TextInput', 0, 1, exp_domain.EXPLICIT_CLASSIFICATION, {},
'sess', 10.5, rule_spec_str='rule spec str',
answer_str='answer str')
submitted_answer_dict = submitted_answer.to_dict()
cloned_submitted_answer = stats_domain.SubmittedAnswer.from_dict(
submitted_answer_dict)
self.assertEqual(
cloned_submitted_answer.to_dict(), submitted_answer_dict)
def test_can_be_converted_to_full_dict(self):
submitted_answer = stats_domain.SubmittedAnswer(
'Text', 'TextInput', 0, 1, exp_domain.EXPLICIT_CLASSIFICATION, {},
'sess', 10.5, rule_spec_str='rule spec str',
answer_str='answer str')
self.assertEqual(submitted_answer.to_dict(), {
'answer': 'Text',
'interaction_id': 'TextInput',
'answer_group_index': 0,
'rule_spec_index': 1,
'classification_categorization': exp_domain.EXPLICIT_CLASSIFICATION,
'params': {},
'session_id': 'sess',
'time_spent_in_sec': 10.5,
'rule_spec_str': 'rule spec str',
'answer_str': 'answer str'
})
def test_dict_may_not_include_rule_spec_str_or_answer_str(self):
submitted_answer = stats_domain.SubmittedAnswer(
'Text', 'TextInput', 0, 1, exp_domain.EXPLICIT_CLASSIFICATION, {},
'sess', 10.5)
self.assertEqual(submitted_answer.to_dict(), {
'answer': 'Text',
'interaction_id': 'TextInput',
'answer_group_index': 0,
'rule_spec_index': 1,
'classification_categorization': exp_domain.EXPLICIT_CLASSIFICATION,
'params': {},
'session_id': 'sess',
'time_spent_in_sec': 10.5
})
def test_requires_answer_to_be_created_from_dict(self):
with self.assertRaisesRegexp(KeyError, 'answer'):
stats_domain.SubmittedAnswer.from_dict({
'interaction_id': 'TextInput',
'answer_group_index': 0,
'rule_spec_index': 1,
'classification_categorization': (
exp_domain.EXPLICIT_CLASSIFICATION),
'params': {},
'session_id': 'sess',
'time_spent_in_sec': 10.5
})
def test_requires_interaction_id_to_be_created_from_dict(self):
with self.assertRaisesRegexp(KeyError, 'interaction_id'):
stats_domain.SubmittedAnswer.from_dict({
'answer': 'Text',
'answer_group_index': 0,
'rule_spec_index': 1,
'classification_categorization': (
exp_domain.EXPLICIT_CLASSIFICATION),
'params': {},
'session_id': 'sess',
'time_spent_in_sec': 10.5
})
def test_requires_answer_group_index_to_be_created_from_dict(self):
with self.assertRaisesRegexp(KeyError, 'answer_group_index'):
stats_domain.SubmittedAnswer.from_dict({
'answer': 'Text',
'interaction_id': 'TextInput',
'rule_spec_index': 1,
'classification_categorization': (
exp_domain.EXPLICIT_CLASSIFICATION),
'params': {},
'session_id': 'sess',
'time_spent_in_sec': 10.5
})
def test_requires_rule_spec_index_to_be_created_from_dict(self):
with self.assertRaisesRegexp(KeyError, 'rule_spec_index'):
stats_domain.SubmittedAnswer.from_dict({
'answer': 'Text',
'interaction_id': 'TextInput',
'answer_group_index': 0,
'classification_categorization': (
exp_domain.EXPLICIT_CLASSIFICATION),
'params': {},
'session_id': 'sess',
'time_spent_in_sec': 10.5
})
def test_requires_classification_categ_to_be_created_from_dict(self):
with self.assertRaisesRegexp(KeyError, 'classification_categorization'):
stats_domain.SubmittedAnswer.from_dict({
'answer': 'Text',
'interaction_id': 'TextInput',
'answer_group_index': 0,
'rule_spec_index': 1,
'params': {},
'session_id': 'sess',
'time_spent_in_sec': 10.5
})
def test_requires_params_to_be_created_from_dict(self):
with self.assertRaisesRegexp(KeyError, 'params'):
stats_domain.SubmittedAnswer.from_dict({
'answer': 'Text',
'interaction_id': 'TextInput',
'answer_group_index': 0,
'rule_spec_index': 1,
'classification_categorization': (
exp_domain.EXPLICIT_CLASSIFICATION),
'session_id': 'sess',
'time_spent_in_sec': 10.5
})
def test_requires_session_id_to_be_created_from_dict(self):
with self.assertRaisesRegexp(KeyError, 'session_id'):
stats_domain.SubmittedAnswer.from_dict({
'answer': 'Text',
'interaction_id': 'TextInput',
'answer_group_index': 0,
'rule_spec_index': 1,
'classification_categorization': (
exp_domain.EXPLICIT_CLASSIFICATION),
'params': {},
'time_spent_in_sec': 10.5
})
def test_requires_time_spent_in_sec_to_be_created_from_dict(self):
with self.assertRaisesRegexp(KeyError, 'time_spent_in_sec'):
stats_domain.SubmittedAnswer.from_dict({
'answer': 'Text',
'interaction_id': 'TextInput',
'answer_group_index': 0,
'rule_spec_index': 1,
'classification_categorization': (
exp_domain.EXPLICIT_CLASSIFICATION),
'params': {},
'session_id': 'sess',
})
def test_can_be_created_from_full_dict(self):
submitted_answer = stats_domain.SubmittedAnswer.from_dict({
'answer': 'Text',
'interaction_id': 'TextInput',
'answer_group_index': 0,
'rule_spec_index': 1,
'classification_categorization': (
exp_domain.EXPLICIT_CLASSIFICATION),
'params': {},
'session_id': 'sess',
'time_spent_in_sec': 10.5,
'rule_spec_str': 'rule spec str',
'answer_str': 'answer str'
})
self.assertEqual(submitted_answer.answer, 'Text')
self.assertEqual(submitted_answer.interaction_id, 'TextInput')
self.assertEqual(submitted_answer.answer_group_index, 0)
self.assertEqual(submitted_answer.rule_spec_index, 1)
self.assertEqual(
submitted_answer.classification_categorization,
exp_domain.EXPLICIT_CLASSIFICATION)
self.assertEqual(submitted_answer.params, {})
self.assertEqual(submitted_answer.session_id, 'sess')
self.assertEqual(submitted_answer.time_spent_in_sec, 10.5)
self.assertEqual(submitted_answer.rule_spec_str, 'rule spec str')
self.assertEqual(submitted_answer.answer_str, 'answer str')
def test_can_be_created_from_dict_missing_rule_spec_and_answer(self):
submitted_answer = stats_domain.SubmittedAnswer.from_dict({
'answer': 'Text',
'interaction_id': 'TextInput',
'answer_group_index': 0,
'rule_spec_index': 1,
'classification_categorization': (
exp_domain.EXPLICIT_CLASSIFICATION),
'params': {},
'session_id': 'sess',
'time_spent_in_sec': 10.5
})
self.assertEqual(submitted_answer.answer, 'Text')
self.assertEqual(submitted_answer.interaction_id, 'TextInput')
self.assertEqual(submitted_answer.answer_group_index, 0)
self.assertEqual(submitted_answer.rule_spec_index, 1)
self.assertEqual(
submitted_answer.classification_categorization,
exp_domain.EXPLICIT_CLASSIFICATION)
self.assertEqual(submitted_answer.params, {})
self.assertEqual(submitted_answer.session_id, 'sess')
self.assertEqual(submitted_answer.time_spent_in_sec, 10.5)
self.assertIsNone(submitted_answer.rule_spec_str)
self.assertIsNone(submitted_answer.answer_str)
class SubmittedAnswerValidationTests(test_utils.GenericTestBase):
"""Tests the SubmittedAnswer domain object for validation."""
def setUp(self):
super(SubmittedAnswerValidationTests, self).setUp()
self.submitted_answer = stats_domain.SubmittedAnswer(
'Text', 'TextInput', 0, 0, exp_domain.EXPLICIT_CLASSIFICATION, {},
'session_id', 0.)
# The canonical object should have no validation problems
self.submitted_answer.validate()
def test_answer_may_be_none_only_for_linear_interaction(self):
# It's valid for answer to be None if the interaction type is Continue.
self.submitted_answer.answer = None
self._assert_validation_error(
self.submitted_answer,
'SubmittedAnswers must have a provided answer except for linear '
'interactions')
self.submitted_answer.interaction_id = 'Continue'
self.submitted_answer.validate()
def test_time_spent_in_sec_must_not_be_none(self):
self.submitted_answer.time_spent_in_sec = None
self._assert_validation_error(
self.submitted_answer,
'SubmittedAnswers must have a provided time_spent_in_sec')
def test_time_spent_in_sec_must_be_number(self):
self.submitted_answer.time_spent_in_sec = '0'
self._assert_validation_error(
self.submitted_answer, 'Expected time_spent_in_sec to be a number')
def test_time_spent_in_sec_must_be_positive(self):
self.submitted_answer.time_spent_in_sec = -1.
self._assert_validation_error(
self.submitted_answer,
'Expected time_spent_in_sec to be non-negative')
def test_session_id_must_not_be_none(self):
self.submitted_answer.session_id = None
self._assert_validation_error(
self.submitted_answer,
'SubmittedAnswers must have a provided session_id')
def test_session_id_must_be_string(self):
self.submitted_answer.session_id = 90
self._assert_validation_error(
self.submitted_answer, 'Expected session_id to be a string')
def test_params_must_be_dict(self):
self.submitted_answer.params = []
self._assert_validation_error(
self.submitted_answer, 'Expected params to be a dict')
def test_answer_group_index_must_be_integer(self):
self.submitted_answer.answer_group_index = '0'
self._assert_validation_error(
self.submitted_answer,
'Expected answer_group_index to be an integer')
def test_answer_group_index_must_be_positive(self):
self.submitted_answer.answer_group_index = -1
self._assert_validation_error(
self.submitted_answer,
'Expected answer_group_index to be non-negative')
def test_rule_spec_index_must_be_integer(self):
self.submitted_answer.rule_spec_index = '0'
self._assert_validation_error(
self.submitted_answer, 'Expected rule_spec_index to be an integer')
def test_rule_spec_index_must_be_positive(self):
self.submitted_answer.rule_spec_index = -1
self._assert_validation_error(
self.submitted_answer,
'Expected rule_spec_index to be non-negative')
def test_classification_categorization_must_be_valid_category(self):
self.submitted_answer.classification_categorization = (
exp_domain.TRAINING_DATA_CLASSIFICATION)
self.submitted_answer.validate()
self.submitted_answer.classification_categorization = (
exp_domain.STATISTICAL_CLASSIFICATION)
self.submitted_answer.validate()
self.submitted_answer.classification_categorization = (
exp_domain.DEFAULT_OUTCOME_CLASSIFICATION)
self.submitted_answer.validate()
self.submitted_answer.classification_categorization = 'soft'
self._assert_validation_error(
self.submitted_answer,
'Expected valid classification_categorization')
def test_rule_spec_str_must_be_none_or_string(self):
self.submitted_answer.rule_spec_str = 10
self._assert_validation_error(
self.submitted_answer,
'Expected rule_spec_str to be either None or a string')
self.submitted_answer.rule_spec_str = 'str'
self.submitted_answer.validate()
self.submitted_answer.rule_spec_str = None
self.submitted_answer.validate()
def test_answer_str_must_be_none_or_string(self):
self.submitted_answer.answer_str = 10
self._assert_validation_error(
self.submitted_answer,
'Expected answer_str to be either None or a string')
self.submitted_answer.answer_str = 'str'
self.submitted_answer.validate()
self.submitted_answer.answer_str = None
self.submitted_answer.validate()
class StateAnswersCalcOutputValidationTests(test_utils.GenericTestBase):
"""Tests the StateAnswersCalcOutput domain object for validation."""
def setUp(self):
super(StateAnswersCalcOutputValidationTests, self).setUp()
self.state_answers_calc_output = stats_domain.StateAnswersCalcOutput(
'exp_id', 1, 'initial_state', 'TextInput', 'AnswerFrequencies', {})
# The canonical object should have no validation problems
self.state_answers_calc_output.validate()
def test_exploration_id_must_be_string(self):
self.state_answers_calc_output.exploration_id = 0
self._assert_validation_error(
self.state_answers_calc_output,
'Expected exploration_id to be a string')
def test_state_name_must_be_string(self):
self.state_answers_calc_output.state_name = ['state']
self._assert_validation_error(
self.state_answers_calc_output,
'Expected state_name to be a string')
def test_calculation_id_must_be_string(self):
self.state_answers_calc_output.calculation_id = ['calculation id']
self._assert_validation_error(
self.state_answers_calc_output,
'Expected calculation_id to be a string')
def test_calculation_output_must_be_less_than_one_million_bytes(self):
self.state_answers_calc_output.calculation_output = (
['This is not a long sentence.'] * 200000)
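        # 200,000 copies of a 28-character string serialize to well over one
        # million bytes, which should trip the size cap checked below.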
self._assert_validation_error(
self.state_answers_calc_output,
'calculation_output is too big to be stored')
| {
"content_hash": "7aa070bd3fb7294edfdffd99fb722805",
"timestamp": "",
"source": "github",
"line_count": 682,
"max_line_length": 80,
"avg_line_length": 41.57184750733138,
"alnum_prop": 0.5945259593679458,
"repo_name": "himanshu-dixit/oppia",
"id": "4053a2829ee2d18ad293c7e787bdde66a60ed840",
"size": "28975",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "core/domain/stats_domain_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "101439"
},
{
"name": "HTML",
"bytes": "899603"
},
{
"name": "JavaScript",
"bytes": "2950299"
},
{
"name": "Python",
"bytes": "3818679"
},
{
"name": "Shell",
"bytes": "47818"
}
],
"symlink_target": ""
} |
def MainWork():
# Open a MySQL connection
try:
dh = dbhandler.MySQLhandler()
    except dbhandler.DBException as e:
WriteLog('',str(e))
sys.exit(1)
# Loop over fits files on command line
for file in sys.argv[1:]:
# If file does not exist, do not go further
if not os.path.exists(file):
WriteLog(file,'does not exist')
continue
# Only proceed if filename matches standard NOT format
if not re.search(r'[A-Z]{2}[a-z]{2}[0-9]{6}', file):
WriteLog(file,'filename does not match standard format')
continue
# Extract fits header values using pyfits
# header = [<instrument>,<tbl_prefix>,primaryHDU,ext1HDU,ext2HDU,..,extNHDU]
try:
header = extractHeader(str(file))
        except HeaderException as e:
WriteLog(file,str(e))
continue
        # PyFITS does not recognize this as a FITS file. Make a note and skip it.
if header[0] == 'ERROR':
WriteLog(file,'not a valid fits file')
continue
instrument = header[0]
tbl_prefix = header[1]
# filename (stripped of .fits extension)
fits_nm = str(os.path.basename(file))[:-5]
# Delete any existing data for the Fits Header
dh.deleteFitsHeader(tbl_prefix,fits_nm)
# Insert the Fits Header into Database
try:
dh.insertFitsHeader(header[1:],fits_nm)
        except dbhandler.DBException as e:
WriteLog(file,str(e))
continue
        # Write fits header to file
WriteHeader(fits_nm, instrument, header[2:])
# End file loop
# Close database connection
# THIS COMMITS THE TRANSACTION
dh.close()
# Logfile functionality
def WriteLog(fn, msg):
    timestamp = time.strftime('%Y-%m-%d %H:%M:%S')
    debugfile = open(OUTFILEPATH + '/debug.log', 'a')
    debugfile.write(timestamp + ' ' + fn + ':' + msg)
    debugfile.write('\n')
    debugfile.close()
def WriteHeader(filename, instrument, header):
# If the directory containing the nightly data does not exist,
# create it and set 777 permissions
dirname = "%s/%s/%s" % (OUTFILEPATH, instrument, filename[:6])
try:
os.stat(dirname)
except OSError:
os.mkdir(dirname)
        os.chmod(dirname, 0o777)
# Define output filenames for individual images
fn = "%s/%s.fits" % (dirname, filename)
# Delete any existing fitsheader file for this image
try:
os.unlink(fn)
except OSError:
pass
# Now write all HDUs to file
fh = open(fn, 'w')
#os.chmod(fn, 0666)
fh.write(str(header[0]))
fh.write('\n')
for exthdu in header[1:]:
fh.write('\n')
fh.write(str(exthdu))
fh.write('\n')
# Close filehandler
fh.close()
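# Invocation sketch (hypothetical paths; filenames must contain the
# two-uppercase, two-lowercase, six-digit pattern checked in MainWork):
#
#   python hdrextract.py /raw/ALfc123456.fits /raw/ALfc123457.fits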
if __name__ == "__main__":
import sys, os, time, re
import dbhandler
from fitsheader import extractHeader, HeaderException
# Output file path for log
OUTFILEPATH = '/home/postprocess/fitsheader/logs'
MainWork() | {
"content_hash": "d1bedbddf185db8af2e4374447a261aa",
"timestamp": "",
"source": "github",
"line_count": 120,
"max_line_length": 80,
"avg_line_length": 23.758333333333333,
"alnum_prop": 0.6439845668186601,
"repo_name": "sniemi/SamPy",
"id": "d526b6dc5f30d4fecf5dcba945b1d1397781cb12",
"size": "2930",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sandbox/src2/fitsheader/hdrextract.py",
"mode": "33261",
"license": "bsd-2-clause",
"language": [
{
"name": "Batchfile",
"bytes": "296"
},
{
"name": "C",
"bytes": "68436"
},
{
"name": "C++",
"bytes": "45956"
},
{
"name": "CSS",
"bytes": "35570"
},
{
"name": "Fortran",
"bytes": "45191"
},
{
"name": "HTML",
"bytes": "107435"
},
{
"name": "IDL",
"bytes": "13651"
},
{
"name": "JavaScript",
"bytes": "25435"
},
{
"name": "Makefile",
"bytes": "26035"
},
{
"name": "Matlab",
"bytes": "1508"
},
{
"name": "Perl",
"bytes": "59198"
},
{
"name": "PostScript",
"bytes": "1403536"
},
{
"name": "Prolog",
"bytes": "16061"
},
{
"name": "Python",
"bytes": "5763358"
},
{
"name": "R",
"bytes": "208346"
},
{
"name": "Rebol",
"bytes": "161"
},
{
"name": "Roff",
"bytes": "73616"
},
{
"name": "Ruby",
"bytes": "2032"
},
{
"name": "Shell",
"bytes": "41512"
},
{
"name": "Tcl",
"bytes": "44150"
},
{
"name": "TeX",
"bytes": "107783"
}
],
"symlink_target": ""
} |
from django.db import models
from django.contrib.auth.models import AbstractBaseUser
from django.contrib.auth.models import BaseUserManager
from django.core.validators import validate_email
from django.core.exceptions import ValidationError, ObjectDoesNotExist
import re
from django.db import OperationalError
from buildbuild import custom_msg
class UserManager(BaseUserManager):
def create_user(self, email, password, **kwargs):
if not email:
raise ValueError("User must have an email address")
validate_email(email)
self.validate_password(password)
user = self.model(email = self.normalize_email(email))
user.set_password(password)
if "name" in kwargs:
self.validate_name(kwargs['name'])
user.name = kwargs["name"]
if "phonenumber" in kwargs:
self.validate_phonenumber(kwargs["phonenumber"])
user.phonenumber = kwargs["phonenumber"]
user.save(using = self._db)
return user
def create_superuser(self, email, password, **kwargs):
user = self.create_user(email, password = password, **kwargs)
user.is_admin = True
user.is_staff = True
user.save(using = self._db)
return user
def validate_password(self, password):
if len(password) < 6:
raise ValidationError(
"user password length should be at least 6",
)
elif len(password) > 255:
raise ValidationError(
"user password max length is 255",
)
    def validate_phonenumber(self, phonenumber):
        if len(phonenumber) < 8:
            raise ValidationError(
                "user phonenumber length should be at least 8",
            )
        elif len(phonenumber) > 30:
            raise ValidationError(
                "user phonenumber max length is 30",
            )
        if not re.match('^[0-9]+$', phonenumber):
            raise ValidationError("user phonenumber should contain only digits")
    def validate_name(self, name):
        if len(name) > 30:
            raise ValidationError((custom_msg.user_name_max_length_error),
                code = 'invalid')
        if not re.match('^[a-zA-Z0-9_-]+$', name):
            raise ValidationError("user name may only contain alphabet, number, '-' and '_'")
def get_user(self, id):
try:
user = User.objects.get(id=id)
except ObjectDoesNotExist:
raise ObjectDoesNotExist("not exist user id")
return user
def delete_user(self, id):
user = User.objects.get(id=id)
user.delete()
# For debugging, it will be deleted soon
if user.id is None:
return True
else:
raise OperationalError("delete user failed")
def update_user(self, id, **kwargs):
user = User.objects.get_user(id)
        if 'name' in kwargs or 'phonenumber' in kwargs:
if 'phonenumber' in kwargs:
self.validate_phonenumber(kwargs['phonenumber'])
user.phonenumber = kwargs['phonenumber']
user.save(using = self._db)
if 'name' in kwargs:
self.validate_name(kwargs['name'])
user.name = kwargs['name']
user.save(using = self._db)
return True
else:
raise ValueError("required name or phonenumber")
    # Get a member referenced through the team's many-to-many relation
def get_member(self, id):
try:
member = self.get(id = id)
except ObjectDoesNotExist:
raise ObjectDoesNotExist(
str(id) + "is not contained in team member DB"
)
else:
return member
    # Get a wait_member referenced through the team's many-to-many relation
def get_wait_member(self, id):
try:
wait_member = self.get(id = id)
except ObjectDoesNotExist:
raise ObjectDoesNotExist(
str(id) + "is not contained in team wait member DB"
)
else:
return wait_member
class User(AbstractBaseUser):
name = models.CharField(
help_text = "User name",
max_length = 30,
)
email = models.EmailField(
help_text = "User email",
max_length = 50,
unique = True,
)
is_active = models.BooleanField(
help_text = "User is_active to judge if the account is active",
default=True
)
is_admin = models.BooleanField(
help_text = "User is_admin for administrator permission",
default=False
)
is_staff = models.BooleanField(
help_text = "User is_staff for user can access admin site",
default=False
)
phonenumber = models.CharField(
help_text = "User phonenumber",
        max_length = 30
)
USERNAME_FIELD = 'email'
# custom UserManager
objects = UserManager()
def deactivate(self):
self.is_active = False
return self.is_active
def activate(self):
self.is_active = True
return self.is_active
#TODO : To use the functions get_full_name and get_short_name
def get_full_name(self):
return self.email
def get_short_name(self):
return self.email
def has_perm(self, perm, obj=None):
return self.is_admin
def is_staff(self):
return self.is_admin
#TODO : need to check django method
def has_module_perms(self, app_label):
return True
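# A minimal usage sketch (assumes a configured Django project and a migrated
# database; all values below are illustrative):
#
#   user = User.objects.create_user(
#       email='dev@example.com',
#       password='s3cret!',          # 6-255 chars per validate_password
#       name='dev_user',             # letters, digits, '-' and '_' only
#       phonenumber='01012345678',   # 8-30 digits per validate_phonenumber
#   )
#   User.objects.update_user(user.id, name='renamed_user')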
| {
"content_hash": "f5eff228656f63decde2bcaf31ba8045",
"timestamp": "",
"source": "github",
"line_count": 188,
"max_line_length": 94,
"avg_line_length": 30.914893617021278,
"alnum_prop": 0.557639366827254,
"repo_name": "buildbuild/buildbuild",
"id": "8f6725a315a22006ae6b66e7e4b2744f65e0d773",
"size": "5812",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "buildbuild/users/models.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "259939"
},
{
"name": "JavaScript",
"bytes": "70053"
},
{
"name": "Python",
"bytes": "207512"
},
{
"name": "Shell",
"bytes": "1546"
}
],
"symlink_target": ""
} |
import os
from collections import namedtuple
from collections.abc import Sequence
from urllib.parse import urlparse, urlunparse
import ipaddress
import markdown
from mkdocs import utils, theme, plugins
from mkdocs.config.base import Config, ValidationError
class BaseConfigOption:
def __init__(self):
self.warnings = []
self.default = None
def is_required(self):
return False
def validate(self, value):
return self.run_validation(value)
def reset_warnings(self):
self.warnings = []
def pre_validation(self, config, key_name):
"""
Before all options are validated, perform a pre-validation process.
The pre-validation process method should be implemented by subclasses.
"""
def run_validation(self, value):
"""
Perform validation for a value.
The run_validation method should be implemented by subclasses.
"""
return value
def post_validation(self, config, key_name):
"""
After all options have passed validation, perform a post-validation
        process to do any additional changes dependent on other config values.
The post-validation process method should be implemented by subclasses.
"""
class SubConfig(BaseConfigOption, Config):
def __init__(self, *config_options):
BaseConfigOption.__init__(self)
Config.__init__(self, config_options)
self.default = {}
def validate(self, value):
self.load_dict(value)
return self.run_validation(value)
def run_validation(self, value):
Config.validate(self)
return self
class ConfigItems(BaseConfigOption):
"""
Config Items Option
Validates a list of mappings that all must match the same set of
options.
"""
def __init__(self, *config_options, **kwargs):
BaseConfigOption.__init__(self)
self.item_config = SubConfig(*config_options)
self.required = kwargs.get('required', False)
def __repr__(self):
return f'{self.__class__.__name__}: {self.item_config}'
def run_validation(self, value):
if value is None:
if self.required:
raise ValidationError("Required configuration not provided.")
else:
return ()
if not isinstance(value, Sequence):
raise ValidationError(f'Expected a sequence of mappings, but a '
f'{type(value)} was given.')
return [self.item_config.validate(item) for item in value]
class OptionallyRequired(BaseConfigOption):
"""
A subclass of BaseConfigOption that adds support for default values and
required values. It is a base class for config options.
"""
def __init__(self, default=None, required=False):
super().__init__()
self.default = default
self.required = required
def is_required(self):
return self.required
def validate(self, value):
"""
Perform some initial validation.
If the option is empty (None) and isn't required, leave it as such. If
        it is empty but has a default, use that. Finally, call the
        run_validation method on the subclass.
"""
if value is None:
if self.default is not None:
if hasattr(self.default, 'copy'):
# ensure no mutable values are assigned
value = self.default.copy()
else:
value = self.default
elif not self.required:
return
elif self.required:
raise ValidationError("Required configuration not provided.")
return self.run_validation(value)
class Type(OptionallyRequired):
"""
Type Config Option
Validate the type of a config option against a given Python type.
"""
def __init__(self, type_, length=None, **kwargs):
super().__init__(**kwargs)
self._type = type_
self.length = length
def run_validation(self, value):
if not isinstance(value, self._type):
msg = f"Expected type: {self._type} but received: {type(value)}"
elif self.length is not None and len(value) != self.length:
msg = ("Expected type: {0} with length {2} but received: {1} with "
"length {3}").format(self._type, value, self.length,
len(value))
else:
return value
raise ValidationError(msg)
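# A quick illustration of the precedence implemented by
# OptionallyRequired.validate (hypothetical option, not part of the MkDocs
# config schema):
#
#   opt = Type(str, default='mkdocs')
#   opt.validate(None)             # -> 'mkdocs' (empty value falls back)
#   opt.validate('readthedocs')    # -> 'readthedocs' (validated as str)
#   Type(str, required=True).validate(None)   # raises ValidationError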
class Choice(OptionallyRequired):
"""
Choice Config Option
Validate the config option against a strict set of values.
"""
def __init__(self, choices, **kwargs):
super().__init__(**kwargs)
try:
length = len(choices)
except TypeError:
length = 0
if not length or isinstance(choices, str):
raise ValueError(f'Expected iterable of choices, got {choices}')
self.choices = choices
def run_validation(self, value):
if value not in self.choices:
msg = f"Expected one of: {self.choices} but received: {value}"
else:
return value
raise ValidationError(msg)
class Deprecated(BaseConfigOption):
"""
Deprecated Config Option
Raises a warning as the option is deprecated. Uses `message` for the
    warning. If `moved_to` is set to the name of a new config option, the value
is moved to the new option on pre_validation. If `option_type` is set to a
ConfigOption instance, then the value is validated against that type.
"""
def __init__(self, moved_to=None, message='', option_type=None):
super().__init__()
self.default = None
self.moved_to = moved_to
self.message = message or (
'The configuration option {} has been deprecated and '
'will be removed in a future release of MkDocs.'
)
self.option = option_type or BaseConfigOption()
self.warnings = self.option.warnings
def pre_validation(self, config, key_name):
self.option.pre_validation(config, key_name)
if config.get(key_name) is not None:
self.warnings.append(self.message.format(key_name))
if self.moved_to is not None:
if '.' not in self.moved_to:
target = config
target_key = self.moved_to
else:
move_to, target_key = self.moved_to.rsplit('.', 1)
target = config
for key in move_to.split('.'):
target = target.setdefault(key, {})
if not isinstance(target, dict):
# We can't move it for the user
return
target[target_key] = config.pop(key_name)
def validate(self, value):
return self.option.validate(value)
def post_validation(self, config, key_name):
self.option.post_validation(config, key_name)
def reset_warnings(self):
self.option.reset_warnings()
self.warnings = self.option.warnings
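# A hypothetical schema entry such as
#   ('old_key', Deprecated(moved_to='new_section.new_key'))
# relocates a user-supplied `old_key` into config['new_section']['new_key']
# during pre_validation and records the deprecation warning built above.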
class IpAddress(OptionallyRequired):
"""
IpAddress Config Option
    Validate that an IP address is in an appropriate format
"""
def run_validation(self, value):
try:
host, port = value.rsplit(':', 1)
except Exception:
raise ValidationError("Must be a string of format 'IP:PORT'")
if host != 'localhost':
try:
# Validate and normalize IP Address
host = str(ipaddress.ip_address(host))
except ValueError as e:
raise ValidationError(e)
try:
port = int(port)
except Exception:
raise ValidationError(f"'{port}' is not a valid port")
class Address(namedtuple('Address', 'host port')):
def __str__(self):
return f'{self.host}:{self.port}'
return Address(host, port)
def post_validation(self, config, key_name):
host = config[key_name].host
if key_name == 'dev_addr' and host in ['0.0.0.0', '::']:
self.warnings.append(
("The use of the IP address '{}' suggests a production environment "
"or the use of a proxy to connect to the MkDocs server. However, "
"the MkDocs' server is intended for local development purposes only. "
"Please use a third party production-ready server instead.").format(host)
)
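# For example, IpAddress().run_validation('localhost:8000') returns
# Address(host='localhost', port=8000), whose str() form is
# 'localhost:8000' again, while '127.0.0.1:notaport' raises a
# ValidationError for the port.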
class URL(OptionallyRequired):
"""
URL Config Option
Validate a URL by requiring a scheme is present.
"""
def __init__(self, default='', required=False, is_dir=False):
self.is_dir = is_dir
super().__init__(default, required)
def pre_validation(self, config, key_name):
# TODO: replace this with an error in a future release (1.3?)
user_defined_keys = sum([list(x.keys()) for x in config.user_configs], [])
if key_name == 'site_url' and key_name not in user_defined_keys:
self.warnings.append(
'This option is now required. Set to a valid URL or '
'an empty string to avoid an error in a future release.'
)
def run_validation(self, value):
if value == '':
return value
try:
parsed_url = urlparse(value)
except (AttributeError, TypeError):
raise ValidationError("Unable to parse the URL.")
if parsed_url.scheme:
if self.is_dir and not parsed_url.path.endswith('/'):
parsed_url = parsed_url._replace(path=f'{parsed_url.path}/')
return urlunparse(parsed_url)
raise ValidationError(
"The URL isn't valid, it should include the http:// (scheme)")
def post_validation(self, config, key_name):
if key_name == 'site_url':
if config[key_name] in ['', None] and config['use_directory_urls']:
config['use_directory_urls'] = False
self.warnings.append(
"The 'use_directory_urls' option has been disabled because "
"'site_url' contains an empty value. Either define a valid "
"URL for 'site_url' or set 'use_directory_urls' to False."
)
class RepoURL(URL):
"""
Repo URL Config Option
A small extension to the URL config that sets the repo_name and edit_uri,
based on the url if they haven't already been provided.
"""
def post_validation(self, config, key_name):
repo_host = urlparse(config['repo_url']).netloc.lower()
edit_uri = config.get('edit_uri')
# derive repo_name from repo_url if unset
if config['repo_url'] is not None and config.get('repo_name') is None:
if repo_host == 'github.com':
config['repo_name'] = 'GitHub'
elif repo_host == 'bitbucket.org':
config['repo_name'] = 'Bitbucket'
elif repo_host == 'gitlab.com':
config['repo_name'] = 'GitLab'
else:
config['repo_name'] = repo_host.split('.')[0].title()
# derive edit_uri from repo_name if unset
if config['repo_url'] is not None and edit_uri is None:
if repo_host == 'github.com' or repo_host == 'gitlab.com':
edit_uri = 'edit/master/docs/'
elif repo_host == 'bitbucket.org':
edit_uri = 'src/default/docs/'
else:
edit_uri = ''
# ensure a well-formed edit_uri
if edit_uri:
if not edit_uri.startswith(('?', '#')) \
and not config['repo_url'].endswith('/'):
config['repo_url'] += '/'
if not edit_uri.endswith('/'):
edit_uri += '/'
config['edit_uri'] = edit_uri
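# For instance, with repo_url 'https://github.com/mkdocs/mkdocs' and no
# user-supplied repo_name or edit_uri, post_validation derives
# repo_name 'GitHub' and edit_uri 'edit/master/docs/', and appends the
# missing trailing slash to repo_url.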
class FilesystemObject(Type):
"""
Base class for options that point to filesystem objects.
"""
def __init__(self, exists=False, **kwargs):
super().__init__(type_=str, **kwargs)
self.exists = exists
self.config_dir = None
def pre_validation(self, config, key_name):
self.config_dir = os.path.dirname(config.config_file_path) if config.config_file_path else None
def run_validation(self, value):
value = super().run_validation(value)
if self.config_dir and not os.path.isabs(value):
value = os.path.join(self.config_dir, value)
if self.exists and not self.existence_test(value):
raise ValidationError(f"The path {value} isn't an existing {self.name}.")
value = os.path.abspath(value)
assert isinstance(value, str)
return value
class Dir(FilesystemObject):
"""
Dir Config Option
Validate a path to a directory, optionally verifying that it exists.
"""
existence_test = staticmethod(os.path.isdir)
name = 'directory'
def post_validation(self, config, key_name):
if config.config_file_path is None:
return
# Validate that the dir is not the parent dir of the config file.
if os.path.dirname(config.config_file_path) == config[key_name]:
raise ValidationError(
("The '{0}' should not be the parent directory of the config "
"file. Use a child directory instead so that the '{0}' "
"is a sibling of the config file.").format(key_name))
class File(FilesystemObject):
"""
File Config Option
Validate a path to a file, optionally verifying that it exists.
"""
existence_test = staticmethod(os.path.isfile)
name = 'file'
class SiteDir(Dir):
"""
SiteDir Config Option
Validates the site_dir and docs_dir directories do not contain each other.
"""
def post_validation(self, config, key_name):
super().post_validation(config, key_name)
# Validate that the docs_dir and site_dir don't contain the
# other as this will lead to copying back and forth on each
# and eventually make a deep nested mess.
if (config['docs_dir'] + os.sep).startswith(config['site_dir'].rstrip(os.sep) + os.sep):
raise ValidationError(
("The 'docs_dir' should not be within the 'site_dir' as this "
"can mean the source files are overwritten by the output or "
"it will be deleted if --clean is passed to mkdocs build."
"(site_dir: '{}', docs_dir: '{}')"
).format(config['site_dir'], config['docs_dir']))
elif (config['site_dir'] + os.sep).startswith(config['docs_dir'].rstrip(os.sep) + os.sep):
raise ValidationError(
("The 'site_dir' should not be within the 'docs_dir' as this "
"leads to the build directory being copied into itself and "
"duplicate nested files in the 'site_dir'."
"(site_dir: '{}', docs_dir: '{}')"
).format(config['site_dir'], config['docs_dir']))
class Theme(BaseConfigOption):
"""
Theme Config Option
Validate that the theme exists and build Theme instance.
"""
def __init__(self, default=None):
super().__init__()
self.default = default
def validate(self, value):
if value is None and self.default is not None:
value = {'name': self.default}
if isinstance(value, str):
value = {'name': value}
themes = utils.get_theme_names()
if isinstance(value, dict):
if 'name' in value:
if value['name'] is None or value['name'] in themes:
return value
raise ValidationError(
f"Unrecognised theme name: '{value['name']}'. "
f"The available installed themes are: {', '.join(themes)}"
)
raise ValidationError("No theme name set.")
raise ValidationError(f'Invalid type "{type(value)}". Expected a string or key/value pairs.')
def post_validation(self, config, key_name):
theme_config = config[key_name]
if not theme_config['name'] and 'custom_dir' not in theme_config:
raise ValidationError("At least one of 'theme.name' or 'theme.custom_dir' must be defined.")
# Ensure custom_dir is an absolute path
if 'custom_dir' in theme_config and not os.path.isabs(theme_config['custom_dir']):
config_dir = os.path.dirname(config.config_file_path)
theme_config['custom_dir'] = os.path.join(config_dir, theme_config['custom_dir'])
if 'custom_dir' in theme_config and not os.path.isdir(theme_config['custom_dir']):
raise ValidationError("The path set in {name}.custom_dir ('{path}') does not exist.".
format(path=theme_config['custom_dir'], name=key_name))
if 'locale' in theme_config and not isinstance(theme_config['locale'], str):
raise ValidationError("'{name}.locale' must be a string.".format(name=theme_config['name']))
config[key_name] = theme.Theme(**theme_config)
class Nav(OptionallyRequired):
"""
Nav Config Option
Validate the Nav config. Automatically add all markdown files if empty.
"""
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.file_match = utils.is_markdown_file
def run_validation(self, value):
if not isinstance(value, list):
raise ValidationError(f"Expected a list, got {type(value)}")
if len(value) == 0:
return
config_types = {type(item) for item in value}
if config_types.issubset({str, dict}):
return value
raise ValidationError("Invalid pages config. {} {}".format(
config_types, {str, dict}
))
def post_validation(self, config, key_name):
# TODO: remove this when `pages` config setting is fully deprecated.
if key_name == 'pages' and config['pages'] is not None:
if config['nav'] is None:
# copy `pages` config to new 'nav' config setting
config['nav'] = config['pages']
warning = ("The 'pages' configuration option has been deprecated and will "
"be removed in a future release of MkDocs. Use 'nav' instead.")
self.warnings.append(warning)
class Private(OptionallyRequired):
"""
Private Config Option
A config option only for internal use. Raises an error if set by the user.
"""
def run_validation(self, value):
raise ValidationError('For internal use only.')
class MarkdownExtensions(OptionallyRequired):
"""
Markdown Extensions Config Option
A list or dict of extensions. Each list item may contain either a string or a one item dict.
A string must be a valid Markdown extension name with no config options defined. The key of
a dict item must be a valid Markdown extension name and the value must be a dict of config
options for that extension. Extension configs are set on the private setting passed to
    `configkey`. The `builtins` keyword accepts a list of extensions which cannot be overridden by
    the user. However, builtins can be duplicated to define config options for them if desired.
    """
def __init__(self, builtins=None, configkey='mdx_configs', **kwargs):
super().__init__(**kwargs)
self.builtins = builtins or []
self.configkey = configkey
self.configdata = {}
def validate_ext_cfg(self, ext, cfg):
if not isinstance(ext, str):
raise ValidationError(f"'{ext}' is not a valid Markdown Extension name.")
if not cfg:
return
if not isinstance(cfg, dict):
raise ValidationError(f"Invalid config options for Markdown Extension '{ext}'.")
self.configdata[ext] = cfg
def run_validation(self, value):
if not isinstance(value, (list, tuple, dict)):
raise ValidationError('Invalid Markdown Extensions configuration')
extensions = []
if isinstance(value, dict):
for ext, cfg in value.items():
self.validate_ext_cfg(ext, cfg)
extensions.append(ext)
else:
for item in value:
if isinstance(item, dict):
if len(item) > 1:
raise ValidationError('Invalid Markdown Extensions configuration')
ext, cfg = item.popitem()
self.validate_ext_cfg(ext, cfg)
extensions.append(ext)
elif isinstance(item, str):
extensions.append(item)
else:
raise ValidationError('Invalid Markdown Extensions configuration')
extensions = utils.reduce_list(self.builtins + extensions)
# Confirm that Markdown considers extensions to be valid
try:
markdown.Markdown(extensions=extensions, extension_configs=self.configdata)
except Exception as e:
raise ValidationError(e.args[0])
return extensions
def post_validation(self, config, key_name):
config[self.configkey] = self.configdata
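# e.g. run_validation(['admonition', {'toc': {'permalink': True}}]) returns
# the builtins plus ['admonition', 'toc'], and post_validation then stores
# {'toc': {'permalink': True}} under config['mdx_configs'].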
class Plugins(OptionallyRequired):
"""
Plugins config option.
A list or dict of plugins. If a plugin defines config options those are used when
initializing the plugin class.
"""
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.installed_plugins = plugins.get_plugins()
self.config_file_path = None
def pre_validation(self, config, key_name):
self.config_file_path = config.config_file_path
def run_validation(self, value):
if not isinstance(value, (list, tuple, dict)):
raise ValidationError('Invalid Plugins configuration. Expected a list or dict.')
plgins = plugins.PluginCollection()
if isinstance(value, dict):
for name, cfg in value.items():
plgins[name] = self.load_plugin(name, cfg)
else:
for item in value:
if isinstance(item, dict):
if len(item) > 1:
raise ValidationError('Invalid Plugins configuration')
name, cfg = item.popitem()
item = name
else:
cfg = {}
plgins[item] = self.load_plugin(item, cfg)
return plgins
def load_plugin(self, name, config):
if not isinstance(name, str):
raise ValidationError(f"'{name}' is not a valid plugin name.")
if name not in self.installed_plugins:
raise ValidationError(f'The "{name}" plugin is not installed')
config = config or {} # Users may define a null (None) config
if not isinstance(config, dict):
raise ValidationError(f"Invalid config options for the '{name}' plugin.")
Plugin = self.installed_plugins[name].load()
if not issubclass(Plugin, plugins.BasePlugin):
raise ValidationError('{}.{} must be a subclass of {}.{}'.format(
Plugin.__module__, Plugin.__name__, plugins.BasePlugin.__module__,
plugins.BasePlugin.__name__))
plugin = Plugin()
errors, warnings = plugin.load_config(config, self.config_file_path)
self.warnings.extend(warnings)
errors_message = '\n'.join(
f"Plugin '{name}' value: '{x}'. Error: {y}"
for x, y in errors
)
if errors_message:
raise ValidationError(errors_message)
return plugin
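# Illustrative input (plugin names are examples only): a user config arriving
# as ['search', {'some-plugin': {'option': True}}] instantiates each installed
# plugin by its entry-point name and passes the attached dict to
# plugin.load_config().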
| {
"content_hash": "dd06ba3608ed13dbeb076aaaf914a27e",
"timestamp": "",
"source": "github",
"line_count": 684,
"max_line_length": 104,
"avg_line_length": 34.9546783625731,
"alnum_prop": 0.5855117319837718,
"repo_name": "waylan/mkdocs",
"id": "858fe7ce4c1ebe06fa66c0468c9cd5842a8b5602",
"size": "23909",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mkdocs/config/config_options.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "9145"
},
{
"name": "HTML",
"bytes": "28174"
},
{
"name": "JavaScript",
"bytes": "115264"
},
{
"name": "Python",
"bytes": "376024"
}
],
"symlink_target": ""
} |
import numpy as np
from math import pi, log
import pylab
from scipy import fft, ifft
from scipy.optimize import curve_fit
i = 10000
x = np.linspace(0, 3.5 * pi, i)
y = (0.3*np.sin(x) + np.sin(1.3 * x) + 0.9 * np.sin(4.2 * x) + 0.06 *
np.random.randn(i))
def _datacheck_peakdetect(x_axis, y_axis):
if x_axis is None:
x_axis = range(len(y_axis))
if len(y_axis) != len(x_axis):
        raise ValueError(
            'Input vectors y_axis and x_axis must have same length')
#needs to be a numpy array
y_axis = np.array(y_axis)
x_axis = np.array(x_axis)
return x_axis, y_axis
def _peakdetect_parabole_fitter(raw_peaks, x_axis, y_axis, points):
"""
Performs the actual parabole fitting for the peakdetect_parabole function.
keyword arguments:
    raw_peaks -- A list of either the maximum or the minimum peaks, as given
by the peakdetect_zero_crossing function, with index used as x-axis
x_axis -- A numpy list of all the x values
y_axis -- A numpy list of all the y values
points -- How many points around the peak should be used during curve
fitting, must be odd.
return -- A list giving all the peaks and the fitted waveform, format:
[[x, y, [fitted_x, fitted_y]]]
"""
func = lambda x, k, tau, m: k * ((x - tau) ** 2) + m
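    # At the vertex x == tau the model evaluates to m, so the fitted
    # (tau, m) pair is read back below as the peak position and height.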
fitted_peaks = []
for peak in raw_peaks:
index = peak[0]
x_data = x_axis[index - points // 2: index + points // 2 + 1]
y_data = y_axis[index - points // 2: index + points // 2 + 1]
# get a first approximation of tau (peak position in time)
tau = x_axis[index]
# get a first approximation of peak amplitude
m = peak[1]
# build list of approximations
# k = -m as first approximation?
p0 = (-m, tau, m)
popt, pcov = curve_fit(func, x_data, y_data, p0)
# retrieve tau and m i.e x and y value of peak
x, y = popt[1:3]
# create a high resolution data set for the fitted waveform
x2 = np.linspace(x_data[0], x_data[-1], points * 10)
y2 = func(x2, *popt)
fitted_peaks.append([x, y, [x2, y2]])
return fitted_peaks
def peakdetect(y_axis, x_axis = None, lookahead = 300, delta=0):
"""
Converted from/based on a MATLAB script at:
http://billauer.co.il/peakdet.html
    function for detecting local maxima and minima in a signal.
    Discovers peaks by searching for values which are surrounded by lower
    or larger values for maxima and minima respectively
    keyword arguments:
    y_axis -- A list containing the signal over which to find peaks
    x_axis -- (optional) A x-axis whose values correspond to the y_axis list
        and is used in the return to specify the position of the peaks. If
omitted an index of the y_axis is used. (default: None)
    lookahead -- (optional) distance to look ahead from a peak candidate to
        determine if it is the actual peak (default: 300)
'(sample / period) / f' where '4 >= f >= 1.25' might be a good value
delta -- (optional) this specifies a minimum difference between a peak and
the following points, before a peak may be considered a peak. Useful
        to hinder the function from picking up false peaks towards the end of
the signal. To work well delta should be set to delta >= RMSnoise * 5.
(default: 0)
        The delta code path costs about a 20% decrease in speed when delta is
        omitted; correctly used it can double the speed of the function
    return -- two lists [max_peaks, min_peaks] containing the positive and
        negative peaks respectively. Each cell of the lists contains a tuple
of: (position, peak_value)
to get the average peak value do: np.mean(max_peaks, 0)[1] on the
results to unpack one of the lists into x, y coordinates do:
x, y = zip(*tab)
"""
max_peaks = []
min_peaks = []
dump = [] #Used to pop the first hit which almost always is false
# check input data
x_axis, y_axis = _datacheck_peakdetect(x_axis, y_axis)
# store data length for later use
length = len(y_axis)
#perform some checks
    if lookahead < 1:
        raise ValueError("Lookahead must be '1' or above in value")
    if not (np.isscalar(delta) and delta >= 0):
        raise ValueError("delta must be a non-negative number")
#maxima and minima candidates are temporarily stored in
#mx and mn respectively
mn, mx = np.Inf, -np.Inf
#Only detect peak if there is 'lookahead' amount of points after it
for index, (x, y) in enumerate(zip(x_axis[:-lookahead],
y_axis[:-lookahead])):
if y > mx:
mx = y
mxpos = x
if y < mn:
mn = y
mnpos = x
####look for max####
if y < mx-delta and mx != np.Inf:
#Maxima peak candidate found
#look ahead in signal to ensure that this is a peak and not jitter
if y_axis[index:index+lookahead].max() < mx:
max_peaks.append([mxpos, mx])
dump.append(True)
#set algorithm to only find minima now
mx = np.Inf
mn = np.Inf
if index+lookahead >= length:
#end is within lookahead no more peaks can be found
break
continue
            #else: # this slows things down
# mx = ahead
# mxpos = x_axis[np.where(y_axis[index:index+lookahead]==mx)]
####look for min####
if y > mn+delta and mn != -np.Inf:
#Minima peak candidate found
#look ahead in signal to ensure that this is a peak and not jitter
if y_axis[index:index+lookahead].min() > mn:
min_peaks.append([mnpos, mn])
dump.append(False)
#set algorithm to only find maxima now
mn = -np.Inf
mx = -np.Inf
if index+lookahead >= length:
#end is within lookahead no more peaks can be found
break
            #else: # this slows things down
# mn = ahead
# mnpos = x_axis[np.where(y_axis[index:index+lookahead]==mn)]
#Remove the false hit on the first value of the y_axis
try:
if dump[0]:
max_peaks.pop(0)
else:
min_peaks.pop(0)
del dump
except IndexError:
#no peaks were found, should the function return empty lists?
pass
return [max_peaks, min_peaks]
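# A minimal usage sketch on the sample signal defined at module top
# (parameter values are illustrative; pick delta around 5 * the RMS noise):
#
#   _max, _min = peakdetect(y, x, lookahead=300, delta=0.3)
#   peak_x, peak_y = zip(*_max)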
def peakdetect_fft(y_axis, x_axis, pad_len = 5):
"""
Performs a FFT calculation on the data and zero-pads the results to
increase the time domain resolution after performing the inverse fft and
send the data to the 'peakdetect' function for peak
detection.
Omitting the x_axis is forbidden as it would make the resulting x_axis
value silly if it was returned as the index 50.234 or similar.
    Will find at least one less peak than the 'peakdetect_zero_crossing'
function, but should result in a more precise value of the peak as
resolution has been increased. Some peaks are lost in an attempt to
minimize spectral leakage by calculating the fft between two zero
crossings for n amount of signal periods.
The biggest time eater in this function is the ifft and thereafter it's
the 'peakdetect' function which takes only half the time of the ifft.
    Speed improvements could include checking if 2**n points could be used for
    fft and ifft or changing 'peakdetect' to 'peakdetect_zero_crossing',
    which is maybe 10 times faster than 'peakdetect'. The pro of 'peakdetect'
    is that it results in one less lost peak. It should also be noted that the
time used by the ifft function can change greatly depending on the input.
keyword arguments:
    y_axis -- A list containing the signal over which to find peaks
    x_axis -- A x-axis whose values correspond to the y_axis list and is used
        in the return to specify the position of the peaks.
pad_len -- (optional) By how many times the time resolution should be
increased by, e.g. 1 doubles the resolution. The amount is rounded up
to the nearest 2 ** n amount (default: 5)
return -- two lists [max_peaks, min_peaks] containing the positive and
        negative peaks respectively. Each cell of the lists contains a tuple
of: (position, peak_value)
to get the average peak value do: np.mean(max_peaks, 0)[1] on the
results to unpack one of the lists into x, y coordinates do:
x, y = zip(*tab)
"""
# check input data
x_axis, y_axis = _datacheck_peakdetect(x_axis, y_axis)
zero_indices = zero_crossings(y_axis, window = 11)
    # select an even number of zero crossings, i.e. whole signal periods
last_indice = - 1 - (1 - len(zero_indices) & 1)
# Calculate the fft between the first and last zero crossing
    # this method could be ignored if the beginning and the end of the signal
# are discardable as any errors induced from not using whole periods
# should mainly manifest in the beginning and the end of the signal, but
# not in the rest of the signal
fft_data = fft(y_axis[zero_indices[0]:zero_indices[last_indice]])
padd = lambda x, c: x[:len(x) // 2] + [0] * c + x[len(x) // 2:]
n = lambda x: int(log(x)/log(2)) + 1
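    # e.g. padd([1, 2, 3, 4], 2) -> [1, 2, 0, 0, 3, 4] (zeros spliced into the
    # middle of the spectrum) and n(1000) -> 10, so the padded length is
    # rounded up to the next power of two, here 2 ** 10 = 1024.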
    # pads to 2**n amount of samples
fft_padded = padd(list(fft_data), 2 **
n(len(fft_data) * pad_len) - len(fft_data))
# There is amplitude decrease directly proportional to the sample increase
sf = len(fft_padded) / float(len(fft_data))
# There might be a leakage giving the result an imaginary component
# Return only the real component
y_axis_ifft = ifft(fft_padded).real * sf #(pad_len + 1)
x_axis_ifft = np.linspace(
x_axis[zero_indices[0]], x_axis[zero_indices[last_indice]],
len(y_axis_ifft))
# get the peaks to the interpolated waveform
max_peaks, min_peaks = peakdetect(y_axis_ifft, x_axis_ifft, 500,
delta = abs(np.diff(y_axis).max() * 2))
#max_peaks, min_peaks = peakdetect_zero_crossing(y_axis_ifft, x_axis_ifft)
# store one 20th of a period as waveform data
data_len = int(np.diff(zero_indices).mean()) / 10
data_len += 1 - data_len & 1
fitted_wave = []
for peaks in [max_peaks, min_peaks]:
peak_fit_tmp = []
index = 0
for peak in peaks:
index = np.where(x_axis_ifft[index:]==peak[0])[0][0] + index
x_fit_lim = x_axis_ifft[index - data_len // 2:
index + data_len // 2 + 1]
y_fit_lim = y_axis_ifft[index - data_len // 2:
index + data_len // 2 + 1]
peak_fit_tmp.append([x_fit_lim, y_fit_lim])
fitted_wave.append(peak_fit_tmp)
#pylab.plot(range(len(fft_data)), fft_data)
#pylab.show()
pylab.plot(x_axis, y_axis)
pylab.hold(True)
pylab.plot(x_axis_ifft, y_axis_ifft)
#for max_p in max_peaks:
# pylab.plot(max_p[0], max_p[1], 'xr')
pylab.show()
return [max_peaks, min_peaks]
def peakdetect_parabole(y_axis, x_axis, points = 9):
"""
    Function for detecting local maxima and minima in a signal.
Discovers peaks by fitting the model function: y = k (x - tau) ** 2 + m
to the peaks. The amount of points used in the fitting is set by the
points argument.
Omitting the x_axis is forbidden as it would make the resulting x_axis
value silly if it was returned as index 50.234 or similar.
will find the same amount of peaks as the 'peakdetect_zero_crossing'
function, but might result in a more precise value of the peak.
keyword arguments:
    y_axis -- A list containing the signal over which to find peaks
    x_axis -- A x-axis whose values correspond to the y_axis list and is used
        in the return to specify the position of the peaks.
points -- (optional) How many points around the peak should be used during
curve fitting, must be odd (default: 9)
return -- two lists [max_peaks, min_peaks] containing the positive and
negative peaks respectively. Each cell of the lists contains a list
of: (position, peak_value)
to get the average peak value do: np.mean(max_peaks, 0)[1] on the
results to unpack one of the lists into x, y coordinates do:
x, y = zip(*max_peaks)
"""
# check input data
x_axis, y_axis = _datacheck_peakdetect(x_axis, y_axis)
# make the points argument odd
points += 1 - points % 2
#points += 1 - int(points) & 1 slower when int conversion needed
# get raw peaks
max_raw, min_raw = peakdetect_zero_crossing(y_axis)
# define output variable
max_peaks = []
min_peaks = []
max_ = _peakdetect_parabole_fitter(max_raw, x_axis, y_axis, points)
min_ = _peakdetect_parabole_fitter(min_raw, x_axis, y_axis, points)
max_peaks = map(lambda x: [x[0], x[1]], max_)
max_fitted = map(lambda x: x[-1], max_)
min_peaks = map(lambda x: [x[0], x[1]], min_)
min_fitted = map(lambda x: x[-1], min_)
#pylab.plot(x_axis, y_axis)
#pylab.hold(True)
#for max_p, max_f in zip(max_peaks, max_fitted):
# pylab.plot(max_p[0], max_p[1], 'x')
# pylab.plot(max_f[0], max_f[1], 'o', markersize = 2)
#for min_p, min_f in zip(min_peaks, min_fitted):
# pylab.plot(min_p[0], min_p[1], 'x')
# pylab.plot(min_f[0], min_f[1], 'o', markersize = 2)
#pylab.show()
return [max_peaks, min_peaks]
def peakdetect_sine(y_axis, x_axis, points = 9, lock_frequency = False):
"""
    Function for detecting local maxima and minima in a signal.
Discovers peaks by fitting the model function:
y = A * sin(2 * pi * f * x - tau) to the peaks. The amount of points used
in the fitting is set by the points argument.
Omitting the x_axis is forbidden as it would make the resulting x_axis
value silly if it was returned as index 50.234 or similar.
will find the same amount of peaks as the 'peakdetect_zero_crossing'
function, but might result in a more precise value of the peak.
The function might have some problems if the sine wave has a
non-negligible total angle i.e. a k*x component, as this messes with the
internal offset calculation of the peaks, might be fixed by fitting a
k * x + m function to the peaks for offset calculation.
keyword arguments:
    y_axis -- A list containing the signal over which to find peaks
    x_axis -- A x-axis whose values correspond to the y_axis list and is used
        in the return to specify the position of the peaks.
points -- (optional) How many points around the peak should be used during
curve fitting, must be odd (default: 9)
lock_frequency -- (optional) Specifies if the frequency argument of the
model function should be locked to the value calculated from the raw
peaks or if optimization process may tinker with it. (default: False)
return -- two lists [max_peaks, min_peaks] containing the positive and
        negative peaks respectively. Each cell of the lists contains a tuple
of: (position, peak_value)
to get the average peak value do: np.mean(max_peaks, 0)[1] on the
results to unpack one of the lists into x, y coordinates do:
x, y = zip(*tab)
"""
# check input data
x_axis, y_axis = _datacheck_peakdetect(x_axis, y_axis)
# make the points argument odd
points += 1 - points % 2
#points += 1 - int(points) & 1 slower when int conversion needed
# get raw peaks
max_raw, min_raw = peakdetect_zero_crossing(y_axis)
# define output variable
max_peaks = []
min_peaks = []
# get global offset
offset = np.mean([np.mean(max_raw, 0)[1], np.mean(min_raw, 0)[1]])
# fitting a k * x + m function to the peaks might be better
#offset_func = lambda x, k, m: k * x + m
    # calculate an approximate frequency of the signal
Hz = []
for raw in [max_raw, min_raw]:
if len(raw) > 1:
peak_pos = [x_axis[index] for index in zip(*raw)[0]]
Hz.append(np.mean(np.diff(peak_pos)))
Hz = 1 / np.mean(Hz)
# model function
# if cosine is used then tau could equal the x position of the peak
# if sine were to be used then tau would be the first zero crossing
if lock_frequency:
func = lambda x, A, tau: A * np.sin(2 * pi * Hz * (x - tau) + pi / 2)
else:
func = lambda x, A, Hz, tau: A * np.sin(2 * pi * Hz * (x - tau) +
pi / 2)
#func = lambda x, A, Hz, tau: A * np.cos(2 * pi * Hz * (x - tau))
#get peaks
fitted_peaks = []
for raw_peaks in [max_raw, min_raw]:
peak_data = []
for peak in raw_peaks:
index = peak[0]
x_data = x_axis[index - points // 2: index + points // 2 + 1]
y_data = y_axis[index - points // 2: index + points // 2 + 1]
# get a first approximation of tau (peak position in time)
tau = x_axis[index]
# get a first approximation of peak amplitude
A = peak[1]
# build list of approximations
if lock_frequency:
p0 = (A, tau)
else:
p0 = (A, Hz, tau)
# subtract offset from waveshape
y_data -= offset
popt, pcov = curve_fit(func, x_data, y_data, p0)
# retrieve tau and A i.e x and y value of peak
x = popt[-1]
y = popt[0]
# create a high resolution data set for the fitted waveform
x2 = np.linspace(x_data[0], x_data[-1], points * 10)
y2 = func(x2, *popt)
# add the offset to the results
y += offset
y2 += offset
y_data += offset
peak_data.append([x, y, [x2, y2]])
fitted_peaks.append(peak_data)
# structure date for output
max_peaks = map(lambda x: [x[0], x[1]], fitted_peaks[0])
max_fitted = map(lambda x: x[-1], fitted_peaks[0])
min_peaks = map(lambda x: [x[0], x[1]], fitted_peaks[1])
min_fitted = map(lambda x: x[-1], fitted_peaks[1])
#pylab.plot(x_axis, y_axis)
#pylab.hold(True)
#for max_p, max_f in zip(max_peaks, max_fitted):
# pylab.plot(max_p[0], max_p[1], 'x')
# pylab.plot(max_f[0], max_f[1], 'o', markersize = 2)
#for min_p, min_f in zip(min_peaks, min_fitted):
# pylab.plot(min_p[0], min_p[1], 'x')
# pylab.plot(min_f[0], min_f[1], 'o', markersize = 2)
#pylab.show()
return [max_peaks, min_peaks]
def peakdetect_sine_locked(y_axis, x_axis, points = 9):
"""
    Convenience function for calling the 'peakdetect_sine' function with
the lock_frequency argument as True.
keyword arguments:
    y_axis -- A list containing the signal over which to find peaks
    x_axis -- An x-axis whose values correspond to the y_axis list and is used
        in the return to specify the position of the peaks.
points -- (optional) How many points around the peak should be used during
curve fitting, must be odd (default: 9)
return -- see 'peakdetect_sine'
"""
return peakdetect_sine(y_axis, x_axis, points, True)
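# Illustrative usage sketch (added; not part of the original module). Assumes
# numpy is imported as np at module top, as the functions above require, and
# that scipy's curve_fit is available for the sine fit.
def _example_sine_locked():
    x = np.linspace(0, 10, 1000)
    # half-hertz sine with a little noise; the frequency is estimated from
    # the raw zero-crossing peaks and then locked during fitting
    y = np.sin(2 * 3.141592653589793 * 0.5 * x) + 0.05 * np.random.randn(1000)
    max_peaks, min_peaks = peakdetect_sine_locked(y, x, points=9)
    xm, ym = zip(*max_peaks)  # unpack (position, value) pairs
    return xm, ym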
def peakdetect_zero_crossing(y_axis, x_axis = None, window = 11):
"""
    Function for detecting local maxima and minima in a signal.
Discovers peaks by dividing the signal into bins and retrieving the
maximum and minimum value of each the even and odd bins respectively.
Division into bins is performed by smoothing the curve and finding the
zero crossings.
    Suitable for repeatable signals, where some noise is tolerated. Executes
faster than 'peakdetect', although this function will break if the offset
of the signal is too large. It should also be noted that the first and
last peak will probably not be found, as this function only can find peaks
between the first and last zero crossing.
keyword arguments:
    y_axis -- A list containing the signal over which to find peaks
    x_axis -- (optional) An x-axis whose values correspond to the y_axis list
        and is used in the return to specify the position of the peaks. If
        omitted, an index of the y_axis is used. (default: None)
window -- the dimension of the smoothing window; should be an odd integer
(default: 11)
return -- two lists [max_peaks, min_peaks] containing the positive and
    negative peaks respectively. Each cell of the lists contains a tuple
of: (position, peak_value)
    to get the average peak value, do np.mean(max_peaks, 0)[1] on the
    results; to unpack one of the lists into x, y coordinates, do:
    x, y = zip(*max_peaks)
"""
# check input data
x_axis, y_axis = _datacheck_peakdetect(x_axis, y_axis)
zero_indices = zero_crossings(y_axis, window = window)
period_lengths = np.diff(zero_indices)
bins_y = [y_axis[index:index + diff] for index, diff in
zip(zero_indices, period_lengths)]
bins_x = [x_axis[index:index + diff] for index, diff in
zip(zero_indices, period_lengths)]
even_bins_y = bins_y[::2]
odd_bins_y = bins_y[1::2]
even_bins_x = bins_x[::2]
odd_bins_x = bins_x[1::2]
hi_peaks_x = []
lo_peaks_x = []
#check if even bin contains maxima
if abs(even_bins_y[0].max()) > abs(even_bins_y[0].min()):
hi_peaks = [bin.max() for bin in even_bins_y]
lo_peaks = [bin.min() for bin in odd_bins_y]
# get x values for peak
for bin_x, bin_y, peak in zip(even_bins_x, even_bins_y, hi_peaks):
hi_peaks_x.append(bin_x[np.where(bin_y==peak)[0][0]])
for bin_x, bin_y, peak in zip(odd_bins_x, odd_bins_y, lo_peaks):
lo_peaks_x.append(bin_x[np.where(bin_y==peak)[0][0]])
else:
hi_peaks = [bin.max() for bin in odd_bins_y]
lo_peaks = [bin.min() for bin in even_bins_y]
# get x values for peak
for bin_x, bin_y, peak in zip(odd_bins_x, odd_bins_y, hi_peaks):
hi_peaks_x.append(bin_x[np.where(bin_y==peak)[0][0]])
for bin_x, bin_y, peak in zip(even_bins_x, even_bins_y, lo_peaks):
lo_peaks_x.append(bin_x[np.where(bin_y==peak)[0][0]])
max_peaks = [[x, y] for x,y in zip(hi_peaks_x, hi_peaks)]
min_peaks = [[x, y] for x,y in zip(lo_peaks_x, lo_peaks)]
return [max_peaks, min_peaks]
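# Illustrative usage sketch (added): the zero-crossing detector expects a
# repeatable, roughly zero-centred signal; numpy as np is assumed imported.
def _example_zero_crossing():
    x = np.linspace(0, 10, 1000)
    y = np.sin(2 * 3.141592653589793 * x)
    max_peaks, min_peaks = peakdetect_zero_crossing(y, x, window=11)
    # average peak value, as suggested in the docstring above
    return np.mean(max_peaks, 0)[1]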
def _smooth(x, window_len=11, window='hanning'):
"""
smooth the data using a window of the requested size.
This method is based on the convolution of a scaled window on the signal.
The signal is prepared by introducing reflected copies of the signal
(with the window size) in both ends so that transient parts are minimized
    in the beginning and end part of the output signal.
input:
x: the input signal
window_len: the dimension of the smoothing window; should be an odd
integer
window: the type of window from 'flat', 'hanning', 'hamming',
'bartlett', 'blackman'
flat window will produce a moving average smoothing.
output:
the smoothed signal
example:
    t = linspace(-2, 2, 50)
x = sin(t)+randn(len(t))*0.1
y = _smooth(x)
see also:
numpy.hanning, numpy.hamming, numpy.bartlett, numpy.blackman,
numpy.convolve, scipy.signal.lfilter
TODO: the window parameter could be the window itself if a list instead of
a string
"""
    if x.ndim != 1:
        raise ValueError("smooth only accepts 1 dimension arrays.")
    if x.size < window_len:
        raise ValueError("Input vector needs to be bigger than window size.")
    if window_len < 3:
        return x
    if window not in ['flat', 'hanning', 'hamming', 'bartlett', 'blackman']:
        # raise(ValueError, msg) would raise a bare ValueError and drop the
        # message under Python 2; call the exception class instead
        raise ValueError(
            "Window is not one of '{0}', '{1}', '{2}', '{3}', '{4}'".format(
                *('flat', 'hanning', 'hamming', 'bartlett', 'blackman')))
s = np.r_[x[window_len-1:0:-1], x, x[-1:-window_len:-1]]
#print(len(s))
if window == 'flat': #moving average
w = np.ones(window_len,'d')
else:
        w = getattr(np, window)(window_len)  # e.g. np.hanning; avoids eval
y = np.convolve(w / w.sum(), s, mode = 'valid')
return y
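# Illustrative usage sketch for _smooth (added): window_len should be odd and
# smaller than the signal; 'flat' gives a plain moving average.
def _example_smooth():
    t = np.linspace(-2, 2, 200)
    x = np.sin(t) + np.random.randn(len(t)) * 0.1
    y = _smooth(x, window_len=11, window='hanning')
    return y[:len(x)]  # trim the convolution tail, as zero_crossings does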
def zero_crossings(y_axis, window = 11):
"""
    Algorithm to find zero crossings. Smooths the curve and finds the
zero-crossings by looking for a sign change.
keyword arguments:
    y_axis -- A list containing the signal over which to find zero-crossings
window -- the dimension of the smoothing window; should be an odd integer
(default: 11)
return -- the index for each zero-crossing
"""
# smooth the curve
length = len(y_axis)
x_axis = np.asarray(range(length), int)
# discard tail of smoothed signal
y_axis = _smooth(y_axis, window)[:length]
zero_crossings = np.where(np.diff(np.sign(y_axis)))[0]
indices = [x_axis[index] for index in zero_crossings]
# check if zero-crossings are valid
diff = np.diff(indices)
    if diff.std() / diff.mean() > 0.2:
        print diff.std() / diff.mean()
        print np.diff(indices)
        raise ValueError(
            "False zero-crossings found, indicates problem {0} or {1}".format(
                "with smoothing window", "with offset"))
    # check if any zero crossings were found
    if len(zero_crossings) < 1:
        raise ValueError("No zero crossings found")
return indices
# used this to test the fft function's sensitivity to spectral leakage
#return indices + np.asarray(30 * np.random.randn(len(indices)), int)
############################Frequency calculation#############################
# diff = np.diff(indices)
# time_p_period = diff.mean()
#
# if diff.std() / time_p_period > 0.1:
# raise ValueError,
# "smoothing window too small, false zero-crossing found"
#
# #return frequency
# return 1.0 / time_p_period
##############################################################################
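# Illustrative usage sketch for zero_crossings (added): returns the indices
# where the smoothed signal changes sign; raises if the spacing between
# crossings is too irregular (see the 0.2 threshold above).
def _example_zero_crossings():
    y = np.sin(np.linspace(0, 6 * 3.141592653589793, 600))
    return zero_crossings(y, window=11)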
def _test_zero():
_max, _min = peakdetect_zero_crossing(y,x)
def _test():
_max, _min = peakdetect(y,x, delta=0.30)
def _test_graph():
i = 10000
x = np.linspace(0,3.7*pi,i)
y = (0.3*np.sin(x) + np.sin(1.3 * x) + 0.9 * np.sin(4.2 * x) + 0.06 *
np.random.randn(i))
y *= -1
x = range(i)
_max, _min = peakdetect(y,x,750, 0.30)
xm = [p[0] for p in _max]
ym = [p[1] for p in _max]
xn = [p[0] for p in _min]
yn = [p[1] for p in _min]
plot = pylab.plot(x,y)
pylab.hold(True)
pylab.plot(xm, ym, 'r+')
pylab.plot(xn, yn, 'g+')
_max, _min = peak_det_bad.peakdetect(y, 0.7, x)
xm = [p[0] for p in _max]
ym = [p[1] for p in _max]
xn = [p[0] for p in _min]
yn = [p[1] for p in _min]
pylab.plot(xm, ym, 'y*')
pylab.plot(xn, yn, 'k*')
pylab.show()
if __name__ == "__main__":
from math import pi
import pylab
i = 10000
x = np.linspace(0,3.7*pi,i)
y = (0.3*np.sin(x) + np.sin(1.3 * x) + 0.9 * np.sin(4.2 * x) + 0.06 *
np.random.randn(i))
y *= -1
_max, _min = peakdetect(y, x, 750, 0.30)
xm = [p[0] for p in _max]
ym = [p[1] for p in _max]
xn = [p[0] for p in _min]
yn = [p[1] for p in _min]
plot = pylab.plot(x, y)
pylab.hold(True)
pylab.plot(xm, ym, 'r+')
pylab.plot(xn, yn, 'g+')
pylab.show() | {
"content_hash": "7788f30841188de46059cb643224fd2d",
"timestamp": "",
"source": "github",
"line_count": 733,
"max_line_length": 78,
"avg_line_length": 38.551159618008185,
"alnum_prop": 0.590947696227617,
"repo_name": "zhuww/planetclient",
"id": "d7cf46924b07222b7003f9407b2d2e768704df8f",
"size": "28258",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "peakdetect.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "FORTRAN",
"bytes": "11799"
},
{
"name": "Matlab",
"bytes": "2345"
},
{
"name": "Python",
"bytes": "66640"
}
],
"symlink_target": ""
} |
"""Handler for the image of the sdk API."""
import json
from zvmconnector import connector
from zvmsdk import config
from zvmsdk import log
from zvmsdk import utils
from zvmsdk.sdkwsgi.handlers import tokens
from zvmsdk.sdkwsgi.schemas import image
from zvmsdk.sdkwsgi import util
from zvmsdk.sdkwsgi import validation
_IMAGEACTION = None
CONF = config.CONF
LOG = log.LOG
class ImageAction(object):
def __init__(self):
self.client = connector.ZVMConnector(connection_type='socket',
ip_addr=CONF.sdkserver.bind_addr,
port=CONF.sdkserver.bind_port)
@validation.schema(image.create)
def create(self, body):
        image_body = body['image']  # avoid shadowing the imported schemas module
        image_name = image_body['image_name']
        url = image_body['url']
        remote_host = image_body.get('remote_host', None)
        image_meta = image_body['image_meta']
info = self.client.send_request('image_import', image_name,
url, image_meta, remote_host)
return info
@validation.query_schema(image.query)
def get_root_disk_size(self, req, name):
        # FIXME: this param needs the combined image name, e.g. the profile
        # name, not the given image name from the customer side
info = self.client.send_request('image_get_root_disk_size',
name)
return info
def delete(self, name):
info = self.client.send_request('image_delete', name)
return info
@validation.query_schema(image.query)
def query(self, req, name):
info = self.client.send_request('image_query', name)
return info
@validation.schema(image.export)
def export(self, name, body):
location = body['location']
dest_url = location['dest_url']
remotehost = location.get('remote_host', None)
info = self.client.send_request('image_export', name,
dest_url, remotehost)
return info
def get_action():
global _IMAGEACTION
if _IMAGEACTION is None:
_IMAGEACTION = ImageAction()
return _IMAGEACTION
@util.SdkWsgify
@tokens.validate
def image_create(req):
def _image_create(req):
action = get_action()
body = util.extract_json(req.body)
return action.create(body=body)
info = _image_create(req)
info_json = json.dumps(info)
req.response.body = utils.to_utf8(info_json)
req.response.status = util.get_http_code_from_sdk_return(info,
        additional_handler=util.handle_already_exists)
req.response.content_type = 'application/json'
return req.response
@util.SdkWsgify
@tokens.validate
def image_get_root_disk_size(req):
def _image_get_root_disk_size(req, name):
action = get_action()
return action.get_root_disk_size(req, name)
name = util.wsgi_path_item(req.environ, 'name')
info = _image_get_root_disk_size(req, name)
info_json = json.dumps(info)
req.response.body = utils.to_utf8(info_json)
req.response.content_type = 'application/json'
req.response.status = util.get_http_code_from_sdk_return(info)
return req.response
@util.SdkWsgify
@tokens.validate
def image_delete(req):
def _image_delete(name):
action = get_action()
return action.delete(name)
name = util.wsgi_path_item(req.environ, 'name')
info = _image_delete(name)
info_json = json.dumps(info)
req.response.body = utils.to_utf8(info_json)
req.response.status = util.get_http_code_from_sdk_return(info, default=200)
req.response.content_type = 'application/json'
return req.response
@util.SdkWsgify
@tokens.validate
def image_export(req):
def _image_export(name, req):
action = get_action()
body = util.extract_json(req.body)
return action.export(name, body=body)
name = util.wsgi_path_item(req.environ, 'name')
info = _image_export(name, req)
info_json = json.dumps(info)
req.response.body = utils.to_utf8(info_json)
req.response.status = util.get_http_code_from_sdk_return(info,
additional_handler=util.handle_not_found)
req.response.content_type = 'application/json'
return req.response
@util.SdkWsgify
@tokens.validate
def image_query(req):
def _image_query(imagename, req):
action = get_action()
return action.query(req, imagename)
imagename = None
if 'imagename' in req.GET:
imagename = req.GET['imagename']
info = _image_query(imagename, req)
info_json = json.dumps(info)
req.response.body = utils.to_utf8(info_json)
req.response.status = util.get_http_code_from_sdk_return(info,
        additional_handler=util.handle_not_found)
req.response.content_type = 'application/json'
return req.response
| {
"content_hash": "c52eb5446337cf6006a83f4d2e0b1389",
"timestamp": "",
"source": "github",
"line_count": 165,
"max_line_length": 79,
"avg_line_length": 29.35151515151515,
"alnum_prop": 0.6411315300433615,
"repo_name": "mfcloud/python-zvm-sdk",
"id": "1657210ca59ae9a2c3272f2ab42d58a62a708bd6",
"size": "5450",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "zvmsdk/sdkwsgi/handlers/image.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "2538773"
},
{
"name": "C++",
"bytes": "952"
},
{
"name": "Makefile",
"bytes": "50059"
},
{
"name": "Python",
"bytes": "1590423"
},
{
"name": "Shell",
"bytes": "145392"
},
{
"name": "Smarty",
"bytes": "10342"
}
],
"symlink_target": ""
} |
import struct
import warnings
import png
class FormatError(Exception):
pass
class Profile:
"""An International Color Consortium Profile (ICC Profile)."""
def __init__(self):
self.rawtagtable = None
self.rawtagdict = {}
self.d = dict()
def fromFile(self, inp, name='<unknown>'):
# See [ICC 2004]
profile = inp.read(128)
if len(profile) < 128:
raise FormatError("ICC Profile is too short.")
size, = struct.unpack('>L', profile[:4])
        profile += inp.read(size - len(profile))
return self.fromString(profile, name)
def fromString(self, profile, name='<unknown>'):
self.d = dict()
d = self.d
if len(profile) < 128:
raise FormatError("ICC Profile is too short.")
d.update(
zip(['size', 'preferredCMM', 'version',
'profileclass', 'colourspace', 'pcs'],
struct.unpack('>L4sL4s4s4s', profile[:24])))
if len(profile) < d['size']:
warnings.warn(
'Profile size declared to be %d, but only got %d bytes' %
(d['size'], len(profile)))
d['version'] = '%08x' % d['version']
d['created'] = readICCdatetime(profile[24:36])
d.update(
zip(['acsp', 'platform', 'flag', 'manufacturer', 'model'],
struct.unpack('>4s4s3L', profile[36:56])))
if d['acsp'] != 'acsp':
warnings.warn('acsp field not present (not an ICC Profile?).')
d['deviceattributes'] = profile[56:64]
d['intent'], = struct.unpack('>L', profile[64:68])
d['pcsilluminant'] = readICCXYZNumber(profile[68:80])
d['creator'] = profile[80:84]
d['id'] = profile[84:100]
ntags, = struct.unpack('>L', profile[128:132])
d['ntags'] = ntags
fmt = '4s2L' * ntags
# tag table
tt = struct.unpack('>' + fmt, profile[132:132+12*ntags])
tt = group(tt, 3)
# Could (should) detect 2 or more tags having the same sig. But
# we don't. Two or more tags with the same sig is illegal per
# the ICC spec.
# Convert (sig,offset,size) triples into (sig,value) pairs.
rawtag = map(lambda x: (x[0], profile[x[1]:x[1]+x[2]]), tt)
self.rawtagtable = rawtag
self.rawtagdict = dict(rawtag)
tag = dict()
# Interpret the tags whose types we know about
for sig, v in rawtag:
if sig in tag:
warnings.warn("Duplicate tag %r found. Ignoring." % sig)
continue
v = ICCdecode(v)
if v is not None:
tag[sig] = v
self.tag = tag
return self
def greyInput(self):
"""Adjust ``self.d`` dictionary for greyscale input device.
``profileclass`` is 'scnr', ``colourspace`` is 'GRAY', ``pcs``
is 'XYZ '.
"""
self.d.update(dict(profileclass='scnr',
colourspace='GRAY', pcs='XYZ '))
return self
def maybeAddDefaults(self):
if self.rawtagdict:
return
self._addTags(
cprt='Copyright unknown.',
desc='created by $URL$ $Rev$',
wtpt=D50(),
)
def addTags(self, **k):
self.maybeAddDefaults()
self._addTags(**k)
def _addTags(self, **k):
"""Helper for :meth:`addTags`."""
for tag, thing in k.items():
if not isinstance(thing, (tuple, list)):
thing = (thing,)
typetag = defaulttagtype[tag]
self.rawtagdict[tag] = encode(typetag, *thing)
return self
def write(self, out):
"""Write ICC Profile to the file."""
if not self.rawtagtable:
self.rawtagtable = self.rawtagdict.items()
tags = tagblock(self.rawtagtable)
self.writeHeader(out, 128 + len(tags))
out.write(tags)
out.flush()
return self
def writeHeader(self, out, size=999):
"""Add default values to the instance's `d` dictionary, then
write a header out onto the file stream. The size of the
profile must be specified using the `size` argument.
"""
def defaultkey(d, key, value):
"""Add ``[key]==value`` to the dictionary `d`, but only if
it does not have that key already.
"""
if key in d:
return
d[key] = value
z = '\x00' * 4
defaults = dict(preferredCMM=z,
version='02000000',
profileclass=z,
colourspace=z,
pcs='XYZ ',
created=writeICCdatetime(),
acsp='acsp',
platform=z,
flag=0,
manufacturer=z,
model=0,
deviceattributes=0,
intent=0,
pcsilluminant=encodefuns()['XYZ'](*D50()),
creator=z,
)
for k,v in defaults.items():
defaultkey(self.d, k, v)
hl = map(self.d.__getitem__,
['preferredCMM', 'version', 'profileclass', 'colourspace',
'pcs', 'created', 'acsp', 'platform', 'flag',
'manufacturer', 'model', 'deviceattributes', 'intent',
'pcsilluminant', 'creator'])
# Convert to struct.pack input
hl[1] = int(hl[1], 16)
out.write(struct.pack('>L4sL4s4s4s12s4s4sL4sLQL12s4s', size, *hl))
out.write('\x00' * 44)
return self
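# Illustrative sketch (added, not part of the original module): building a
# minimal greyscale scanner profile from the defaults above and writing it
# out. Assumes the Python 2 byte-string semantics this module is written in;
# 'grey.icc' is a placeholder filename.
def _example_write_profile():
    p = Profile().greyInput()
    p.maybeAddDefaults()
    with open('grey.icc', 'wb') as out:
        p.write(out)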
def encodefuns():
"""Returns a dictionary mapping ICC type signature sig to encoding
function. Each function returns a string comprising the content of
the encoded value. To form the full value, the type sig and the 4
zero bytes should be prefixed (8 bytes).
"""
def desc(ascii):
"""Return textDescription type [ICC 2001] 6.5.17. The ASCII part is
filled in with the string `ascii`, the Unicode and ScriptCode parts
are empty."""
ascii += '\x00'
l = len(ascii)
return struct.pack('>L%ds2LHB67s' % l,
l, ascii, 0, 0, 0, 0, '')
def text(ascii):
"""Return textType [ICC 2001] 6.5.18."""
return ascii + '\x00'
def curv(f=None, n=256):
"""Return a curveType, [ICC 2001] 6.5.3. If no arguments are
supplied then a TRC for a linear response is generated (no entries).
If an argument is supplied and it is a number (for *f* to be a
number it means that ``float(f)==f``) then a TRC for that
gamma value is generated.
Otherwise `f` is assumed to be a function that maps [0.0, 1.0] to
[0.0, 1.0]; an `n` element table is generated for it.
"""
if f is None:
return struct.pack('>L', 0)
try:
if float(f) == f:
return struct.pack('>LH', 1, int(round(f*2**8)))
except (TypeError, ValueError):
pass
assert n >= 2
table = []
M = float(n-1)
for i in range(n):
x = i/M
table.append(int(round(f(x) * 65535)))
return struct.pack('>L%dH' % n, n, *table)
def XYZ(*l):
return struct.pack('>3l', *map(fs15f16, l))
return locals()
# Tag type defaults.
# Most tags can only have one or a few tag types.
# When encoding, we associate a default tag type with each tag so that
# the encoding is implicit.
defaulttagtype=dict(
A2B0='mft1',
A2B1='mft1',
A2B2='mft1',
bXYZ='XYZ',
bTRC='curv',
B2A0='mft1',
B2A1='mft1',
B2A2='mft1',
calt='dtim',
targ='text',
chad='sf32',
chrm='chrm',
cprt='desc',
crdi='crdi',
dmnd='desc',
dmdd='desc',
devs='',
gamt='mft1',
kTRC='curv',
gXYZ='XYZ',
gTRC='curv',
lumi='XYZ',
meas='',
bkpt='XYZ',
wtpt='XYZ',
ncol='',
ncl2='',
resp='',
pre0='mft1',
pre1='mft1',
pre2='mft1',
desc='desc',
pseq='',
psd0='data',
psd1='data',
psd2='data',
psd3='data',
ps2s='data',
ps2i='data',
rXYZ='XYZ',
rTRC='curv',
scrd='desc',
scrn='',
tech='sig',
bfd='',
vued='desc',
view='view',
)
def encode(tsig, *l):
"""Encode a Python value as an ICC type. `tsig` is the type
    signature (the first 4 bytes of the encoded value); see [ICC 2004]
    section 10.
"""
fun = encodefuns()
if tsig not in fun:
raise "No encoder for type %r." % tsig
v = fun[tsig](*l)
    # Pad tsig out with spaces.
tsig = (tsig + ' ')[:4]
return tsig + '\x00'*4 + v
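# Illustrative sketch (added): encoding two tag values and assembling them
# into a tag block with tagblock() below. The 'desc' and 'XYZ' type
# signatures and the D50() white point are all defined in this module.
def _example_encode():
    copyright_tag = encode('desc', 'Copyright unknown.')
    whitepoint_tag = encode('XYZ', *D50())
    return tagblock([('cprt', copyright_tag), ('wtpt', whitepoint_tag)])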
def tagblock(tag):
"""`tag` should be a list of (*signature*, *element*) pairs, where
*signature* (the key) is a length 4 string, and *element* is the
content of the tag element (another string).
The entire tag block (consisting of first a table and then the
element data) is constructed and returned as a string.
"""
n = len(tag)
tablelen = 12*n
# Build the tag table in two parts. A list of 12-byte tags, and a
# string of element data. Offset is the offset from the start of
# the profile to the start of the element data (so the offset for
# the next element is this offset plus the length of the element
# string so far).
offset = 128 + tablelen + 4
# The table. As a string.
table = ''
# The element data
element = ''
for k,v in tag:
table += struct.pack('>4s2L', k, offset + len(element), len(v))
element += v
return struct.pack('>L', n) + table + element
def iccp(out, inp):
profile = Profile().fromString(*profileFromPNG(inp))
print >>out, profile.d
print >>out, map(lambda x: x[0], profile.rawtagtable)
print >>out, profile.tag
def profileFromPNG(inp):
"""Extract profile from PNG file. Return (*profile*, *name*)
pair."""
r = png.Reader(file=inp)
_,chunk = r.chunk('iCCP')
i = chunk.index('\x00')
name = chunk[:i]
compression = chunk[i+1]
assert compression == chr(0)
profile = chunk[i+2:].decode('zlib')
return profile, name
def iccpout(out, inp):
"""Extract ICC Profile from PNG file `inp` and write it to
the file `out`."""
out.write(profileFromPNG(inp)[0])
def fs15f16(x):
"""Convert float to ICC s15Fixed16Number (as a Python ``int``)."""
return int(round(x * 2**16))
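# Worked example (added): fs15f16(1.0) == 65536 and fs15f16(-0.5) == -32768,
# matching the s15Fixed16 fixed-point layout (16 fractional bits).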
def D50():
"""Return D50 illuminant as an (X,Y,Z) triple."""
# See [ICC 2001] A.1
return (0.9642, 1.0000, 0.8249)
def writeICCdatetime(t=None):
"""`t` should be a gmtime tuple (as returned from
``time.gmtime()``). If not supplied, the current time will be used.
Return an ICC dateTimeNumber in a 12 byte string.
"""
import time
if t is None:
t = time.gmtime()
return struct.pack('>6H', *t[:6])
def readICCdatetime(s):
"""Convert from 12 byte ICC representation of dateTimeNumber to
ISO8601 string. See [ICC 2004] 5.1.1"""
return '%04d-%02d-%02dT%02d:%02d:%02dZ' % struct.unpack('>6H', s)
def readICCXYZNumber(s):
"""Convert from 12 byte ICC representation of XYZNumber to (x,y,z)
triple of floats. See [ICC 2004] 5.1.11"""
return s15f16l(s)
def s15f16l(s):
"""Convert sequence of ICC s15Fixed16 to list of float."""
# Note: As long as float has at least 32 bits of mantissa, all
# values are preserved.
n = len(s)//4
t = struct.unpack('>%dl' % n, s)
return map((2**-16).__mul__, t)
# Several types and their byte encodings are defined by [ICC 2004]
# section 10. When encoded, a value begins with a 4 byte type
# signature. We use the same 4 byte type signature in the names of the
# Python functions that decode the type into a Pythonic representation.
def ICCdecode(s):
"""Take an ICC encoded tag, and dispatch on its type signature
(first 4 bytes) to decode it into a Python value. Pair (*sig*,
*value*) is returned, where *sig* is a 4 byte string, and *value* is
some Python value determined by the content and type.
"""
sig = s[0:4].strip()
f=dict(text=RDtext,
XYZ=RDXYZ,
curv=RDcurv,
vcgt=RDvcgt,
sf32=RDsf32,
)
if sig not in f:
return None
return (sig, f[sig](s))
def RDXYZ(s):
"""Convert ICC XYZType to rank 1 array of trimulus values."""
# See [ICC 2001] 6.5.26
assert s[0:4] == 'XYZ '
return readICCXYZNumber(s[8:])
def RDsf32(s):
"""Convert ICC s15Fixed16ArrayType to list of float."""
# See [ICC 2004] 10.18
assert s[0:4] == 'sf32'
return s15f16l(s[8:])
def RDmluc(s):
"""Convert ICC multiLocalizedUnicodeType. This types encodes
several strings together with a language/country code for each
string. A list of (*lc*, *string*) pairs is returned where *lc* is
the 4 byte language/country code, and *string* is the string
corresponding to that code. It seems unlikely that the same
language/country code will appear more than once with different
strings, but the ICC standard does not prohibit it."""
# See [ICC 2004] 10.13
assert s[0:4] == 'mluc'
n,sz = struct.unpack('>2L', s[8:16])
assert sz == 12
record = []
for i in range(n):
        # index with the loop variable i (not n) and store a (code, string) pair
        lc,l,o = struct.unpack('>4s2L', s[16+12*i:28+12*i])
        record.append((lc, s[o:o+l]))
# How are strings encoded?
return record
def RDtext(s):
"""Convert ICC textType to Python string."""
# Note: type not specified or used in [ICC 2004], only in older
# [ICC 2001].
# See [ICC 2001] 6.5.18
assert s[0:4] == 'text'
return s[8:-1]
def RDcurv(s):
"""Convert ICC curveType."""
# See [ICC 2001] 6.5.3
assert s[0:4] == 'curv'
count, = struct.unpack('>L', s[8:12])
if count == 0:
return dict(gamma=1)
table = struct.unpack('>%dH' % count, s[12:])
if count == 1:
return dict(gamma=table[0]*2**-8)
return table
def RDvcgt(s):
"""Convert Apple CMVideoCardGammaType."""
# See
# http://developer.apple.com/documentation/GraphicsImaging/Reference/ColorSync_Manager/Reference/reference.html#//apple_ref/c/tdef/CMVideoCardGammaType
assert s[0:4] == 'vcgt'
    tagtype, = struct.unpack('>L', s[8:12])
    if tagtype != 0:
        return s[8:]
    # tagtype == 0: table of gamma values
    channels,count,size = struct.unpack('>3H', s[12:18])
    if size == 1:
        fmt = 'B'
    elif size == 2:
        fmt = 'H'
    else:
        return s[8:]
    l = len(s[18:])//size
    t = struct.unpack('>%d%s' % (l, fmt), s[18:])
    t = group(t, count)
    return size, t
def group(s, n):
# See
# http://www.python.org/doc/2.6/library/functions.html#zip
return zip(*[iter(s)]*n)
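# Worked example (added): under Python 2, group(range(6), 2) returns
# [(0, 1), (2, 3), (4, 5)] -- consecutive items collected n at a time.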
def main(argv=None):
import sys
from getopt import getopt
if argv is None:
argv = sys.argv
argv = argv[1:]
opt,arg = getopt(argv, 'o:')
if len(arg) > 0:
inp = open(arg[0], 'rb')
else:
inp = sys.stdin
for o,v in opt:
if o == '-o':
f = open(v, 'wb')
return iccpout(f, inp)
return iccp(sys.stdout, inp)
if __name__ == '__main__':
main()
| {
"content_hash": "19eed4964c476b16cb02d8d66085b676",
"timestamp": "",
"source": "github",
"line_count": 508,
"max_line_length": 155,
"avg_line_length": 30.106299212598426,
"alnum_prop": 0.5544658035831045,
"repo_name": "mnaberez/pypng",
"id": "a56ffc498636aa79cf9eeb643612c553bc80e4fc",
"size": "16035",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "code/iccp.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "250426"
},
{
"name": "Shell",
"bytes": "646"
}
],
"symlink_target": ""
} |
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from pants.backend.codegen.antlr.java.antlr_gen import AntlrGen
from pants.base.deprecated import deprecated_module
deprecated_module('1.5.0dev0', 'Use pants.backend.codegen.antlr.java instead')
AntlrGen = AntlrGen
| {
"content_hash": "034e82b36232904cb538e2358439cc6e",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 93,
"avg_line_length": 37.1,
"alnum_prop": 0.7466307277628033,
"repo_name": "mateor/pants",
"id": "88341323fc0b2933de36a8d322982b86589a320b",
"size": "518",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/python/pants/backend/codegen/tasks/antlr_gen.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "781"
},
{
"name": "CSS",
"bytes": "9444"
},
{
"name": "GAP",
"bytes": "2459"
},
{
"name": "Gherkin",
"bytes": "919"
},
{
"name": "Go",
"bytes": "1746"
},
{
"name": "HTML",
"bytes": "79866"
},
{
"name": "Java",
"bytes": "459514"
},
{
"name": "JavaScript",
"bytes": "29992"
},
{
"name": "Protocol Buffer",
"bytes": "4749"
},
{
"name": "Python",
"bytes": "5386641"
},
{
"name": "Rust",
"bytes": "70804"
},
{
"name": "Scala",
"bytes": "80844"
},
{
"name": "Shell",
"bytes": "64072"
},
{
"name": "Thrift",
"bytes": "2046"
}
],
"symlink_target": ""
} |
import argparse
import os
import sys
import httplib2
from oslo_serialization import jsonutils as json
from six import moves
from six.moves.urllib import parse as urlparse
from tempest import clients
from tempest.common import credentials
from tempest import config
CONF = config.CONF
CONF_PARSER = None
def _get_config_file():
default_config_dir = os.path.join(os.path.abspath(
os.path.dirname(os.path.dirname(os.path.dirname(__file__)))), "etc")
default_config_file = "tempest.conf"
conf_dir = os.environ.get('TEMPEST_CONFIG_DIR', default_config_dir)
conf_file = os.environ.get('TEMPEST_CONFIG', default_config_file)
path = os.path.join(conf_dir, conf_file)
    fd = open(path, 'r')  # the parser only reads; 'rw' is not a valid mode
return fd
def change_option(option, group, value):
if not CONF_PARSER.has_section(group):
CONF_PARSER.add_section(group)
CONF_PARSER.set(group, option, str(value))
def print_and_or_update(option, group, value, update):
print('Config option %s in group %s should be changed to: %s'
% (option, group, value))
if update:
change_option(option, group, value)
def contains_version(prefix, versions):
return any([x for x in versions if x.startswith(prefix)])
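# Worked example (added): contains_version('v2.', ['v1.0', 'v2.1']) is True,
# since any listed version starting with the prefix counts as a match.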
def verify_glance_api_versions(os, update):
# Check glance api versions
_, versions = os.image_client.get_versions()
if CONF.image_feature_enabled.api_v1 != contains_version('v1.', versions):
print_and_or_update('api_v1', 'image_feature_enabled',
not CONF.image_feature_enabled.api_v1, update)
if CONF.image_feature_enabled.api_v2 != contains_version('v2.', versions):
print_and_or_update('api_v2', 'image_feature_enabled',
not CONF.image_feature_enabled.api_v2, update)
def _get_unversioned_endpoint(base_url):
endpoint_parts = urlparse.urlparse(base_url)
endpoint = endpoint_parts.scheme + '://' + endpoint_parts.netloc
return endpoint
def _get_api_versions(os, service):
client_dict = {
'nova': os.servers_client,
'keystone': os.identity_client,
'cinder': os.volumes_client,
}
client_dict[service].skip_path()
endpoint = _get_unversioned_endpoint(client_dict[service].base_url)
dscv = CONF.identity.disable_ssl_certificate_validation
ca_certs = CONF.identity.ca_certificates_file
raw_http = httplib2.Http(disable_ssl_certificate_validation=dscv,
ca_certs=ca_certs)
__, body = raw_http.request(endpoint, 'GET')
client_dict[service].reset_path()
body = json.loads(body)
if service == 'keystone':
versions = map(lambda x: x['id'], body['versions']['values'])
else:
versions = map(lambda x: x['id'], body['versions'])
return list(versions)
def verify_keystone_api_versions(os, update):
# Check keystone api versions
versions = _get_api_versions(os, 'keystone')
if (CONF.identity_feature_enabled.api_v2 !=
contains_version('v2.', versions)):
print_and_or_update('api_v2', 'identity_feature_enabled',
not CONF.identity_feature_enabled.api_v2, update)
if (CONF.identity_feature_enabled.api_v3 !=
contains_version('v3.', versions)):
print_and_or_update('api_v3', 'identity_feature_enabled',
not CONF.identity_feature_enabled.api_v3, update)
def verify_cinder_api_versions(os, update):
# Check cinder api versions
versions = _get_api_versions(os, 'cinder')
if (CONF.volume_feature_enabled.api_v1 !=
contains_version('v1.', versions)):
print_and_or_update('api_v1', 'volume_feature_enabled',
not CONF.volume_feature_enabled.api_v1, update)
if (CONF.volume_feature_enabled.api_v2 !=
contains_version('v2.', versions)):
print_and_or_update('api_v2', 'volume_feature_enabled',
not CONF.volume_feature_enabled.api_v2, update)
def verify_api_versions(os, service, update):
verify = {
'cinder': verify_cinder_api_versions,
'glance': verify_glance_api_versions,
'keystone': verify_keystone_api_versions,
}
if service not in verify:
return
verify[service](os, update)
def get_extension_client(os, service):
extensions_client = {
'nova': os.extensions_client,
'cinder': os.volumes_extension_client,
'neutron': os.network_client,
'swift': os.account_client,
}
# NOTE (e0ne): Use Cinder API v2 by default because v1 is deprecated
if CONF.volume_feature_enabled.api_v2:
extensions_client['cinder'] = os.volumes_v2_extension_client
else:
extensions_client['cinder'] = os.volumes_extension_client
if service not in extensions_client:
print('No tempest extensions client for %s' % service)
exit(1)
return extensions_client[service]
def get_enabled_extensions(service):
extensions_options = {
'nova': CONF.compute_feature_enabled.api_extensions,
'cinder': CONF.volume_feature_enabled.api_extensions,
'neutron': CONF.network_feature_enabled.api_extensions,
'swift': CONF.object_storage_feature_enabled.discoverable_apis,
}
if service not in extensions_options:
print('No supported extensions list option for %s' % service)
exit(1)
return extensions_options[service]
def verify_extensions(os, service, results):
extensions_client = get_extension_client(os, service)
if service != 'swift':
resp = extensions_client.list_extensions()
else:
__, resp = extensions_client.list_extensions()
# For Nova, Cinder and Neutron we use the alias name rather than the
# 'name' field because the alias is considered to be the canonical
# name.
if isinstance(resp, dict):
if service == 'swift':
# Remove Swift general information from extensions list
resp.pop('swift')
extensions = resp.keys()
else:
extensions = map(lambda x: x['alias'], resp['extensions'])
else:
extensions = map(lambda x: x['alias'], resp)
extensions = list(extensions)
if not results.get(service):
results[service] = {}
extensions_opt = get_enabled_extensions(service)
if extensions_opt[0] == 'all':
results[service]['extensions'] = extensions
return results
# Verify that all configured extensions are actually enabled
for extension in extensions_opt:
results[service][extension] = extension in extensions
# Verify that there aren't additional extensions enabled that aren't
# specified in the config list
for extension in extensions:
if extension not in extensions_opt:
results[service][extension] = False
return results
def display_results(results, update, replace):
update_dict = {
'swift': 'object-storage-feature-enabled',
'nova': 'compute-feature-enabled',
'cinder': 'volume-feature-enabled',
'neutron': 'network-feature-enabled',
}
for service in results:
# If all extensions are specified as being enabled there is no way to
# verify this so we just assume this to be true
if results[service].get('extensions'):
if replace:
output_list = results[service].get('extensions')
else:
output_list = ['all']
else:
extension_list = get_enabled_extensions(service)
output_list = []
for extension in results[service]:
if not results[service][extension]:
if extension in extension_list:
print("%s extension: %s should not be included in the "
"list of enabled extensions" % (service,
extension))
else:
print("%s extension: %s should be included in the list"
" of enabled extensions" % (service, extension))
output_list.append(extension)
else:
output_list.append(extension)
if update:
# Sort List
output_list.sort()
# Convert list to a string
output_string = ', '.join(output_list)
if service == 'swift':
change_option('discoverable_apis', update_dict[service],
output_string)
else:
change_option('api_extensions', update_dict[service],
output_string)
def check_service_availability(os, update):
services = []
avail_services = []
codename_match = {
'volume': 'cinder',
'network': 'neutron',
'image': 'glance',
'object_storage': 'swift',
'compute': 'nova',
'orchestration': 'heat',
'metering': 'ceilometer',
'telemetry': 'ceilometer',
'data_processing': 'sahara',
'baremetal': 'ironic',
'identity': 'keystone',
'messaging': 'zaqar',
'database': 'trove'
}
# Get catalog list for endpoints to use for validation
_token, auth_data = os.auth_provider.get_auth()
if os.auth_version == 'v2':
catalog_key = 'serviceCatalog'
else:
catalog_key = 'catalog'
for entry in auth_data[catalog_key]:
services.append(entry['type'])
# Pull all catalog types from config file and compare against endpoint list
for cfgname in dir(CONF._config):
cfg = getattr(CONF, cfgname)
catalog_type = getattr(cfg, 'catalog_type', None)
if not catalog_type:
continue
else:
if cfgname == 'identity':
# Keystone is a required service for tempest
continue
if catalog_type not in services:
if getattr(CONF.service_available, codename_match[cfgname]):
print('Endpoint type %s not found either disable service '
'%s or fix the catalog_type in the config file' % (
catalog_type, codename_match[cfgname]))
if update:
change_option(codename_match[cfgname],
'service_available', False)
else:
if not getattr(CONF.service_available,
codename_match[cfgname]):
print('Endpoint type %s is available, service %s should be'
' set as available in the config file.' % (
catalog_type, codename_match[cfgname]))
if update:
change_option(codename_match[cfgname],
'service_available', True)
# If we are going to enable this we should allow
# extension checks.
avail_services.append(codename_match[cfgname])
else:
avail_services.append(codename_match[cfgname])
return avail_services
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('-u', '--update', action='store_true',
help='Update the config file with results from api '
'queries. This assumes whatever is set in the '
'config file is incorrect. In the case of '
'endpoint checks where it could either be the '
'incorrect catalog type or the service available '
'option the service available option is assumed '
'to be incorrect and is thus changed')
parser.add_argument('-o', '--output',
help="Output file to write an updated config file to. "
"This has to be a separate file from the "
"original config file. If one isn't specified "
"with -u the new config file will be printed to "
"STDOUT")
parser.add_argument('-r', '--replace-ext', action='store_true',
help="If specified the all option will be replaced "
"with a full list of extensions")
args = parser.parse_args()
return args
def main():
print('Running config verification...')
opts = parse_args()
update = opts.update
replace = opts.replace_ext
global CONF_PARSER
outfile = sys.stdout
if update:
conf_file = _get_config_file()
if opts.output:
outfile = open(opts.output, 'w+')
CONF_PARSER = moves.configparser.SafeConfigParser()
CONF_PARSER.optionxform = str
CONF_PARSER.readfp(conf_file)
icreds = credentials.get_isolated_credentials('verify_tempest_config')
try:
os = clients.Manager(icreds.get_primary_creds())
services = check_service_availability(os, update)
results = {}
for service in ['nova', 'cinder', 'neutron', 'swift']:
if service not in services:
continue
results = verify_extensions(os, service, results)
# Verify API versions of all services in the keystone catalog and
# keystone itself.
services.append('keystone')
for service in services:
verify_api_versions(os, service, update)
display_results(results, update, replace)
if update:
conf_file.close()
CONF_PARSER.write(outfile)
outfile.close()
finally:
icreds.clear_isolated_creds()
if __name__ == "__main__":
main()
| {
"content_hash": "debe44ce8e2692a760ff45b6309f4169",
"timestamp": "",
"source": "github",
"line_count": 361,
"max_line_length": 79,
"avg_line_length": 38.498614958448755,
"alnum_prop": 0.5848323499784142,
"repo_name": "tonyli71/tempest",
"id": "6d53b598a5c238d3edc17882851b4bf1ab268c62",
"size": "14523",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tempest/cmd/verify_tempest_config.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "2823834"
},
{
"name": "Shell",
"bytes": "8578"
}
],
"symlink_target": ""
} |
from __future__ import print_function
import os
import json
import subprocess
import signal
from nxtools.common import decode_if_py3, string_types
from nxtools.misc import indent
from nxtools.logging import *
from nxtools.files import FileObject
__all__ = ["ffprobe"]
def ffprobe(input_file, verbose=False):
"""Runs ffprobe on file and returns python dict with result"""
if isinstance(input_file, FileObject):
exists = input_file.exists
path = input_file.path
elif type(input_file) in string_types:
exists = os.path.exists(input_file)
path = input_file
else:
raise TypeError("input_path must be of string or FileObject type")
if not exists:
logging.error("ffprobe: file does not exist ({})".format(input_path))
return False
cmd = [
"ffprobe",
"-show_format",
"-show_streams",
"-print_format", "json",
path
]
FNULL = open(os.devnull, "w")
logging.debug("Executing {}".format(" ".join(cmd)))
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
res = ""
while True:
ch = decode_if_py3(proc.stdout.read(1))
if not ch:
break
res += ch
    res += decode_if_py3(proc.stdout.read())
    proc.wait()  # returncode is only set once the process has been reaped
    if proc.returncode:
if verbose:
logging.error("Unable to read media file\n\n{}\n\n".format(indent(proc.stderr.read())))
else:
logging.warning("Unable to read media file {}".format(input_path))
return False
return json.loads(res)
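# Minimal usage sketch (added for illustration; "sample.mov" is a placeholder
# path, and the "streams"/"codec_type" keys follow ffprobe's JSON layout):
if __name__ == "__main__":
    probe = ffprobe("sample.mov", verbose=True)
    if probe:
        for stream in probe["streams"]:
            print(stream["codec_type"], stream.get("codec_name"))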
| {
"content_hash": "9b33e2eb45149bbc1b9a1e8d1a1ed95e",
"timestamp": "",
"source": "github",
"line_count": 51,
"max_line_length": 99,
"avg_line_length": 31.235294117647058,
"alnum_prop": 0.6151914626490897,
"repo_name": "martastain/nxtools",
"id": "42449f181e82e6548a8cf8574d168f523875e0f4",
"size": "2725",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nxtools/media/ffprobe.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "47725"
},
{
"name": "Shell",
"bytes": "95"
}
],
"symlink_target": ""
} |
from django.contrib import admin
from .models import Order, OrderItem, OrderStateChange
class OrderItemInline(admin.TabularInline): # noqa
model = OrderItem
class OrderStateChangeInline(admin.TabularInline): # noqa
model = OrderStateChange
class OrderAdmin(admin.ModelAdmin): # noqa
    list_display = ('coordinator', 'restaurant_name')  # 'list_fields' is not a ModelAdmin option
inlines = [
OrderItemInline,
OrderStateChangeInline,
]
admin.site.register(Order, OrderAdmin)
| {
"content_hash": "19b32e273f6061be6a05641fe70a760a",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 58,
"avg_line_length": 22.523809523809526,
"alnum_prop": 0.7293868921775899,
"repo_name": "chaosdorf/chaospizza",
"id": "428de3a668bc1950394d3024e4e4b0de5bf58e33",
"size": "497",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "src/chaospizza/orders/admin.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "875"
},
{
"name": "Dockerfile",
"bytes": "653"
},
{
"name": "HTML",
"bytes": "15355"
},
{
"name": "Makefile",
"bytes": "2744"
},
{
"name": "Python",
"bytes": "119619"
},
{
"name": "Shell",
"bytes": "944"
}
],
"symlink_target": ""
} |
import os
import sys
import tarfile
import logging
from uuid import uuid4
from functools import wraps
from StringIO import StringIO
from datetime import datetime
from contextlib import contextmanager
from mock import patch
import cloudify.utils
import cloudify.exceptions
from cloudify import ctx as op_ctx
from cloudify.decorators import operation, workflow
from cloudify.state import workflow_ctx as workflow_ctx
from cloudify.workflows import tasks as workflow_tasks
from cloudify_rest_client.nodes import Node
from cloudify_rest_client.executions import Execution
from cloudify_rest_client.maintenance import Maintenance
from cloudify_rest_client.node_instances import NodeInstance
def execution_mock(status, wf_id='mock_wf'):
return Execution({
'status': status,
'workflow_id': wf_id,
'deployment_id': 'deployment-id',
'blueprint_id': 'blueprint-id',
'error': '',
'id': uuid4(),
'created_at': datetime.now().isoformat()[:-3],
'parameters': {}
})
def mock_log_message_prefix(event):
return event['event_name']
@operation
def mock_op(param, custom_param=None, **kwargs):
props = op_ctx.instance.runtime_properties
props['param'] = param
props['custom_param'] = custom_param
props['provider_context'] = op_ctx.provider_context
@workflow
def mock_workflow(param, custom_param=None, **kwargs):
for node in workflow_ctx.nodes:
for instance in node.instances:
instance.execute_operation('test.op', kwargs={
'param': param,
'custom_param': custom_param
})
@workflow
def logging_workflow(**kwargs):
kwargs.pop('ctx', None)
graph = workflow_ctx.graph_mode()
instance = next(workflow_ctx.node_instances)
task = instance.execute_operation('test.op', kwargs=kwargs)
def on_failure(tsk):
return workflow_tasks.HandlerResult.ignore()
task.on_failure = on_failure
graph.add_task(task)
graph.execute()
@operation
def logging_operation(level, message, error=False, user_cause=False, **kwargs):
if error:
causes = []
if user_cause:
try:
raise RuntimeError(message)
except RuntimeError:
_, ex, tb = sys.exc_info()
causes.append(cloudify.utils.exception_to_error_cause(
ex, tb))
raise cloudify.exceptions.NonRecoverableError(message, causes=causes)
else:
level = getattr(logging, level)
op_ctx.logger.log(level, message)
def counter(func):
@wraps(func)
def tmp(*_):
tmp.count += 1
return func()
tmp.count = 0
return tmp
@counter
def mock_activated_status():
if mock_activated_status.count % 2 == 1:
return Maintenance({'status': 'deactivated'})
return Maintenance({'status': 'activated'})
def mock_is_timeout(*_):
return True
def node_instance_get_mock():
return NodeInstance({
'id': uuid4(),
'deployment_id': 'deployment_id',
'host_id': 'host_id',
'node_id': 'node_id',
'state': 'started',
'runtime_properties': {
'floating_ip': '127.0.0.1'
}
})
def node_get_mock():
return Node({
'id': uuid4(),
'deployment_id': 'deployment-id',
'blueprint_id': 'blueprint_id',
'host_id': 'host_id',
'type': 'Compute',
'number_of_instances': '1',
'planned_number_of_instances': '2',
'properties': {
'port': '8080'
}
})
def make_tarfile(output_filename, source_dir, write_type='w'):
with tarfile.open(output_filename, write_type) as tar:
tar.add(source_dir, arcname=os.path.basename(source_dir))
@contextmanager
def mock_stdout():
stdout = StringIO()
with patch('sys.stdout', stdout):
yield stdout
@contextmanager
def mock_logger(attribute_path):
output = StringIO()
class MockLogger(object):
@staticmethod
def info(message):
output.write(message)
with patch(attribute_path, MockLogger):
yield output
class MockListResponse(object):
def __init__(self, items, _):
self.items = items
self.metadata = None
def __iter__(self):
return iter(self.items)
def __getitem__(self, index):
return self.items[index]
def __len__(self):
return len(self.items)
def sort(self, cmp=None, key=None, reverse=False):
return self.items.sort(cmp, key, reverse)
| {
"content_hash": "6a523a8c0ff7da161211c436dc3baa1a",
"timestamp": "",
"source": "github",
"line_count": 179,
"max_line_length": 79,
"avg_line_length": 25.407821229050278,
"alnum_prop": 0.6244503078276166,
"repo_name": "codilime/cloudify-cli",
"id": "3090de074d9f2a9b2096afa555d34548b1397ee3",
"size": "5185",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cloudify_cli/tests/commands/mocks.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "6346"
},
{
"name": "Inno Setup",
"bytes": "4990"
},
{
"name": "Makefile",
"bytes": "4178"
},
{
"name": "PowerShell",
"bytes": "8677"
},
{
"name": "Python",
"bytes": "1916612"
},
{
"name": "Ruby",
"bytes": "29040"
},
{
"name": "Shell",
"bytes": "9505"
}
],
"symlink_target": ""
} |
"""
Example of using MathApp Circle class.
"""
from ggame.point import Point
from ggame.circle import Circle
from ggame.mathapp import MathApp
# P1 is the center of the circle
P1 = Point((0.5, 1))
# P2 is on the perimeter
P2 = Point((1.3, 1))
# define a circle from center and perimeter points
C = Circle(P1, P2)
# allow the user to drag the perimeter point to resize the circle
P2.movable = True
MathApp().run()
| {
"content_hash": "228216df05e5688b20558151931680f9",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 65,
"avg_line_length": 24.41176470588235,
"alnum_prop": 0.727710843373494,
"repo_name": "BrythonServer/ggame",
"id": "2e694943d0f6f2090107472c3e024fbaff621ead",
"size": "415",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "examples/circlecircle.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "203298"
},
{
"name": "Shell",
"bytes": "1010"
}
],
"symlink_target": ""
} |
from OSMHandler.Checker import Check
from Downloader.Defaults import OSM_MARKING_VERSION
import numpy
import os.path
print "imported Marker.py, inside DB requiring section."
stopfile = '/home/ekmek/Desktop/Project II/stop_dir/stop.txt'
# global variable - we can reuse Marker
ConnHandler = None
def Mark(Segments, radius = 50, interval = None, backwards=False):
    '''
    Mark Segments with radius. Call MarkSegment on each Segment.
    :param Segments: list of Segment objects to be marked with OSM data
    :param radius: radius in meters
    :param interval: [start, end) index range of Segments to process (default: all)
    :param backwards: iterate over the interval in reverse order
    :return:
    '''
from OSMHandler.ConnectionHandlerObj import ConnectionHandler
global ConnHandler
if ConnHandler is None:
ConnHandler = ConnectionHandler()
ConnHandler.report()
#if Check(Segments):
# print "Segments seem to be up to date, breaking."
# return True
# Mark segments
if interval is None:
interval = [0, len(Segments)]
if backwards:
indices = range(interval[1]-1, interval[0]-1, -1)
else:
indices = range(interval[0], interval[1])
i = 0
print interval
for index in indices:
Segment = Segments[index]
i += 1
stop = checkForStopFile()
is_marked = Segment.checkOSMVersion()
print i, "th from", interval[1] - interval[0], "[stop ",stop,", is marked ",is_marked,"]", interval, "index=", index
if not stop and not is_marked:
MarkSegment(Segment, radius = radius)
Segments[index] = Segment
def checkForStopFile():
stopfile_present = os.path.isfile(stopfile)
stop = stopfile_present
return stop
def MarkSegment(Segment, radius = 50):
'''
Mark Segment with new OSM vector depending on what the PosgreSQL db will tell us about the neighborhood.
:param Segment: One Segment object initially without OSM data.
:param radius: radius in meters
:return:
'''
index = 0
for distinctLocation in Segment.DistinctLocations:
print "We are in ", distinctLocation
# we combine them here!
table_names = ["planet_osm_line", "planet_osm_point", "planet_osm_polygon", "planet_osm_roads"]
global ConnHandler
cumulative_vector = []
for table in table_names:
[nearby_vector, _] = ConnHandler.query_location(location=[distinctLocation[1], distinctLocation[0]], radius=radius, table_name=table)
if len(cumulative_vector) == 0:
cumulative_vector = nearby_vector
else:
from operator import add
cumulative_vector = map(add, cumulative_vector, nearby_vector)
print len(nearby_vector), numpy.sum(nearby_vector), nearby_vector
Segment.markWithVector(cumulative_vector, index, OSM_MARKING_VERSION)
index += 1
print Segment
def closeConnection():
# Close connection please.
global ConnHandler
ConnHandler.close_connection()
ConnHandler.report()
def MergeMarking_LoadAndSave(pats_seg1, path_seg2, path_out):
import Downloader.DataOperations
Segments1 = Downloader.DataOperations.LoadDataFile(pats_seg1)
Segments2 = Downloader.DataOperations.LoadDataFile(path_seg2)
MergedSegments = MergeMarking(Segments1, Segments2)
Downloader.DataOperations.SaveDataFile(path_out, MergedSegments)
def MergeMarking(Segments1, Segments2):
print "Merging labeling from two segments files, lens:", len(Segments1), len(Segments2)
if len(Segments1) <> len(Segments2):
return None
for i in range(0, len(Segments1)):
S1 = Segments1[i]
S2 = Segments2[i]
s1_is_marked = S1.checkOSMVersion()
s2_is_marked = S2.checkOSMVersion()
if not s1_is_marked and s2_is_marked:
# mark s1 by the osms which are in s2!
osm_version = S2.Segment_OSM_MARKING_VERSION
for j in range(0,len(S2.DistinctNearbyVector)):
nearby_vector = S2.DistinctNearbyVector[j]
S1.markWithVector(nearby_vector, j, osm_version)
Segments1[i] = S1
return Segments1
def loadAndAnalyzeMarking(path):
import Downloader.DataOperations
Segments = Downloader.DataOperations.LoadDataFile(path)
analyzeMarking(Segments)
def analyzeMarking(Segments):
'''
prints index boundaries of which segments are marked and which are not
:param Segments:
:return:
'''
started_marked_region = False
a = -1
i = 0
while i<len(Segments):
is_marked = Segments[i].checkOSMVersion(verbose=False)
if is_marked and not started_marked_region:
started_marked_region = True
a = i
if not is_marked and started_marked_region:
started_marked_region = False
print "Region ",a,"-",i," was marked!"
i += 1
if started_marked_region:
print "Region ", a, "-", i, " was marked!"
started_marked_region = False
"""
s1path = '/home/ekmek/Desktop/Project II/MGR-Project-Code/Data/StreetViewData/Prague_DOP_Cyklotrasy_l/SegmentsData_MERGED.dump'
s2path = '/home/ekmek/Desktop/Project II/MGR-Project-Code/Data/StreetViewData/Prague_DOP_Cyklotrasy_l/SegmentsData_fromBack__backup_995.dump'
s_merged='/home/ekmek/Desktop/Project II/MGR-Project-Code/Data/StreetViewData/Prague_DOP_Cyklotrasy_l/SegmentsData_MERGED.dump'
MergeMarking_LoadAndSave(s1path, s2path, s_merged)
"""
#s_merged='/home/ekmek/Desktop/Project II/MGR-Project-Code/Data/StreetViewData/Prague_DOP_Cyklotrasy_l/SegmentsData_MERGED.dump'
#loadAndAnalyzeMarking(s_merged) | {
"content_hash": "7dd04e575bf146c04edf22e6267a2389",
"timestamp": "",
"source": "github",
"line_count": 173,
"max_line_length": 145,
"avg_line_length": 31.890173410404625,
"alnum_prop": 0.6697480514772521,
"repo_name": "previtus/MGR-Project-Code",
"id": "80a6d7a380859f1e49c9e8c09eee5b12a7ebf0bc",
"size": "5695",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "OSMHandler/Marker.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "247470"
},
{
"name": "Python",
"bytes": "505284"
},
{
"name": "Shell",
"bytes": "1109"
}
],
"symlink_target": ""
} |
from oslo_config import cfg
from oslo_log import log
from neutron._i18n import _
from nuage_neutron.plugins.common import constants
LOG = log.getLogger(__name__)
underlay_routing_choices = [constants.NUAGE_UNDERLAY_SNAT,
constants.NUAGE_UNDERLAY_ROUTE,
constants.NUAGE_UNDERLAY_OFF,
constants.NUAGE_UNDERLAY_NOT_AVAILABLE]
restproxy_opts = [
cfg.StrOpt('server', default='vsd.example.com:8443',
help=_("IP address and port of Nuage's VSD server or cluster")),
cfg.StrOpt('serverauth', default='csproot:csproot',
secret=True,
help=_('Username and password for authentication')),
cfg.BoolOpt('serverssl', default=True,
help=_('Boolean for SSL connection with VSD server')),
cfg.StrOpt('verify_cert', default='False',
help=_("Either a boolean (True or False), indicating whether "
"we verify the VSD's certificate, or a string which is "
"the local path of the VSD's PEM file or CA_BUNDLE file "
"to be verified")),
cfg.IntOpt('server_timeout', default=30,
help=_('VSD server invocation timeout')),
cfg.IntOpt('server_max_retries', default=5,
help=_('Number of retries invoking VSD server')),
cfg.IntOpt('server_max_retries_on_domain_delete', default=5,
help=_('Number of retries deleting domain')),
cfg.StrOpt('base_uri', default='/nuage/api/v6',
help=_('Nuage provided base uri to reach out to VSD')),
cfg.StrOpt('organization', default='csp',
help=_('Organization name in which VSD will orchestrate '
'network resources using openstack')),
cfg.StrOpt('auth_resource', default='/me',
help=_('Nuage provided uri for initial authorization to '
'access VSD')),
cfg.StrOpt('default_net_partition_name',
default='OpenStackDefaultNetPartition',
help=_('Default Network partition in which VSD will '
'orchestrate network resources using openstack')),
cfg.IntOpt('default_floatingip_quota', default=254,
help=_('Per netpartition quota of floating ips')),
cfg.StrOpt('default_l3domain_template', default=''),
cfg.StrOpt('default_l2domain_template', default=''),
cfg.StrOpt('default_isolated_zone', default=''),
cfg.StrOpt('default_shared_zone', default=''),
cfg.StrOpt(constants.NUAGE_UNDERLAY_INI,
choices=underlay_routing_choices,
default=constants.NUAGE_UNDERLAY_OFF),
cfg.BoolOpt('nuage_fip_underlay', default=False),
cfg.StrOpt('cms_id', default=None,
help=_('ID of a Cloud Management System on the VSD which '
'identifies this openstack instance')),
cfg.StrOpt('nuage_uplink', default=None)
]
fiprate_opts = [
cfg.StrOpt('fip_rate_change_log', default=''),
cfg.IntOpt('default_ingress_fip_rate_kbps',
               help=_('FIP rate limit in ingress direction in kbps.'),
min=-1, max=constants.MAX_VSD_INTEGER,
default=-1),
cfg.IntOpt('default_egress_fip_rate_kbps',
               help=_('FIP rate limit in egress direction in kbps.'),
min=-1, max=constants.MAX_VSD_INTEGER,
default=None),
]
plugin_opts = [
cfg.ListOpt('device_owner_prefix', default=[],
help=_('List of device_owners prefix for which vports are '
'not created in VSD.')),
cfg.BoolOpt('flow_logging_enabled', default=False,
help=_('Set to true to enable flow logging on all policy '
'entries. Changing this does not affect existing '
'policy entries.')),
cfg.BoolOpt('stats_collection_enabled', default=False,
help=_('Set to true to enable statistics collecting on all '
'policy entries. Changing this does not affect '
'existing policy entries.')),
cfg.BoolOpt('default_allow_non_ip', default=False,
help=_('Set to true to allow non-IP traffic by default')),
cfg.BoolOpt('enable_ingress_replication', default=False,
help=_('Set to true to enable ingress replication (NETCONF)')),
cfg.BoolOpt('enable_native_sriov_trunks', default=False,
help=_('Set to true to enable SRIOV trunks '
'on non-Nuage networks')),
cfg.ListOpt('experimental_features', default=[],
help=_('List of experimental features to be enabled.')),
cfg.ListOpt('enable_debug', default=[],
help=_('List of debug features to be enabled.'))
]
def nuage_register_cfg_opts():
cfg.CONF.register_opts(restproxy_opts, 'RESTPROXY')
cfg.CONF.register_opts(fiprate_opts, 'FIPRATE')
cfg.CONF.register_opts(plugin_opts, 'PLUGIN')
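# Quick usage sketch (standard oslo.config behaviour; the values shown are
# simply the defaults declared above):
#   nuage_register_cfg_opts()
#   cfg.CONF.RESTPROXY.server                       # 'vsd.example.com:8443'
#   cfg.CONF.FIPRATE.default_ingress_fip_rate_kbps  # -1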
def is_enabled(name):
if name in cfg.CONF.PLUGIN.experimental_features:
LOG.info('Experimental feature %s enabled!', name)
return True
if name in cfg.CONF.PLUGIN.enable_debug:
return True
return False
def default_allow_non_ip_enabled():
return cfg.CONF.PLUGIN.default_allow_non_ip
def ingress_replication_enabled():
return cfg.CONF.PLUGIN.enable_ingress_replication
def enable_native_sriov_trunks():
return cfg.CONF.PLUGIN.enable_native_sriov_trunks
| {
"content_hash": "77653ad0bb03a26bb35e9ef99abc02df",
"timestamp": "",
"source": "github",
"line_count": 124,
"max_line_length": 79,
"avg_line_length": 44.49193548387097,
"alnum_prop": 0.6083016131955773,
"repo_name": "nuagenetworks/nuage-openstack-neutron",
"id": "f98079aa8be5cb53b49221da8837736037e9e853",
"size": "6133",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nuage_neutron/plugins/common/config.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "1029"
},
{
"name": "Python",
"bytes": "1373451"
},
{
"name": "Shell",
"bytes": "22838"
}
],
"symlink_target": ""
} |
"""Contains implementations for each ported operation in NumPy.
Attributes:
    decoder_ (NumPyDecoder): Decodes Weld results back into NumPy values
    encoder_ (NumPyEncoder): Encodes NumPy inputs for Weld
norm_factor_id_ (int): Unique IDs given to norm factor literals
"""
from encoders import *
from weld.weldobject import *
encoder_ = NumPyEncoder()
decoder_ = NumPyDecoder()
def div(array, other, ty):
"""
    Normalizes the passed-in array by the passed-in quantity.
Args:
array (WeldObject / Numpy.ndarray): Input array to normalize
other (WeldObject / float): Normalization factor
ty (WeldType): Type of each element in the input array
Returns:
A WeldObject representing this computation
"""
weld_obj = WeldObject(encoder_, decoder_)
array_var = weld_obj.update(array)
if isinstance(array, WeldObject):
array_var = array.obj_id
weld_obj.dependencies[array_var] = array
other_var = None
if isinstance(other, WeldObject):
other_var = other.obj_id
weld_obj.dependencies[other_var] = other
else:
other_var = ("%.2f" % other)
weld_template = """
map(
%(array)s,
|value| value / %(ty)s(%(other)s)
)
"""
weld_obj.weld_code = weld_template % {"array": array_var,
"other": other_var,
"ty": ty}
return weld_obj
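# Example of div's generated code: with ty="f64" and other=2.0 the template
# substitutes to (modulo the auto-generated identifier for the input array):
#   map(<array_id>, |value| value / f64(2.00))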
def aggr(array, op, initial_value, ty):
"""
Computes the aggregate of elements in the array.
Args:
array (WeldObject / Numpy.ndarray): Input array to aggregate
op (str): Op string used to aggregate the array (+ / *)
initial_value (int): Initial value for aggregation
ty (WeldType): Type of each element in the input array
Returns:
A WeldObject representing this computation
"""
weld_obj = WeldObject(encoder_, decoder_)
array_var = weld_obj.update(array)
if isinstance(array, WeldObject):
array_var = array.obj_id
weld_obj.dependencies[array_var] = array
weld_template = """
result(
for(
%(array)s,
merger[%(ty)s,%(op)s],
|b, i, e| merge(b, e)
)
)
"""
weld_obj.weld_code = weld_template % {
"array": array_var, "ty": ty, "op": op}
return weld_obj
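# Example of aggr's generated code: aggr(arr, "+", 0, "i64") substitutes to
#   result(for(<array_id>, merger[i64,+], |b, i, e| merge(b, e)))
# Note the template starts from the merger's identity element, so the
# initial_value argument does not actually appear in the Weld code.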
def dot(matrix, vector, matrix_ty, vector_ty):
"""
Computes the dot product between a matrix and a vector.
Args:
matrix (WeldObject / Numpy.ndarray): 2-d input matrix
vector (WeldObject / Numpy.ndarray): 1-d input vector
        matrix_ty (WeldType): Type of each element in the input matrix
        vector_ty (WeldType): Type of each element in the input vector
Returns:
A WeldObject representing this computation
"""
weld_obj = WeldObject(encoder_, decoder_)
matrix_var = weld_obj.update(matrix)
if isinstance(matrix, WeldObject):
matrix_var = matrix.obj_id
weld_obj.dependencies[matrix_var] = matrix
vector_var = weld_obj.update(vector)
loopsize_annotation = ""
if isinstance(vector, WeldObject):
vector_var = vector.obj_id
weld_obj.dependencies[vector_var] = vector
if isinstance(vector, np.ndarray):
loopsize_annotation = "@(loopsize: %dL)" % len(vector)
weld_template = """
map(
%(matrix)s,
|row: vec[%(matrix_ty)s]|
result(
%(loopsize_annotation)s
for(
result(
%(loopsize_annotation)s
for(
zip(row, %(vector)s),
appender,
|b2, i2, e2: {%(matrix_ty)s, %(vector_ty)s}|
merge(b2, f64(e2.$0 * %(matrix_ty)s(e2.$1)))
)
),
merger[f64,+],
|b, i, e| merge(b, e)
)
)
)
"""
weld_obj.weld_code = weld_template % {"matrix": matrix_var,
"vector": vector_var,
"matrix_ty": matrix_ty,
"vector_ty": vector_ty,
"loopsize_annotation": loopsize_annotation}
return weld_obj
def exp(array, ty):
"""
    Computes the per-element exponential of the passed-in array.
Args:
array (WeldObject / Numpy.ndarray): Input array
ty (WeldType): Type of each element in the input array
Returns:
A WeldObject representing this computation
"""
weld_obj = WeldObject(encoder_, decoder_)
array_var = weld_obj.update(array)
if isinstance(array, WeldObject):
array_var = array.obj_id
weld_obj.dependencies[array_var] = array
weld_template = """
map(
%(array)s,
|ele: %(ty)s| exp(ele)
)
"""
weld_obj.weld_code = weld_template % {"array": array_var, "ty": ty}
return weld_obj
| {
"content_hash": "f719da942a47cd2c5eedbc6268d3989a",
"timestamp": "",
"source": "github",
"line_count": 170,
"max_line_length": 85,
"avg_line_length": 28.847058823529412,
"alnum_prop": 0.547716150081566,
"repo_name": "sppalkia/weld",
"id": "01f349e0ef56465fdfea5d1951701002c52fee57",
"size": "4904",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "python/grizzly/grizzly/numpy_weld_impl.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "799"
},
{
"name": "C",
"bytes": "660"
},
{
"name": "C++",
"bytes": "27987"
},
{
"name": "Makefile",
"bytes": "2660"
},
{
"name": "Python",
"bytes": "301176"
},
{
"name": "Rust",
"bytes": "1127035"
},
{
"name": "Shell",
"bytes": "2090"
}
],
"symlink_target": ""
} |
import socket
def Get_local_ip():
"""
Returns the actual ip of the local machine.
This code figures out what source address would be used if some traffic
were to be sent out to some well known address on the Internet. In this
    case, Cloudflare's public DNS resolver is used, but the specific address does not
matter much. No traffic is actually sent.
"""
try:
socket.setdefaulttimeout(5)
csock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
csock.connect(('1.1.1.1', 80))
(addr, port) = csock.getsockname()
csock.close()
return addr
except socket.error:
return "127.0.0.1"
if __name__ == "__main__":
IPADDR = Get_local_ip()
print(IPADDR)
| {
"content_hash": "12f010d1e1448ec718ead368723c7de7",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 73,
"avg_line_length": 30.90909090909091,
"alnum_prop": 0.6779411764705883,
"repo_name": "lj2007331/lnmp",
"id": "80cd99fabd40f4c4ece7e249a40f6d045654fe0e",
"size": "702",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "include/get_ipaddr.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "36774"
},
{
"name": "Python",
"bytes": "2275"
},
{
"name": "Shell",
"bytes": "669680"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import, division, print_function
from itertools import chain
import numpy as np
import torch
from odin.utils import as_tuple
class SequentialNetwork(torch.nn.Sequential):
def __init__(self, *args):
args = list(chain(*[as_tuple(a) for a in args]))
super().__init__(*args)
class ParallelNetwork(torch.nn.Sequential):
def forward(self, input):
outputs = []
for module in self._modules.values():
outputs.append(module(input))
return outputs
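# Minimal usage sketch (layer sizes are illustrative assumptions): every
# branch receives the same input and the outputs come back as a list.
#   net = ParallelNetwork(torch.nn.Linear(4, 2), torch.nn.Linear(4, 3))
#   outs = net(torch.randn(1, 4))  # [tensor of shape (1, 2), tensor of (1, 3)]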
| {
"content_hash": "d6945c1b9c907e958f21ad18cb9eab24",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 64,
"avg_line_length": 20.791666666666668,
"alnum_prop": 0.6933867735470942,
"repo_name": "imito/odin",
"id": "6eca22eba08f522fb1cb07da684e8e8c2bbd2667",
"size": "499",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "odin/networks_torch/util_modules.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1516670"
}
],
"symlink_target": ""
} |
from cms.api import get_page_draft
from cms.toolbar_pool import toolbar_pool
from cms.toolbar_base import CMSToolbar
from cms.utils import get_cms_setting
from cms.utils.permissions import has_page_change_permission
from django.core.urlresolvers import reverse, NoReverseMatch
from django.utils.translation import ugettext_lazy as _
from .models import PageBannerExtension
_banner_change_url = 'admin:djangocms_pagebanner_pagebannerextension_change'
_banner_add_url = 'admin:djangocms_pagebanner_pagebannerextension_add'
@toolbar_pool.register
class PageBannerExtensionToolbar(CMSToolbar):
def populate(self):
# always use draft if we have a page
self.page = get_page_draft(self.request.current_page)
if not self.page:
# Nothing to do
return
# check global permissions if CMS_PERMISSIONS is active
if get_cms_setting('PERMISSION'):
has_global_current_page_change_permission = \
has_page_change_permission(self.request)
else:
has_global_current_page_change_permission = False
# check if user has page edit permission
can_change = (self.request.current_page and
self.request.current_page.has_change_permission(
self.request))
if has_global_current_page_change_permission or can_change:
try:
page_banner_extension = PageBannerExtension.objects.get(
extended_object_id=self.page.id)
except PageBannerExtension.DoesNotExist:
page_banner_extension = None
try:
if page_banner_extension:
url = reverse(_banner_change_url,
args=(page_banner_extension.pk,))
else:
url = (reverse(_banner_add_url) +
'?extended_object=%s' % self.page.pk)
except NoReverseMatch:
# not in urls
pass
else:
not_edit_mode = not self.toolbar.edit_mode
current_page_menu = self.toolbar.get_or_create_menu('page')
current_page_menu.add_modal_item(_('Page banner'),
url=url,
disabled=not_edit_mode)
| {
"content_hash": "4786f6c1a1983c07e8724ef784547699",
"timestamp": "",
"source": "github",
"line_count": 56,
"max_line_length": 76,
"avg_line_length": 42.535714285714285,
"alnum_prop": 0.5885810243492863,
"repo_name": "MjAbuz/foundation",
"id": "71f05726793fa3f17f84622c2fdc4b1adae30d42",
"size": "2382",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "foundation/djangocms_pagebanner/cms_toolbar.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "11867"
},
{
"name": "HTML",
"bytes": "54905"
},
{
"name": "JavaScript",
"bytes": "345"
},
{
"name": "Python",
"bytes": "685955"
},
{
"name": "Shell",
"bytes": "4363"
}
],
"symlink_target": ""
} |
from pytimeseries.transformer import *
from pytimeseries.extras import *
from pytimeseries.base_model import *
from pytimeseries.AR import *
from pytimeseries.HoltWinters import *
| {
"content_hash": "8e46616e85a18aa5f3082ea121c1f956",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 38,
"avg_line_length": 36.2,
"alnum_prop": 0.8232044198895028,
"repo_name": "jdvelasq/pytimeseries",
"id": "e7bc917dd01ff36db5ab134f05fbe4f8c2389d3c",
"size": "181",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pytimeseries/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "200429"
},
{
"name": "Python",
"bytes": "97700"
}
],
"symlink_target": ""
} |
# Hey this is a very long commented line just to trigger pylint's line-too-long message. It is just above a dummy function with a very long code line.
def dummy_function():
print("What a beautiful dummy function with such a very long line inside that it should trigger pylint's line-too-long message ")
# Hey this is a very long commented line at the end of the module. It is just here to trigger pylint's line-too-long message.
| {
"content_hash": "ef82ca8260d6e42eef2e5f33d294f319",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 150,
"avg_line_length": 73.5,
"alnum_prop": 0.7528344671201814,
"repo_name": "ekwoodrich/python-dvrip",
"id": "21cb756551aaf4ffd532b8517ecd15d4d8aecf54",
"size": "492",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "env/lib/python3.5/site-packages/pylint/test/functional/line_too_long_end_of_module.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "5706"
}
],
"symlink_target": ""
} |
from test.assert_json import assert_json
from topcoder.spam_checker import solution
def test_spam_checker():
assert_json('spam_checker', solution)
| {
"content_hash": "817bb3c9df32d383e7d4459bbde6edf3",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 45,
"avg_line_length": 31.6,
"alnum_prop": 0.7468354430379747,
"repo_name": "erichaase/topcoder-python",
"id": "c64324860c09ad6beb242f48b93116fa1a2ef2c5",
"size": "158",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/test_spam_checker.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "22484"
}
],
"symlink_target": ""
} |
import argparse
import logging
import shlex
import typing
import unittest
from os import linesep
from os import path
from os.path import exists
from shutil import rmtree
from tempfile import mkdtemp
import pytest
import apache_beam as beam
from apache_beam import Impulse
from apache_beam import Map
from apache_beam import Pipeline
from apache_beam.coders import VarIntCoder
from apache_beam.io.external.generate_sequence import GenerateSequence
from apache_beam.io.kafka import ReadFromKafka
from apache_beam.io.kafka import WriteToKafka
from apache_beam.metrics import Metrics
from apache_beam.options.pipeline_options import DebugOptions
from apache_beam.options.pipeline_options import FlinkRunnerOptions
from apache_beam.options.pipeline_options import PortableOptions
from apache_beam.options.pipeline_options import StandardOptions
from apache_beam.runners.portability import job_server
from apache_beam.runners.portability import portable_runner
from apache_beam.runners.portability import portable_runner_test
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.transforms import userstate
from apache_beam.transforms.sql import SqlTransform
# Run as
#
# pytest flink_runner_test.py[::TestClass::test_case] \
# --test-pipeline-options="--environment_type=LOOPBACK"
_LOGGER = logging.getLogger(__name__)
Row = typing.NamedTuple("Row", [("col1", int), ("col2", str)])
beam.coders.registry.register_coder(Row, beam.coders.RowCoder)
class FlinkRunnerTest(portable_runner_test.PortableRunnerTest):
_use_grpc = True
_use_subprocesses = True
conf_dir = None
expansion_port = None
flink_job_server_jar = None
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.environment_type = None
self.environment_config = None
@pytest.fixture(autouse=True)
def parse_options(self, request):
if not request.config.option.test_pipeline_options:
raise unittest.SkipTest(
'Skipping because --test-pipeline-options is not specified.')
test_pipeline_options = request.config.option.test_pipeline_options
parser = argparse.ArgumentParser(add_help=True)
parser.add_argument(
'--flink_job_server_jar',
help='Job server jar to submit jobs.',
action='store')
parser.add_argument(
'--environment_type',
default='LOOPBACK',
choices=['DOCKER', 'PROCESS', 'LOOPBACK'],
help='Set the environment type for running user code. DOCKER runs '
'user code in a container. PROCESS runs user code in '
'automatically started processes. LOOPBACK runs user code on '
'the same process that originally submitted the job.')
parser.add_argument(
'--environment_option',
'--environment_options',
dest='environment_options',
action='append',
default=None,
help=(
'Environment configuration for running the user code. '
'Recognized options depend on --environment_type.\n '
'For DOCKER: docker_container_image (optional)\n '
'For PROCESS: process_command (required), process_variables '
'(optional, comma-separated)\n '
'For EXTERNAL: external_service_address (required)'))
known_args, unknown_args = parser.parse_known_args(
shlex.split(test_pipeline_options))
if unknown_args:
_LOGGER.warning('Discarding unrecognized arguments %s' % unknown_args)
self.set_flink_job_server_jar(
known_args.flink_job_server_jar or
job_server.JavaJarJobServer.path_to_beam_jar((
':runners:flink:%s:job-server:shadowJar' %
FlinkRunnerOptions.PUBLISHED_FLINK_VERSIONS[-1])))
self.environment_type = known_args.environment_type
self.environment_options = known_args.environment_options
@classmethod
def tearDownClass(cls):
if cls.conf_dir and exists(cls.conf_dir):
_LOGGER.info("removing conf dir: %s" % cls.conf_dir)
rmtree(cls.conf_dir)
super().tearDownClass()
@classmethod
def _create_conf_dir(cls):
"""Create (and save a static reference to) a "conf dir", used to provide
metrics configs and verify metrics output
It gets cleaned up when the suite is done executing"""
if hasattr(cls, 'conf_dir'):
cls.conf_dir = mkdtemp(prefix='flinktest-conf')
# path for a FileReporter to write metrics to
cls.test_metrics_path = path.join(cls.conf_dir, 'test-metrics.txt')
# path to write Flink configuration to
conf_path = path.join(cls.conf_dir, 'flink-conf.yaml')
file_reporter = 'org.apache.beam.runners.flink.metrics.FileReporter'
with open(conf_path, 'w') as f:
f.write(
linesep.join([
'metrics.reporters: file',
'metrics.reporter.file.class: %s' % file_reporter,
'metrics.reporter.file.path: %s' % cls.test_metrics_path,
'metrics.scope.operator: <operator_name>',
]))
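    # The rendered flink-conf.yaml therefore looks like (the reporter path
    # points at the per-run temp dir created above):
    #   metrics.reporters: file
    #   metrics.reporter.file.class: org.apache.beam.runners.flink.metrics.FileReporter
    #   metrics.reporter.file.path: <conf_dir>/test-metrics.txt
    #   metrics.scope.operator: <operator_name>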
@classmethod
def _subprocess_command(cls, job_port, expansion_port):
# will be cleaned up at the end of this method, and recreated and used by
# the job server
tmp_dir = mkdtemp(prefix='flinktest')
cls._create_conf_dir()
cls.expansion_port = expansion_port
try:
return [
'java',
'-Dorg.slf4j.simpleLogger.defaultLogLevel=warn',
'-jar',
cls.flink_job_server_jar,
'--flink-master',
'[local]',
'--flink-conf-dir',
cls.conf_dir,
'--artifacts-dir',
tmp_dir,
'--job-port',
str(job_port),
'--artifact-port',
'0',
'--expansion-port',
str(expansion_port),
]
finally:
rmtree(tmp_dir)
@classmethod
def get_runner(cls):
return portable_runner.PortableRunner()
@classmethod
def get_expansion_service(cls):
    # TODO: Move the expansion service address into PipelineOptions
return 'localhost:%s' % cls.expansion_port
@classmethod
def set_flink_job_server_jar(cls, flink_job_server_jar):
cls.flink_job_server_jar = flink_job_server_jar
def create_options(self):
options = super().create_options()
options.view_as(DebugOptions).experiments = ['beam_fn_api']
options._all_options['parallelism'] = 2
options.view_as(PortableOptions).environment_type = self.environment_type
options.view_as(
PortableOptions).environment_options = self.environment_options
return options
# Can't read host files from within docker, read a "local" file there.
def test_read(self):
print('name:', __name__)
with self.create_pipeline() as p:
lines = p | beam.io.ReadFromText('/etc/profile')
assert_that(lines, lambda lines: len(lines) > 0)
def test_no_subtransform_composite(self):
raise unittest.SkipTest("BEAM-4781")
def test_external_transform(self):
with self.create_pipeline() as p:
res = (
p
| GenerateSequence(
start=1, stop=10, expansion_service=self.get_expansion_service()))
assert_that(res, equal_to([i for i in range(1, 10)]))
def test_expand_kafka_read(self):
# We expect to fail here because we do not have a Kafka cluster handy.
# Nevertheless, we check that the transform is expanded by the
# ExpansionService and that the pipeline fails during execution.
with self.assertRaises(Exception) as ctx:
with self.create_pipeline() as p:
# pylint: disable=expression-not-assigned
(
p
| ReadFromKafka(
consumer_config={
'bootstrap.servers': 'notvalid1:7777, notvalid2:3531',
'group.id': 'any_group'
},
topics=['topic1', 'topic2'],
key_deserializer='org.apache.kafka.'
'common.serialization.'
'ByteArrayDeserializer',
value_deserializer='org.apache.kafka.'
'common.serialization.'
'LongDeserializer',
commit_offset_in_finalize=True,
timestamp_policy=ReadFromKafka.create_time_policy,
expansion_service=self.get_expansion_service()))
self.assertTrue(
'No resolvable bootstrap urls given in bootstrap.servers' in str(
ctx.exception),
'Expected to fail due to invalid bootstrap.servers, but '
'failed due to:\n%s' % str(ctx.exception))
def test_expand_kafka_write(self):
# We just test the expansion but do not execute.
# pylint: disable=expression-not-assigned
(
self.create_pipeline()
| Impulse()
| Map(lambda input: (1, input))
| WriteToKafka(
producer_config={
'bootstrap.servers': 'localhost:9092, notvalid2:3531'
},
topic='topic1',
key_serializer='org.apache.kafka.'
'common.serialization.'
'LongSerializer',
value_serializer='org.apache.kafka.'
'common.serialization.'
'ByteArraySerializer',
expansion_service=self.get_expansion_service()))
def test_sql(self):
with self.create_pipeline() as p:
output = (
p
| 'Create' >> beam.Create([Row(x, str(x)) for x in range(5)])
| 'Sql' >> SqlTransform(
"""SELECT col1, col2 || '*' || col2 as col2,
power(col1, 2) as col3
FROM PCOLLECTION
""",
expansion_service=self.get_expansion_service()))
assert_that(
output,
equal_to([(x, '{x}*{x}'.format(x=x), x * x) for x in range(5)]))
def test_flattened_side_input(self):
# Blocked on support for transcoding
# https://jira.apache.org/jira/browse/BEAM-6523
super().test_flattened_side_input(with_transcoding=False)
def test_metrics(self):
super().test_metrics(check_gauge=False)
def test_flink_metrics(self):
"""Run a simple DoFn that increments a counter and verifies state
caching metrics. Verifies that its expected value is written to a
temporary file by the FileReporter"""
counter_name = 'elem_counter'
state_spec = userstate.BagStateSpec('state', VarIntCoder())
class DoFn(beam.DoFn):
def __init__(self):
self.counter = Metrics.counter(self.__class__, counter_name)
_LOGGER.info('counter: %s' % self.counter.metric_name)
def process(self, kv, state=beam.DoFn.StateParam(state_spec)):
# Trigger materialization
list(state.read())
state.add(1)
self.counter.inc()
options = self.create_options()
# Test only supports parallelism of 1
options._all_options['parallelism'] = 1
# Create multiple bundles to test cache metrics
options._all_options['max_bundle_size'] = 10
options._all_options['max_bundle_time_millis'] = 95130590130
experiments = options.view_as(DebugOptions).experiments or []
experiments.append('state_cache_size=123')
options.view_as(DebugOptions).experiments = experiments
with Pipeline(self.get_runner(), options) as p:
# pylint: disable=expression-not-assigned
(
p
| "create" >> beam.Create(list(range(0, 110)))
| "mapper" >> beam.Map(lambda x: (x % 10, 'val'))
| "stateful" >> beam.ParDo(DoFn()))
lines_expected = {'counter: 110'}
if options.view_as(StandardOptions).streaming:
lines_expected.update([
# Gauges for the last finished bundle
'stateful.beam_metric:statecache:capacity: 123',
'stateful.beam_metric:statecache:size: 10',
'stateful.beam_metric:statecache:get: 20',
'stateful.beam_metric:statecache:miss: 0',
'stateful.beam_metric:statecache:hit: 20',
'stateful.beam_metric:statecache:put: 0',
'stateful.beam_metric:statecache:evict: 0',
# Counters
'stateful.beam_metric:statecache:get_total: 220',
'stateful.beam_metric:statecache:miss_total: 10',
'stateful.beam_metric:statecache:hit_total: 210',
'stateful.beam_metric:statecache:put_total: 10',
'stateful.beam_metric:statecache:evict_total: 0',
])
else:
# Batch has a different processing model. All values for
# a key are processed at once.
lines_expected.update([
# Gauges
'stateful).beam_metric:statecache:capacity: 123',
# For the first key, the cache token will not be set yet.
# It's lazily initialized after first access in StateRequestHandlers
'stateful).beam_metric:statecache:size: 10',
# We have 11 here because there are 110 / 10 elements per key
'stateful).beam_metric:statecache:get: 12',
'stateful).beam_metric:statecache:miss: 1',
'stateful).beam_metric:statecache:hit: 11',
# State is flushed back once per key
'stateful).beam_metric:statecache:put: 1',
'stateful).beam_metric:statecache:evict: 0',
# Counters
'stateful).beam_metric:statecache:get_total: 120',
'stateful).beam_metric:statecache:miss_total: 10',
'stateful).beam_metric:statecache:hit_total: 110',
'stateful).beam_metric:statecache:put_total: 10',
'stateful).beam_metric:statecache:evict_total: 0',
])
lines_actual = set()
with open(self.test_metrics_path, 'r') as f:
for line in f:
print(line, end='')
for metric_str in lines_expected:
metric_name = metric_str.split()[0]
if metric_str in line:
lines_actual.add(metric_str)
elif metric_name in line:
lines_actual.add(line)
self.assertSetEqual(lines_actual, lines_expected)
def test_sdf_with_watermark_tracking(self):
raise unittest.SkipTest("BEAM-2939")
def test_callbacks_with_exception(self):
raise unittest.SkipTest("BEAM-11021")
def test_register_finalizations(self):
raise unittest.SkipTest("BEAM-11021")
def test_custom_merging_window(self):
raise unittest.SkipTest("BEAM-11004")
# Inherits all other tests.
class FlinkRunnerTestOptimized(FlinkRunnerTest):
# TODO: Remove these tests after resolving BEAM-7248 and enabling
# PortableRunnerOptimized
def create_options(self):
options = super().create_options()
options.view_as(DebugOptions).experiments = [
'pre_optimize=all'
] + options.view_as(DebugOptions).experiments
return options
def test_external_transform(self):
raise unittest.SkipTest("BEAM-7252")
def test_expand_kafka_read(self):
raise unittest.SkipTest("BEAM-7252")
def test_expand_kafka_write(self):
raise unittest.SkipTest("BEAM-7252")
def test_sql(self):
raise unittest.SkipTest("BEAM-7252")
def test_pack_combiners(self):
# Stages produced by translations.pack_combiners are fused
# by translations.greedily_fuse, which prevent the stages
# from being detecting using counters by the test.
self._test_pack_combiners(assert_using_counter_names=False)
class FlinkRunnerTestStreaming(FlinkRunnerTest):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.enable_commit = False
def setUp(self):
self.enable_commit = False
def create_options(self):
options = super().create_options()
options.view_as(StandardOptions).streaming = True
if self.enable_commit:
options._all_options['checkpointing_interval'] = 3000
options._all_options['shutdown_sources_after_idle_ms'] = 60000
return options
def test_callbacks_with_exception(self):
self.enable_commit = True
super().test_callbacks_with_exception()
def test_register_finalizations(self):
self.enable_commit = True
super().test_register_finalizations()
if __name__ == '__main__':
# Run the tests.
logging.getLogger().setLevel(logging.INFO)
unittest.main()
| {
"content_hash": "9286d78816e852f9eba752f8467b7b58",
"timestamp": "",
"source": "github",
"line_count": 442,
"max_line_length": 80,
"avg_line_length": 36.2579185520362,
"alnum_prop": 0.6484462748034444,
"repo_name": "axbaretto/beam",
"id": "cb6345e77efc6e8dcb7fe81128672efe38af9386",
"size": "16831",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "sdks/python/apache_beam/runners/portability/flink_runner_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "1598"
},
{
"name": "Batchfile",
"bytes": "3220"
},
{
"name": "C",
"bytes": "1339873"
},
{
"name": "C++",
"bytes": "1132901"
},
{
"name": "CSS",
"bytes": "124283"
},
{
"name": "Dockerfile",
"bytes": "23950"
},
{
"name": "FreeMarker",
"bytes": "7428"
},
{
"name": "Go",
"bytes": "2795906"
},
{
"name": "Groovy",
"bytes": "187109"
},
{
"name": "HTML",
"bytes": "238575"
},
{
"name": "Java",
"bytes": "39085315"
},
{
"name": "JavaScript",
"bytes": "1221326"
},
{
"name": "Jupyter Notebook",
"bytes": "7396"
},
{
"name": "Makefile",
"bytes": "354938"
},
{
"name": "Python",
"bytes": "51449019"
},
{
"name": "Roff",
"bytes": "70716"
},
{
"name": "Ruby",
"bytes": "4159"
},
{
"name": "Shell",
"bytes": "351541"
},
{
"name": "TeX",
"bytes": "70920"
},
{
"name": "Thrift",
"bytes": "1118"
}
],
"symlink_target": ""
} |
'''
error.py - Exception classes for the protobuf package.
This package contains exception classes mapped to the rpc.proto file.
Authors: Martin Norbury (mnorbury@lcogt.net)
Eric Saunders (esaunders@lcogt.net)
Jan Dittberner (jan@dittberner.info)
May 2009, Nov 2010
'''
# Module imports
from protobuf.socketrpc import rpc_pb2 as rpc_pb
class ProtobufError(Exception):
'''Base exception class for RPC protocol buffer errors.'''
def __init__(self, message, rpc_error_code):
'''ProtobufError constructor.
message - Message string detailing error.
rpc_error_code - Error code from rpc.proto file.
'''
self.message = message
self.rpc_error_code = rpc_error_code
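    # Sketch of how a caller might use this hierarchy (the handler code
    # below is hypothetical, not part of this package):
    #   try:
    #       service.CallMethod(...)
    #   except ProtobufError as e:
    #       log.error('%s (rpc error code %s)', e.message, e.rpc_error_code)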
class BadRequestDataError(ProtobufError):
'''Exception generated for a BadRequestDataError.'''
def __init__(self, message):
super(BadRequestDataError, self).__init__(
message, rpc_pb.BAD_REQUEST_DATA)
class BadRequestProtoError(ProtobufError):
'''Exception generated for a BadRequestProtoError.'''
def __init__(self, message):
super(BadRequestProtoError, self).__init__(
message, rpc_pb.BAD_REQUEST_PROTO)
class ServiceNotFoundError(ProtobufError):
'''Exception generated for a ServiceNotFoundError.'''
def __init__(self, message):
super(ServiceNotFoundError, self).__init__(
message, rpc_pb.SERVICE_NOT_FOUND)
class MethodNotFoundError(ProtobufError):
'''Exception generated for a MethodNotFoundError.'''
def __init__(self, message):
super(MethodNotFoundError, self).__init__(
message, rpc_pb.METHOD_NOT_FOUND)
class RpcError(ProtobufError):
'''Exception generated for an RpcError.'''
def __init__(self, message):
super(RpcError, self).__init__(message, rpc_pb.RPC_ERROR)
class RpcFailed(ProtobufError):
'''Exception generated for an RpcFailed.'''
def __init__(self, message):
super(RpcFailed, self).__init__(message, rpc_pb.RPC_FAILED)
class InvalidRequestProtoError(ProtobufError):
'''Exception generated for an InvalidRequestProtoError.'''
def __init__(self, message):
super(InvalidRequestProtoError, self).__init__(
message, rpc_pb.INVALID_REQUEST_PROTO)
class BadResponseProtoError(ProtobufError):
'''Exception generated for a BadResponseProtoError.'''
def __init__(self, message):
super(BadResponseProtoError, self).__init__(
message, rpc_pb.BAD_RESPONSE_PROTO)
class UnknownHostError(ProtobufError):
'''Exception generated for an UnknownHostError.'''
def __init__(self, message):
super(UnknownHostError, self).__init__(message, rpc_pb.UNKNOWN_HOST)
class IOError(ProtobufError):
'''Exception generated for an IOError.'''
def __init__(self, message):
super(IOError, self).__init__(message, rpc_pb.IO_ERROR)
| {
"content_hash": "737cf42636da048c861dba4fc73dca35",
"timestamp": "",
"source": "github",
"line_count": 103,
"max_line_length": 76,
"avg_line_length": 28.300970873786408,
"alnum_prop": 0.6716981132075471,
"repo_name": "chiiph/protobuf-socket-rpc",
"id": "7321e59c7d5123488f3b30842c99eb626f7a7330",
"size": "4087",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/protobuf/socketrpc/error.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "134361"
}
],
"symlink_target": ""
} |
'''
This is the GNU Radio II_K7 module. Place your Python package
description here (python/__init__.py).
'''
# import swig generated symbols into the II_K7 namespace
try:
# this might fail if the module is python-only
from II_K7_swig import *
except ImportError:
pass
# import any pure python here
#
| {
"content_hash": "f9300057003c9c9f766a5b1fb91121ef",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 61,
"avg_line_length": 21.785714285714285,
"alnum_prop": 0.7311475409836066,
"repo_name": "Innovative-DSP/gr-II_K7",
"id": "a19199032315692af114b84cd77f4dc2ec686047",
"size": "1134",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "1049"
},
{
"name": "C++",
"bytes": "63187"
},
{
"name": "CMake",
"bytes": "88509"
},
{
"name": "Python",
"bytes": "15591"
}
],
"symlink_target": ""
} |
from bambou import NURESTFetcher
class NUDSCPRemarkingPoliciesFetcher(NURESTFetcher):
""" Represents a NUDSCPRemarkingPolicies fetcher
Notes:
This fetcher enables to fetch NUDSCPRemarkingPolicy objects.
See:
bambou.NURESTFetcher
"""
@classmethod
def managed_class(cls):
""" Return NUDSCPRemarkingPolicy class that is managed.
Returns:
.NUDSCPRemarkingPolicy: the managed class
"""
from .. import NUDSCPRemarkingPolicy
return NUDSCPRemarkingPolicy
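        # Typical bambou usage sketch -- the parent attribute name below is
        # an assumption based on vspk's snake_case fetcher convention, not
        # verified here:
        #   policies = parent.dscp_remarking_policies.get()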
| {
"content_hash": "8d8211af61c4e331f47c9f2eb3cdf1c0",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 72,
"avg_line_length": 23,
"alnum_prop": 0.648695652173913,
"repo_name": "nuagenetworks/vspk-python",
"id": "431c50500fc246353f0487219bd0b31f50ed0e52",
"size": "2186",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "vspk/v5_0/fetchers/nudscpremarkingpolicies_fetcher.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "12909327"
}
],
"symlink_target": ""
} |
"""Indexes on updated_at
Revision ID: cd51533c624
Revises: 1afd15b0581f
Create Date: 2014-09-12 23:01:22.542186
"""
# revision identifiers, used by Alembic.
revision = 'cd51533c624'
down_revision = '1afd15b0581f'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.create_index('ix_risk_assessments_updated_at', 'risk_assessments', ['updated_at'], unique=False)
def downgrade():
op.drop_index('ix_risk_assessments_updated_at', table_name='risk_assessments')
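# For reference, the upgrade's index creation corresponds roughly to this
# SQL (exact dialect details vary):
#   CREATE INDEX ix_risk_assessments_updated_at
#       ON risk_assessments (updated_at);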
| {
"content_hash": "e3d7189a3b2f5c8e08789145d4ea4621",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 103,
"avg_line_length": 22.136363636363637,
"alnum_prop": 0.7371663244353183,
"repo_name": "vladan-m/ggrc-core",
"id": "c0f269a167c4f32d81f932e8942a60c56d2d2631",
"size": "488",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "src/ggrc_risk_assessments/migrations/versions/20140912230122_cd51533c624_indexes_on_updated_at.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "230813"
},
{
"name": "Cucumber",
"bytes": "148444"
},
{
"name": "HTML",
"bytes": "6041162"
},
{
"name": "JavaScript",
"bytes": "1893341"
},
{
"name": "Makefile",
"bytes": "5483"
},
{
"name": "Mako",
"bytes": "1720"
},
{
"name": "Python",
"bytes": "1489657"
},
{
"name": "Ruby",
"bytes": "1496"
},
{
"name": "Shell",
"bytes": "11555"
}
],
"symlink_target": ""
} |
import csv
import time
from datetime import datetime
from amazon.api import AmazonAPI
AMAZON_ACCESS_KEY = 'your_access_key'
AMAZON_SECRET_KEY = 'your_secret_key'
AMAZON_ASSOC_TAG = 'your_tag'
PRODUCT_ATTRIBUTES = [
'asin', 'author', 'binding', 'brand', 'browse_nodes', 'ean', 'edition',
'editorial_review', 'eisbn', 'features', 'get_parent', 'isbn', 'label',
'large_image_url', 'list_price', 'manufacturer', 'medium_image_url',
'model', 'mpn', 'offer_url', 'parent_asin', 'part_number',
'price_and_currency', 'publication_date', 'publisher', 'region',
'release_date', 'reviews', 'sku', 'small_image_url', 'tiny_image_url',
'title', 'upc'
]
amazon = AmazonAPI(AMAZON_ACCESS_KEY, AMAZON_SECRET_KEY, AMAZON_ASSOC_TAG)
timestr = time.strftime("%Y%m%d-%H%M%S")
delimiter = ','
keywordFile = 'conf/search_keywords.csv'
outputFile = 'data/amazonItems' + timestr + '.csv'
logFile = 'logs/log' + timestr + '.csv'
failedKeywordsFile = 'logs/failedkeywords' + timestr + '.csv'
logFD = open(logFile, 'w+')
failedKeywordsFD = open(failedKeywordsFile, 'w+')
with open(keywordFile, newline='') as f:
reader = csv.reader(f)
    next(reader)  # skip the header row
count = 0
with open(outputFile, 'w+') as csvFile:
writer = csv.writer(csvFile, delimiter=';', quotechar='"', quoting=csv.QUOTE_ALL)
firstRow = ["product.asin", "product.title", "product.group", "product.price", "currency", "product.offer_url",
"product.large_image_url"]
writer.writerow(firstRow)
startTime = datetime.now().strftime("%Y%m%d-%H%M%S")
for row in reader:
keyword = row[0]
print(keyword)
count += 1
keywordStartTime = datetime.now().strftime("%Y%m%d-%H:%M:%S")
print("processing keyword {}: {} at {}".format(count, keyword, keywordStartTime))
logFD.write("processing keyword {}: {} at {} \n".format(count, keyword, keywordStartTime))
try:
products = amazon.search(Keywords=keyword, SearchIndex='All')
productCount = 0
for product in products:
try:
writer.writerow([product.asin, product.title, product.get_attribute('ProductGroup'),
product.price_and_currency[0], product.price_and_currency[1],
product.offer_url, product.large_image_url])
productCount += 1
except Exception as e:
print("error: %s".format(e))
logFD.write("error: %s \n".format(e))
continue
keywordEndTime = datetime.now().strftime("%Y%m%d-%H:%M:%S")
print("processed {} products of keyword: {} at {} ".format(productCount, keyword, keywordEndTime))
logFD.write(
"processed {} products of keyword: {} at {} \n".format(productCount, keyword,
keywordEndTime))
except Exception as e:
print("error: {}".format(e))
logFD.write("error: {} \n".format(e))
failedKeywordsFD.write("{}\n".format(keyword))
continue
time.sleep(2)
endTime = datetime.now().strftime("%Y%m%d-%H:%M:%S")
print("all complete at {}".format(endTime))
logFD.write("all complete at {}".format(endTime))
csvFile.close()
logFD.close()
failedKeywordsFD.close()
| {
"content_hash": "43b9489261b6bd1ebecf50197d2e860e",
"timestamp": "",
"source": "github",
"line_count": 90,
"max_line_length": 119,
"avg_line_length": 40.15555555555556,
"alnum_prop": 0.5617044825677919,
"repo_name": "yuantw/DataCollecting",
"id": "ecfd668dfde19c1b58edf957bc47dea24214630a",
"size": "3711",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "api/AmazonProducts.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "798"
},
{
"name": "Python",
"bytes": "31114"
}
],
"symlink_target": ""
} |
'''
Conventions
-----------
* Unless otherwise specified, the response is always a JSON object
* A successful response is always a dictionary with the following keys: \
'status', 'count', 'data' (in this order). Example:
.. code:: Javascript
{
"status": "success",
"count": "1",
"data": ["AnImportantValue"]
}
* An error response is always a dictionary with the following keys: \
'status', 'message'. Example:
.. code:: Javascript
{
"status": "error",
"message": "Not Found"
}
'''
from flask import request
from flask_restful import Resource, Api, representations
import os
import types
import birdseye
from birdseye import app, db, rq
import birdseye_jobs.chmod
from birdseye.jobs import image_to_observation
import birdseye.models as bm
api = Api(app)
representations.json.settings = {'indent': 4}
def api_route(self, *args, **kwargs):
def wrapper(cls):
self.add_resource(cls, *args, **kwargs)
return cls
return wrapper
api.route = types.MethodType(api_route, api)
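# types.MethodType binds api_route as a bound method of this Api instance,
# so the resources below can self-register with a decorator, e.g.:
#   @api.route('/v1/things')
#   class Things(Resource): ...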
def _success(status_code=200, **message):
return dict(status='success', **message), status_code
def _success_item(item, status_code=200):
return _success(status_code=status_code, count='1', data=[item])
def _success_data(data, count, status_code=200):
return _success(status_code=status_code, count=str(count), data=data)
def _error(message, status_code):
return dict(status='error', message=message), status_code
def _not_found():
return _error('Found no matches', 404)
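# Worked examples of the helpers above, matching the module docstring:
#   _success_item('AnImportantValue')
#   -> ({'status': 'success', 'count': '1', 'data': ['AnImportantValue']}, 200)
#   _error('Not Found', 404)
#   -> ({'status': 'error', 'message': 'Not Found'}, 404)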
@api.route('/v1')
class Root(Resource):
def get(self):
return _success(version=birdseye.__version__)
@api.route('/v1/users')
class Users(Resource):
def post(self):
data = request.get_json()
user = bm.User(data['credentials'], data['secret'])
db.session.add(user)
db.session.commit()
db.session.refresh(user)
return _success_item(user.user_id, status_code=201)
def get(self):
# TODO: check admin
users = bm.User.find_all()
return _success_data(count=len(users), data=[
u.as_public_dict() for u in users])
def delete(self):
# TODO: check admin
count = bm.User.delete_all()
db.session.commit()
return _success_item(count)
@api.route('/v1/users/<uuid:user_id>')
class User(Resource):
def get(self, user_id):
# TODO: check session
user = bm.User.find_by_id(user_id)
return _success_item(user.as_public_dict())
@api.route('/v1/sessions')
class Sessions(Resource):
def post(self):
data = request.get_json()
user = bm.User.find_by_credentials(
data['credentials'], data['secret'])
if user is None:
return _error(message='No user.', status_code=403)
ses = bm.Session(user, data.get('tokens'))
db.session.add(ses)
db.session.commit()
db.session.refresh(ses)
return _success_item(ses.session_id)
def delete(self):
# TODO: Admin
count = bm.Session.delete_all()
db.session.commit()
return _success_item(count)
@api.route('/v1/sessions/<uuid:session_id>')
class Session(Resource):
def get(self, session_id):
# TODO: check session
session = bm.Session.find_by_id(session_id)
return _success_item(session.as_public_dict())
def delete(self, session_id):
# TODO: check session
count = bm.Session.delete(session_id)
db.session.commit()
return _success_item(count)
@api.route('/v1/observations')
class Observations(Resource):
def get(self):
# TODO: check admin
observations = bm.Observation.find_all()
return _success_data(count=len(observations), data=[
o.as_public_dict() for o in observations])
def post(self):
data = request.get_json()
user = bm.User.find_by_credentials(
data.get('credentials'), data.get('secret'))
if user is None:
return _error('No user.', 403)
obs = bm.Observation(user, data.get('geometry'), data.get('media'),
data.get('properties'), data.get('species'))
db.session.add(obs)
db.session.commit()
db.session.refresh(obs)
return _success_item(obs.observation_id, status_code=201)
def delete(self):
# TODO: Admin
count = bm.Observation.delete_all()
db.session.commit()
return _success_item(count)
@api.route('/v1/mapped_observations')
class MappedObservations(Resource):
def _remap(self, obs):
result = obs.as_public_dict()
title = ', '.join([
label for _, label in obs.properties['vision_labels'][:3]])
if obs.user and obs.user.social and 'nickname' in obs.user.social:
login = obs.user.social['nickname']
else:
login = 'Yo boss! Whazzuuupp!'
result['properties'].update({
'title': title,
'place': title,
'login': login,
})
result.update({
'geometry': obs.geometry_center,
'id': obs.observation_id,
'type': 'Feature',
})
return result
def get(self):
mapped = [self._remap(obs) for obs in bm.Observation.find_all()]
return _success(
200, count=str(len(mapped)), data=mapped,
type='FeatureCollection', features=mapped)
@api.route('/v1/observations/<uuid:observation_id>')
class Observation(Resource):
def get(self, observation_id):
# TODO: check_session
observation = bm.Observation.find_by_id(observation_id)
if observation:
return _success_item(observation.as_public_dict())
else:
return _not_found()
def put(self, observation_id):
return
@api.route('/v1/media')
class Media(Resource):
def post(self):
path = request.headers.get('X-File')
url_base = 'https://birdseye.space/static/'
url = url_base + 'not-found.jpg'
if path is not None:
ext = 'jpeg'
basename = '{}.{}'.format(bm.new_uuid(), ext)
new_path = '/var/www/html/static/{}'.format(basename)
os.rename(path, new_path)
url = url_base + basename
chmod_job = rq.get_queue('www-data-chmod').enqueue(
birdseye_jobs.chmod.chmod_file, new_path)
image_to_observation.queue(new_path, url, depends_on=chmod_job)
return _success_item(url)
@api.route('/v1/species')
class Species(Resource):
def get(self):
species = bm.Species.find_all()
return _success_data(count=len(species), data=[
s.as_public_dict() for s in species])
def post(self):
data = request.get_json()
species = bm.Species(data.get('names'), data.get('labels'))
db.session.add(species)
db.session.commit()
db.session.refresh(species)
return _success_item(species.species_id, status_code=201)
def delete(self):
count = bm.Species.delete_all()
db.session.commit()
return _success_item(count)
def noqa():
pass
| {
"content_hash": "ed7147951361b49610a0ec7a87fc9dc3",
"timestamp": "",
"source": "github",
"line_count": 267,
"max_line_length": 75,
"avg_line_length": 27.086142322097377,
"alnum_prop": 0.598174778761062,
"repo_name": "NasaAppChallenge/birdseye-server",
"id": "5adb1b515811f6ea326993c3c12d133673f65e9e",
"size": "7256",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "birdseye/api.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Mako",
"bytes": "494"
},
{
"name": "Python",
"bytes": "50368"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
import datetime
import six
from ..base import Model
from ..fields import Field, ReadOnlyField, ListField
class OwnerReference(Model):
apiVersion = Field(six.text_type)
blockOwnerDeletion = Field(bool)
controller = Field(bool)
kind = Field(six.text_type)
name = Field(six.text_type)
uid = Field(six.text_type)
class ObjectMeta(Model):
annotations = Field(dict)
creationTimestamp = ReadOnlyField(datetime.datetime)
deletionGracePeriodSeconds = ReadOnlyField(int)
deletionTimestamp = ReadOnlyField(datetime.datetime)
finalizers = ListField(six.text_type)
generateName = Field(six.text_type)
generation = ReadOnlyField(int)
labels = Field(dict)
name = Field(six.text_type)
namespace = Field(six.text_type, "default")
ownerReferences = ListField(OwnerReference)
resourceVersion = ReadOnlyField(six.text_type)
selfLink = ReadOnlyField(six.text_type)
uid = ReadOnlyField(six.text_type)
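# Construction sketch (values are illustrative; keyword arguments map onto
# the Field declarations above):
#   meta = ObjectMeta(name="my-app", namespace="default",
#                     labels={"app": "my-app"})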
class TypedLocalObjectReference(Model):
apiGroup = Field(six.text_type)
kind = Field(six.text_type)
name = Field(six.text_type)
class ObjectReference(Model):
apiVersion = Field(six.text_type)
fieldPath = Field(six.text_type)
kind = Field(six.text_type)
name = Field(six.text_type)
namespace = Field(six.text_type)
resourceVersion = Field(six.text_type)
uid = Field(six.text_type)
class Preconditions(Model):
resourceVersion = Field(six.text_type)
uid = Field(six.text_type)
class DeleteOptions(Model):
apiVersion = Field(six.text_type)
dryRun = ListField(six.text_type)
gracePeriodSeconds = Field(int)
kind = Field(six.text_type)
preconditions = Field(Preconditions)
propagationPolicy = Field(six.text_type)
class LocalObjectReference(Model):
name = Field(six.text_type)
| {
"content_hash": "bccb6574102509e55403dec7a6ff19fa",
"timestamp": "",
"source": "github",
"line_count": 68,
"max_line_length": 56,
"avg_line_length": 27.38235294117647,
"alnum_prop": 0.7137486573576799,
"repo_name": "fiaas/k8s",
"id": "e658458c2aa1b6f1fa34633d8c1fd824bbd49716",
"size": "2493",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "k8s/models/common.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "183501"
}
],
"symlink_target": ""
} |
from django.conf.urls.defaults import patterns, include, url
from tastypie.api import Api
from monlog.log.api.api import LogResource
from monlog.log.api.api import LogCollectionResource
from monlog.log.api.api import ExpectationCollectionResource
log_resource = LogResource()
log_collection = LogCollectionResource()
expectation_collection = ExpectationCollectionResource()
urlpatterns = patterns('',
url(r'^', include(log_resource.urls)), # API available with POST
url(r'^', include(log_collection.urls)), # API available from GET
url(r'^', include(expectation_collection.urls)),
)
| {
"content_hash": "14c2366c035c3aa6d255c2de085a5bfc",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 70,
"avg_line_length": 39.93333333333333,
"alnum_prop": 0.7746243739565943,
"repo_name": "monlog/monlog",
"id": "b8d6eed1af31eca7bf49e489b3dd987af06c32e1",
"size": "599",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "monlog/log/api/urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "7763"
},
{
"name": "Python",
"bytes": "56201"
},
{
"name": "Shell",
"bytes": "547"
}
],
"symlink_target": ""
} |
from setuptools import setup
setup(
name='sphinx-nameko-theme',
version='0.0.3',
author='onefinestay',
author_email='nameko-devs@onefinestay.com',
description='Sphinx Nameko Theme',
url='https://github.com/onefinestay/sphinx-nameko-theme',
license='MIT',
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet',
'Topic :: Software Development :: Documentation',
],
packages=['sphinx_nameko_theme'],
install_requires=['sphinx'],
entry_points = {
'sphinx_themes': [
'path = sphinx_nameko_theme:get_html_theme_path',
]
},
include_package_data=True,
zip_safe=False
)
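# A downstream Sphinx project would consume this roughly as follows in its
# conf.py (the 'nameko' theme name is an assumption here):
#   import sphinx_nameko_theme
#   html_theme_path = [sphinx_nameko_theme.get_html_theme_path()]
#   html_theme = 'nameko'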
| {
"content_hash": "56dd3e7b301a41c2dcd0f19f464d74f4",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 61,
"avg_line_length": 29.862068965517242,
"alnum_prop": 0.6073903002309469,
"repo_name": "onefinestay/sphinx-nameko-theme",
"id": "8d972dc4665ae9753aae330e95fe60f6b7376dbd",
"size": "866",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "4293"
},
{
"name": "HTML",
"bytes": "1562"
},
{
"name": "Python",
"bytes": "1177"
}
],
"symlink_target": ""
} |
from setuptools import setup, find_packages
import os
version = '0.1.0'
setup(name='django_drole',
version=version,
description="Django Drole: Zope/Plone inspired roles/permissions",
long_description=open("README.md").read(),
classifiers=[
"Programming Language :: Python",
],
keywords='',
author='Ivo van der Wijk',
author_email='two@in.m3r.nl',
url='http://github.com/iivvoo/django_drole',
license='BSD',
packages=find_packages(exclude=['ez_setup']),
namespace_packages=[],
include_package_data=True,
zip_safe=False,
install_requires=[
'setuptools',
'pytest',
'coverage',
],
entry_points={
},
)
| {
"content_hash": "2a13e721b61ee8bb0f5c326e59aa470a",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 72,
"avg_line_length": 24.35483870967742,
"alnum_prop": 0.5788079470198676,
"repo_name": "iivvoo/django_drole",
"id": "7579478c502afab947dfe41f8b68c9ef9a31cb9c",
"size": "755",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "17005"
}
],
"symlink_target": ""
} |
"""
Martin O'Hanlon
www.stuffaboutcode.com
Pygame radar
Attribution:
Some radar code - http://simpson.edu/computer-science/
Circle on line equation - http://codereview.stackexchange.com/questions/86421/line-segment-to-circle-collision-algorithm
"""
import pygame
import math
import threading
BLACK = (0, 0, 0)
GREEN = (0, 255, 0)
class Radar(threading.Thread):
def __init__(self, screen, radar_rect, radar_pos = (0, 0), scale = 1,
back_col = BLACK, radar_col = GREEN):
#setup threading
threading.Thread.__init__(self)
self.screen = screen
self.radar_rect = radar_rect
self.centre = (radar_rect[0] + (radar_rect[2] / 2),
radar_rect[1] + (radar_rect[3] / 2))
self.sweeplen = (radar_rect[2] / 2)
self.radar_pos = radar_pos
self.scale = scale
self.back_col = back_col
self.radar_col = radar_col
self.running = False
self.stopped = True
self.dots = {}
def run(self):
self.running = True
self.stopped = False
angle = 0
#TODO - set the back of the radar to the back colour
while not self.stopped:
# Calculate the x,y for the end point of our 'sweep' based on the current angle
x = self.centre[0] + math.sin(angle) * self.sweeplen
y = self.centre[1] + math.cos(angle) * self.sweeplen
# Draw the line from the center at 145, 145 to the calculated end spot
pygame.draw.line(self.screen, self.radar_col, [self.centre[0], self.centre[1]], [x, y], 2)
self._display_dots_on_line(self.centre[0], self.centre[1], x, y)
# Draw the outline of a circle to 'sweep' the line around
pygame.draw.ellipse(self.screen, self.radar_col, self.radar_rect, 2)
#update the display, wait for the next frame
pygame.display.update()
pygame.time.Clock().tick(60)
#redraw the line in black to clear it
pygame.draw.line(self.screen, self.back_col, [self.centre[0], self.centre[1]], [x, y], 2)
# Increase the angle by 0.03 radians
angle = angle + .03
# If we have done a full sweep, reset the angle to 0
if angle > 2 * math.pi:
angle = angle - 2 * math.pi
self.running = False
def stop(self):
self.stopped = True
#stop the dots
for key, dot in self.dots.items():
dot.stop()
#wait till its stopped
while(self.running):
pygame.time.wait(10)
def dot_add(self, dot_id, x, y, data = None, back_col = None, dot_col = None):
if back_col == None: back_col = self.back_col
if dot_col == None: dot_col = self.radar_col
#work out x, y on screen
screen_x, screen_y = self._calc_screen_x_y(x, y)
#does the dot already exist?
if dot_id in self.dots:
dot = self.dots[dot_id]
dot.move(screen_x, screen_y)
else:
dot = RadarDot(self.screen, screen_x, screen_y, 10, data = data, back_col = back_col, dot_col = dot_col)
self.dots[dot_id] = dot
def dot_remove(self, dot_id):
if dot_id in self.dots:
del self.dots[dot_id]
def dot_move(self, dot_id, x, y):
screen_x, screen_y = self._calc_screen_x_y(x, y)
self.dots[dot_id].move(screen_x, screen_y)
def dot_move_by(self, dot_id, x, y):
self.dots[dot_id].move_by(x, y)
def _calc_screen_x_y(self, x, y):
diff_x = (x - self.radar_pos[0]) * self.scale
diff_y = (y - self.radar_pos[1]) * self.scale
screen_x = int(round(self.centre[0] + diff_x, 0))
screen_y = int(round(self.centre[1] + diff_y, 0))
return screen_x, screen_y
def _display_dots_on_line(self, x1, y1, x2, y2):
for key, dot in self.dots.copy().items():
if dot.fade_step == 0 or dot.fade_step > 50:
intersect, pos = dot.on_line(x1, y1, x2, y2)
if intersect == True:
dot.show()
def dot_at_point(self, point):
dot_found = None
for key, dot in self.dots.copy().items():
if dot.rect.collidepoint(point):
dot_found = key
return dot_found
def set_scale(self, new_scale):
self.scale = new_scale
for key, dot in self.dots.copy().items():
self.dot_move(key, dot.x, dot.y)
class RadarDot():
def __init__(self, screen, x, y, radius,
back_col = BLACK, dot_col = GREEN,
fade_time = 4000, no_of_fade_steps = 100, data = None):
self.screen = screen
self.radius = radius
self.move(x, y)
self.back_col = back_col
self.dot_col = dot_col
self.fade_time = fade_time
self.no_of_fade_steps = no_of_fade_steps
self.fade_step = 0
self.fade_running = False
self.data = data
def show(self):
#if the fade is running
if self.fade_running:
#reset the fade
self.fade_step = 0
else:
#start the fade again
self.fade_thread = threading.Thread(target = self._fade)
self.fade_thread.start()
def _fade(self):
#calc fade steps
fade_steps = self._calc_fade(self.dot_col, self.back_col, self.no_of_fade_steps)
fade_delay = int(self.fade_time / self.no_of_fade_steps)
#keep fading out the dot until it disappears or its stopped
now = pygame.time.get_ticks()
self.fade_step = 0
self.fade_running = True
#keep fading the dot until its the back col or its stopped
while self.fade_step < (len(fade_steps) - 1) and self.fade_running == True:
#where am i drawing the dot this time?
lastx, lasty = self.x, self.y
pygame.draw.ellipse(self.screen,
fade_steps[self.fade_step],
(lastx - self.radius, lasty - self.radius, self.radius, self.radius), 0)
#pygame.display.update()
#wait until the next time or the fade is stopped
till = now + fade_delay
while till > pygame.time.get_ticks() and self.fade_running == True:
pygame.time.wait(10)
now = till
#draw over the last ellipse in black
pygame.draw.ellipse(self.screen,
self.back_col,
(lastx - self.radius, lasty - self.radius, self.radius, self.radius), 0)
self.fade_step += 1
self.fade_running = False
#pygame.display.update()
def move(self, x, y):
self.x = x
self.y = y
self.rect = pygame.Rect(self.x - self.radius, self.y - self.radius, self.radius, self.radius)
def move_by(self, x, y):
self.move(self.x + x, self.y + y)
def _calc_fade(self, start_rgb, end_rgb, steps):
#work out the per-step change for each colour channel
r_step = (end_rgb[0] - start_rgb[0]) / steps
g_step = (end_rgb[1] - start_rgb[1]) / steps
b_step = (end_rgb[2] - start_rgb[2]) / steps
rgb_steps = []
rgb_steps.append(start_rgb)
for step in range(1, steps):
last_rgb = rgb_steps[step - 1]
this_rgb = (last_rgb[0] + r_step,
last_rgb[1] + g_step,
last_rgb[2] + b_step)
rgb_steps.append(this_rgb)
rgb_steps.append(end_rgb)
return rgb_steps
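#illustrative call (assuming Python 3 float division):
#_calc_fade((0, 200, 0), (0, 0, 0), 4) steps the green channel
#200 -> 150 -> 100 -> 50 -> 0, returning five colours in total
#(the start colour, three interpolated steps and the end colour)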
def stop(self):
#if the fade is running, stop it
if self.fade_running:
self.fade_running = False
self.fade_thread.join()
def on_line(self, x1, y1, x2, y2):
Q = pygame.math.Vector2(self.x, self.y)
r = self.radius
P1 = pygame.math.Vector2(x1, y1)
V = pygame.math.Vector2(x2, y2) - P1
a = V.dot(V)
b = 2 * V.dot(P1 - Q)
c = P1.dot(P1) + Q.dot(Q) - 2 * P1.dot(Q) - r**2
disc = b**2 - 4 * a * c
if disc < 0:
return False, None
sqrt_disc = math.sqrt(disc)
t1 = (-b + sqrt_disc) / (2 * a)
t2 = (-b - sqrt_disc) / (2 * a)
if not (0 <= t1 <= 1 or 0 <= t2 <= 1):
return False, None
t = max(0, min(1, - b / (2 * a)))
return True, P1 + t * V
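#derivation note: points on the segment are P(t) = P1 + t * V with
#t in [0, 1]; substituting into |P(t) - Q|**2 = r**2 gives the quadratic
#a*t**2 + b*t + c = 0 solved above. A negative discriminant means the
#infinite line misses the circle; t1 and t2 both outside [0, 1] means the
#segment misses it. The returned point uses t = -b / (2 * a), the line's
#closest approach to the circle centre, clamped to the segment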
if __name__ == "__main__":
pygame.init()
# Set the height and width of the screen
SIZE = [600, 600]
BORDER = 10
#create the screen
screen = pygame.display.set_mode(SIZE)
# Set the screen background
screen.fill(BLACK)
# Dimensions and location of radar sweep
radar_rect = [BORDER, BORDER, SIZE[0] - (BORDER * 2), SIZE[1] - (BORDER * 2)]
radar = Radar(screen, radar_rect)
radar.start()
radar.dot_add(1, -200, -200)
radar.dot_add(2, 200, 200)
radar.dot_add(3, -200, 250)
# Loop until the user clicks the close button.
done = False
while not done:
for event in pygame.event.get():
if event.type == pygame.QUIT:
done = True
pygame.time.wait(75)
radar.dot_move_by(1, 1, 1.5)
radar.dot_move_by(2, -1.4, -1)
radar.dot_move_by(3, 0, -1.5)
radar.stop()
pygame.quit()
| {
"content_hash": "ad5cd59162bf7c6085920753d2a71c05",
"timestamp": "",
"source": "github",
"line_count": 306,
"max_line_length": 120,
"avg_line_length": 31.23856209150327,
"alnum_prop": 0.5287163929281306,
"repo_name": "martinohanlon/PiAwareRadar",
"id": "337c3e8111d707c9d45f6ae3ee7def947ca71cf6",
"size": "9559",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "piawareradar/radar.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "22520"
}
],
"symlink_target": ""
} |
"""Tests the Turbinia processor."""
import os
import unittest
import mock
import six
from libcloudforensics.providers.gcp.internal.compute import GoogleComputeDisk
from dftimewolf.lib import state
from dftimewolf.lib import errors
# The easiest way to load our test Turbinia config is to add an environment
# variable
current_dir = os.path.dirname(os.path.realpath(__file__))
os.environ['TURBINIA_CONFIG_PATH'] = os.path.join(current_dir, 'test_data')
# pylint: disable=wrong-import-position
from dftimewolf.lib.containers import containers
from dftimewolf.lib.processors import turbinia_gcp
from dftimewolf import config
# Manually set TURBINIA_PROJECT to the value we expect.
# pylint: disable=wrong-import-position, wrong-import-order
from turbinia import config as turbinia_config
from turbinia import message as turbinia_message
turbinia_config.TURBINIA_PROJECT = 'turbinia-project'
YARA_RULE = """rule dummy { condition: false }"""
class TurbiniaGCPProcessorTest(unittest.TestCase):
"""Tests for the Turbinia processor."""
def testInitialization(self):
"""Tests that the processor can be initialized."""
test_state = state.DFTimewolfState(config.Config)
turbinia_processor = turbinia_gcp.TurbiniaGCPProcessor(test_state)
self.assertIsNotNone(turbinia_processor)
@mock.patch('turbinia.client.get_turbinia_client')
# pylint: disable=invalid-name
def testSetup(self, _mock_TurbiniaClient):
"""Tests that the processor is set up correctly."""
test_state = state.DFTimewolfState(config.Config)
turbinia_processor = turbinia_gcp.TurbiniaGCPProcessor(test_state)
turbinia_processor.SetUp(
turbinia_config_file=None,
disk_names='disk-1',
project='turbinia-project',
turbinia_recipe=None,
turbinia_zone='europe-west1',
sketch_id=123)
# pylint: disable=line-too-long
turbinia_processor.client.create_request.return_value = turbinia_message.TurbiniaRequest()
self.assertEqual(turbinia_processor.project, 'turbinia-project')
self.assertEqual(turbinia_processor.turbinia_zone, 'europe-west1')
self.assertEqual(turbinia_processor.turbinia_recipe, None)
self.assertEqual(turbinia_processor.sketch_id, 123)
self.assertEqual(test_state.errors, [])
# pytype: disable=attribute-error
self.assertEqual(
'disk-1',
test_state.GetContainers(
turbinia_processor.GetThreadOnContainerType())[0].name)
# pytype: enable=attribute-error
# TURBINIA_REGION is dynamically generated
# pylint: disable=no-member
self.assertEqual(
turbinia_processor.turbinia_region,
turbinia_gcp.turbinia_config.TURBINIA_REGION)
# pylint: disable=protected-access
six.assertRegex(
self, turbinia_processor._output_path, '(/tmp/tmp|/var/folders).+')
@mock.patch('turbinia.client.get_turbinia_client')
# pylint: disable=invalid-name
def testWrongProject(self, _mock_TurbiniaClient):
"""Tests that specifying the wrong Turbinia project generates an error."""
test_state = state.DFTimewolfState(config.Config)
turbinia_processor = turbinia_gcp.TurbiniaGCPProcessor(test_state)
with self.assertRaises(errors.DFTimewolfError) as error:
turbinia_processor.SetUp(
turbinia_config_file=None,
disk_names='disk-1',
project='turbinia-wrong-project',
turbinia_recipe=None,
turbinia_zone='europe-west1',
sketch_id=1234)
# pylint: disable=line-too-long
turbinia_processor.client.create_request.return_value = turbinia_message.TurbiniaRequest()
self.assertEqual(len(test_state.errors), 1)
self.assertEqual(test_state.errors[0], error.exception)
error_msg = error.exception.message
self.assertEqual(
error_msg, 'Specified project turbinia-wrong-project does '
'not match Turbinia configured project '
'turbinia-project. Use '
'gcp_turbinia_disk_copy_ts recipe to copy the '
'disk into the same project.')
self.assertTrue(error.exception.critical)
@mock.patch('dftimewolf.lib.processors.turbinia_base.turbinia_config')
@mock.patch('turbinia.client.get_turbinia_client')
# pylint: disable=invalid-name
def testWrongSetup(self, _mock_TurbiniaClient, mock_turbinia_config):
"""Tests that invalid setup options generate errors."""
params = [{
'turbinia_config_file': None,
'disk_names': 'disk-1',
'project': None,
'turbinia_recipe': None,
'turbinia_zone': 'europe-west1',
'sketch_id': None,
}, {
'turbinia_config_file': None,
'disk_names': 'disk-1',
'project': 'turbinia-project',
'turbinia_recipe': None,
'turbinia_zone': None,
'sketch_id': None,
}]
expected_error = (
'project or turbinia_zone are not all '
'specified, bailing out')
for combination in params:
mock_turbinia_config.TURBINIA_PROJECT = combination['project']
mock_turbinia_config.TURBINIA_ZONE = combination['turbinia_zone']
test_state = state.DFTimewolfState(config.Config)
turbinia_processor = turbinia_gcp.TurbiniaGCPProcessor(test_state)
with self.assertRaises(errors.DFTimewolfError) as error:
turbinia_processor.SetUp(**combination)
self.assertEqual(len(test_state.errors), 1)
self.assertEqual(test_state.errors[0], error.exception)
error_msg = error.exception.message
self.assertEqual(error_msg, expected_error)
self.assertTrue(error.exception.critical)
@mock.patch('magic.Magic')
@mock.patch('os.path.exists')
@mock.patch('turbinia.output_manager.GCSOutputWriter')
@mock.patch('turbinia.evidence.GoogleCloudDisk')
@mock.patch('turbinia.client.get_turbinia_client')
# pylint: disable=invalid-name
def testProcessFromParams(
self, _mock_TurbiniaClient, mock_GoogleCloudDisk, mock_GCSOutputWriter,
mock_exists, mock_magic):
"""Tests that the processor processes data correctly when a disk name is
passed in as a parameter."""
test_state = state.DFTimewolfState(config.Config)
test_state.StoreContainer(
containers.YaraRule(
name='dummy_yara', rule_text="rule dummy { condition: false }"))
turbinia_processor = turbinia_gcp.TurbiniaGCPProcessor(test_state)
turbinia_processor.SetUp(
turbinia_config_file=None,
disk_names='disk-1',
project='turbinia-project',
turbinia_recipe=None,
turbinia_zone='europe-west1',
sketch_id=4567)
recipe = {
'globals': {
'sketch_id':
4567,
'yara_rules':
YARA_RULE,
'jobs_denylist': [
'StringsJob', 'BinaryExtractorJob', 'BulkExtractorJob',
'PhotorecJob'
]
}
}
# pylint: disable=line-too-long
turbinia_processor.client.create_request.return_value = turbinia_message.TurbiniaRequest(
recipe=recipe)
turbinia_processor.client.get_task_data.return_value = [{
'saved_paths': [
'/fake/data.plaso',
'/fake/data2.plaso',
'/another/random/file.txt',
'gs://BinaryExtractorTask.tar.gz',
]
}]
# Return True so the test assumes the above files exist
mock_exists.return_value = True
# Our GS path will be downloaded to this fake local path
local_mock = mock.MagicMock()
local_mock.copy_from.return_value = '/local/BinaryExtractorTask.tar.gz'
mock_GCSOutputWriter.return_value = local_mock
magic_mocked = mock.MagicMock()
magic_mocked.from_file.return_value = 'text/plain'
mock_magic.return_value = magic_mocked
turbinia_processor.PreProcess()
in_containers = test_state.GetContainers(
turbinia_processor.GetThreadOnContainerType())
for c in in_containers:
turbinia_processor.Process(c) # pytype: disable=wrong-arg-types
# GetContainers returns the abstract base class type, but process is
# called with the instantiated child class.
turbinia_processor.PostProcess()
mock_GoogleCloudDisk.assert_called_with(
disk_name='disk-1', project='turbinia-project', zone='europe-west1')
# These are mock objects, so the member does exist
# pylint: disable=no-member
turbinia_processor.client.send_request.assert_called()
request = turbinia_processor.client.send_request.call_args[0][0]
self.assertEqual(request.recipe['globals']['sketch_id'], 4567)
self.assertListEqual(
request.recipe['globals']['jobs_denylist'],
['StringsJob', 'BinaryExtractorJob', 'BulkExtractorJob', 'PhotorecJob'])
turbinia_processor.client.get_task_data.assert_called()
self.assertEqual(
request.recipe['globals']['yara_rules'],
"rule dummy { condition: false }")
# pylint: disable=protected-access
mock_GCSOutputWriter.assert_any_call(
'gs://BinaryExtractorTask.tar.gz',
local_output_dir=turbinia_processor._output_path)
self.assertEqual(test_state.errors, [])
ti_containers = test_state.GetContainers(containers.ThreatIntelligence)
file_containers = test_state.GetContainers(containers.File)
# Make sure that file.txt is ignored
self.assertEqual(len(file_containers), 2)
self.assertEqual(ti_containers[0].name, 'BinaryExtractorResults')
self.assertEqual(ti_containers[0].path, '/local/BinaryExtractorTask.tar.gz')
self.assertEqual(file_containers[0].name, 'turbinia-project-disk-1')
self.assertEqual(file_containers[1].name, 'turbinia-project-disk-1')
self.assertEqual(file_containers[0].path, '/fake/data.plaso')
self.assertEqual(file_containers[1].path, '/fake/data2.plaso')
@mock.patch('os.path.exists')
@mock.patch('turbinia.output_manager.GCSOutputWriter')
@mock.patch('turbinia.evidence.GoogleCloudDisk')
@mock.patch('turbinia.client.get_turbinia_client')
# pylint: disable=invalid-name
def testProcessFromState(
self, _mock_TurbiniaClient, mock_GoogleCloudDisk, mock_GCSOutputWriter,
mock_exists):
"""Tests that the processor processes data correctly when a GCEDisk is
received from the state.
"""
test_state = state.DFTimewolfState(config.Config)
test_state.StoreContainer(
containers.YaraRule(
name='dummy_yara', rule_text="rule dummy { condition: false }")
)
test_state.StoreContainer(containers.GCEDisk(
name='disk-1', project='turbinia-project'))
turbinia_processor = turbinia_gcp.TurbiniaGCPProcessor(test_state)
turbinia_processor.SetUp(
turbinia_config_file=None,
project='turbinia-project',
turbinia_recipe=None,
turbinia_zone='europe-west1',
sketch_id=4567)
recipe = {
'globals': {
'sketch_id':
4567,
'yara_rules':
YARA_RULE,
'jobs_denylist': [
'StringsJob', 'BinaryExtractorJob', 'BulkExtractorJob',
'PhotorecJob'
]
}
}
# pylint: disable=line-too-long
turbinia_processor.client.create_request.return_value = turbinia_message.TurbiniaRequest(
recipe=recipe)
turbinia_processor.client.get_task_data.return_value = [{
'saved_paths': [
'/fake/data.plaso',
'/fake/data2.plaso',
'/another/random/file.txt',
'gs://BinaryExtractorTask.tar.gz',
]
}]
# Return True so the test assumes the above files exist
mock_exists.return_value = True
# Our GS path will be downloaded to this fake local path
local_mock = mock.MagicMock()
local_mock.copy_from.return_value = '/local/BinaryExtractorTask.tar.gz'
mock_GCSOutputWriter.return_value = local_mock
turbinia_processor.PreProcess()
in_containers = test_state.GetContainers(
turbinia_processor.GetThreadOnContainerType())
for c in in_containers:
turbinia_processor.Process(c) # pytype: disable=wrong-arg-types
# GetContainers returns the abstract base class type, but process is
# called with the instantiated child class.
turbinia_processor.PostProcess()
mock_GoogleCloudDisk.assert_called_with(
disk_name='disk-1', project='turbinia-project', zone='europe-west1')
# These are mock objects, so the member does exist
# pylint: disable=no-member
turbinia_processor.client.send_request.assert_called()
request = turbinia_processor.client.send_request.call_args[0][0]
self.assertEqual(request.recipe['globals']['sketch_id'], 4567)
self.assertListEqual(
request.recipe['globals']['jobs_denylist'],
['StringsJob', 'BinaryExtractorJob', 'BulkExtractorJob', 'PhotorecJob'])
turbinia_processor.client.get_task_data.assert_called()
self.assertEqual(
request.recipe['globals']['yara_rules'],
"rule dummy { condition: false }")
# pylint: disable=protected-access
mock_GCSOutputWriter.assert_any_call(
'gs://BinaryExtractorTask.tar.gz',
local_output_dir=turbinia_processor._output_path)
self.assertEqual(test_state.errors, [])
ti_containers = test_state.GetContainers(containers.ThreatIntelligence)
file_containers = test_state.GetContainers(containers.File)
# Make sure that file.txt is ignored
self.assertEqual(len(file_containers), 2)
self.assertEqual(ti_containers[0].name, 'BinaryExtractorResults')
self.assertEqual(ti_containers[0].path, '/local/BinaryExtractorTask.tar.gz')
self.assertEqual(file_containers[0].name, 'turbinia-project-disk-1')
self.assertEqual(file_containers[1].name, 'turbinia-project-disk-1')
self.assertEqual(file_containers[0].path, '/fake/data.plaso')
self.assertEqual(file_containers[1].path, '/fake/data2.plaso')
@mock.patch('turbinia.evidence.GoogleCloudDisk')
@mock.patch('turbinia.client.get_turbinia_client')
# pylint: disable=invalid-name
def testProcessCrossProject(self,
_mock_TurbiniaClient,
mock_GoogleCloudDisk):
"""Tests that process does nothing if the disks are in another project."""
test_state = state.DFTimewolfState(config.Config)
test_state.StoreContainer(containers.GCEDisk(
name='disk-1', project='another-project'))
turbinia_processor = turbinia_gcp.TurbiniaGCPProcessor(test_state)
turbinia_processor.SetUp(
turbinia_config_file=None,
project='turbinia-project',
turbinia_zone='europe-west1',
turbinia_recipe=None,
sketch_id=4567)
turbinia_processor.PreProcess()
in_containers = test_state.GetContainers(
turbinia_processor.GetThreadOnContainerType())
for c in in_containers:
turbinia_processor.Process(c) # pytype: disable=wrong-arg-types
# GetContainers returns the abstract base class type, but process is
# called with the instantiated child class.
turbinia_processor.PostProcess()
file_containers = test_state.GetContainers(containers.File)
self.assertEqual(len(file_containers), 0)
# pylint: disable=no-member
mock_GoogleCloudDisk.assert_not_called()
turbinia_processor.client.send_request.assert_not_called()
@mock.patch('turbinia.output_manager.GCSOutputWriter')
# pylint: disable=invalid-name
def testDownloadFilesFromGCS(self, mock_GCSOutputWriter):
"""Tests _DownloadFilesFromGCS"""
def _fake_copy(filename):
return '/fake/local/' + filename.rsplit('/')[-1]
test_state = state.DFTimewolfState(config.Config)
turbinia_processor = turbinia_gcp.TurbiniaGCPProcessor(test_state)
mock_GCSOutputWriter.return_value.copy_from = _fake_copy
fake_paths = ['gs://hashes.json', 'gs://results.plaso']
# pylint: disable=protected-access
local_paths = turbinia_processor._DownloadFilesFromGCS('fake', fake_paths)
self.assertEqual(
local_paths, [('fake', '/fake/local/hashes.json'),
('fake', '/fake/local/results.plaso')])
def testDeterminePaths(self):
"""Tests _DeterminePaths"""
test_state = state.DFTimewolfState(config.Config)
turbinia_processor = turbinia_gcp.TurbiniaGCPProcessor(test_state)
fake_task_data = [{
'saved_paths': ['/local/path.plaso', '/ignoreme/'],
}, {
'saved_paths': ['gs://hashes.json', '/tmp/BinaryExtractorTask.tar.gz'],
}]
# pylint: disable=protected-access
local_paths, gs_paths = turbinia_processor._DeterminePaths(fake_task_data)
self.assertEqual(
local_paths, ['/local/path.plaso', '/tmp/BinaryExtractorTask.tar.gz'])
self.assertEqual(gs_paths, ['gs://hashes.json'])
@mock.patch('turbinia.client.get_turbinia_client')
# pylint: disable=invalid-name
def testPreProcess(self, _mockTurbiniaClient):
"""Tests that ForensicsVM containers are picked up properly.
We store a ForensicsVM container in the state, run PreProcess, then check
that the state contains a GCEDisk as expected."""
test_state = state.DFTimewolfState(config.Config)
test_state.StoreContainer(
containers.ForensicsVM(
name='ForensicsVM',
evidence_disk=GoogleComputeDisk(
name='disk-1',
project_id='turbinia-project',
zone='europe-west1'),
platform='gcp'))
turbinia_processor = turbinia_gcp.TurbiniaGCPProcessor(test_state)
turbinia_processor.SetUp(
turbinia_config_file=None,
project='turbinia-project',
turbinia_recipe=None,
turbinia_zone='europe-west1',
sketch_id=4567)
# pylint: disable=line-too-long
turbinia_processor.client.create_request.return_value = turbinia_message.TurbiniaRequest()
turbinia_processor.PreProcess()
out_containers = test_state.GetContainers(containers.GCEDisk)
self.assertEqual(len(out_containers), 1)
self.assertEqual(out_containers[0].name, 'disk-1')
if __name__ == '__main__':
unittest.main()
| {
"content_hash": "fb9e79ecb793859ed1aee40362fab075",
"timestamp": "",
"source": "github",
"line_count": 450,
"max_line_length": 96,
"avg_line_length": 39.937777777777775,
"alnum_prop": 0.6847874471399955,
"repo_name": "log2timeline/dftimewolf",
"id": "38bed1f69dd7772bef9fce246c48a02cfa077c6d",
"size": "18018",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "tests/lib/processors/turbinia_gcp.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "633"
},
{
"name": "Python",
"bytes": "980195"
},
{
"name": "Shell",
"bytes": "11863"
}
],
"symlink_target": ""
} |
import webkitpy.thirdparty.unittest2 as unittest
from webkitpy.common.checkout.baselineoptimizer import BaselineOptimizer
from webkitpy.common.host_mock import MockHost
from webkitpy.common.webkit_finder import WebKitFinder
class BaselineOptimizerTest(unittest.TestCase):
def test_move_baselines(self):
host = MockHost()
host.filesystem.write_binary_file('/mock-checkout/third_party/WebKit/LayoutTests/platform/win/another/test-expected.txt', 'result A')
host.filesystem.write_binary_file('/mock-checkout/third_party/WebKit/LayoutTests/platform/mac/another/test-expected.txt', 'result A')
host.filesystem.write_binary_file('/mock-checkout/third_party/WebKit/LayoutTests/another/test-expected.txt', 'result B')
baseline_optimizer = BaselineOptimizer(host, host.port_factory.all_port_names())
baseline_optimizer._move_baselines('another/test-expected.txt', {
'/mock-checkout/third_party/WebKit/LayoutTests/platform/win': 'aaa',
'/mock-checkout/third_party/WebKit/LayoutTests/platform/mac': 'aaa',
'/mock-checkout/third_party/WebKit/LayoutTests': 'bbb',
}, {
'/mock-checkout/third_party/WebKit/LayoutTests': 'aaa',
})
self.assertEqual(host.filesystem.read_binary_file('/mock-checkout/third_party/WebKit/LayoutTests/another/test-expected.txt'), 'result A')
def _assertOptimization(self, results_by_directory, expected_new_results_by_directory, baseline_dirname=''):
host = MockHost()
fs = host.filesystem
webkit_base = WebKitFinder(fs).webkit_base()
baseline_name = 'mock-baseline-expected.txt'
for dirname, contents in results_by_directory.items():
path = fs.join(webkit_base, 'LayoutTests', dirname, baseline_name)
fs.write_binary_file(path, contents)
baseline_optimizer = BaselineOptimizer(host, host.port_factory.all_port_names())
self.assertTrue(baseline_optimizer.optimize(fs.join(baseline_dirname, baseline_name)))
for dirname, contents in expected_new_results_by_directory.items():
path = fs.join(webkit_base, 'LayoutTests', dirname, baseline_name)
if contents is None:
self.assertFalse(fs.exists(path))
else:
self.assertEqual(fs.read_binary_file(path), contents)
# Check that the files that were in the original set have been deleted where necessary.
for dirname in results_by_directory:
path = fs.join(webkit_base, 'LayoutTests', dirname, baseline_name)
if not dirname in expected_new_results_by_directory:
self.assertFalse(fs.exists(path))
def test_linux_redundant_with_win(self):
self._assertOptimization({
'platform/win': '1',
'platform/linux': '1',
}, {
'platform/win': '1',
})
def test_covers_mac_win_linux(self):
self._assertOptimization({
'platform/mac': '1',
'platform/win': '1',
'platform/linux': '1',
'': None,
}, {
'': '1',
})
def test_overwrites_root(self):
self._assertOptimization({
'platform/mac': '1',
'platform/win': '1',
'platform/linux': '1',
'': '2',
}, {
'': '1',
})
def test_no_new_common_directory(self):
self._assertOptimization({
'platform/mac': '1',
'platform/linux': '1',
'': '2',
}, {
'platform/mac': '1',
'platform/linux': '1',
'': '2',
})
def test_local_optimization(self):
self._assertOptimization({
'platform/mac': '1',
'platform/linux': '1',
'platform/linux-x86': '1',
}, {
'platform/mac': '1',
'platform/linux': '1',
})
def test_local_optimization_skipping_a_port_in_the_middle(self):
self._assertOptimization({
'platform/mac-snowleopard': '1',
'platform/win': '1',
'platform/linux-x86': '1',
}, {
'platform/mac-snowleopard': '1',
'platform/win': '1',
})
def test_baseline_redundant_with_root(self):
self._assertOptimization({
'platform/mac': '1',
'platform/win': '2',
'': '2',
}, {
'platform/mac': '1',
'': '2',
})
def test_root_baseline_unused(self):
self._assertOptimization({
'platform/mac': '1',
'platform/win': '2',
'': '3',
}, {
'platform/mac': '1',
'platform/win': '2',
})
def test_root_baseline_unused_and_non_existant(self):
self._assertOptimization({
'platform/mac': '1',
'platform/win': '2',
}, {
'platform/mac': '1',
'platform/win': '2',
})
def test_virtual_root_redundant_with_actual_root(self):
self._assertOptimization({
'virtual/softwarecompositing': '2',
'compositing': '2',
}, {
'virtual/softwarecompositing': None,
'compositing': '2',
}, baseline_dirname='virtual/softwarecompositing')
def test_virtual_root_redundant_with_ancestors(self):
self._assertOptimization({
'virtual/softwarecompositing': '2',
'platform/mac/compositing': '2',
'platform/win/compositing': '2',
}, {
'virtual/softwarecompositing': None,
'compositing': '2',
}, baseline_dirname='virtual/softwarecompositing')
def test_virtual_root_not_redundant_with_ancestors(self):
self._assertOptimization({
'virtual/softwarecompositing': '2',
'platform/mac/compositing': '1',
}, {
'virtual/softwarecompositing': '2',
'platform/mac/compositing': '1',
}, baseline_dirname='virtual/softwarecompositing')
| {
"content_hash": "cb88344e3582654f9ffb81405a7d1a60",
"timestamp": "",
"source": "github",
"line_count": 165,
"max_line_length": 145,
"avg_line_length": 36.90909090909091,
"alnum_prop": 0.5668308702791461,
"repo_name": "lordmos/blink",
"id": "881d112706cd75d1804ec2ebc1b7e4c523136baf",
"size": "7617",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Tools/Scripts/webkitpy/common/checkout/baselineoptimizer_unittest.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "6433"
},
{
"name": "C",
"bytes": "753714"
},
{
"name": "C++",
"bytes": "40028043"
},
{
"name": "CSS",
"bytes": "539440"
},
{
"name": "F#",
"bytes": "8755"
},
{
"name": "Java",
"bytes": "18650"
},
{
"name": "JavaScript",
"bytes": "25700387"
},
{
"name": "Objective-C",
"bytes": "426711"
},
{
"name": "PHP",
"bytes": "141755"
},
{
"name": "Perl",
"bytes": "901523"
},
{
"name": "Python",
"bytes": "3748305"
},
{
"name": "Ruby",
"bytes": "141818"
},
{
"name": "Shell",
"bytes": "9635"
},
{
"name": "XSLT",
"bytes": "49328"
}
],
"symlink_target": ""
} |
from poultry import readline_dir
def test_readline_dir(tweet_collection_dir):
"""Read a tweet collection directory.
:param str tweet_collection_dir: the path to the directory fixture
``readline_dir`` should yield the directory's Tweet objects in order.
"""
tweets = list(readline_dir(tweet_collection_dir))
t1, t2, t3 = tweets
assert t1.id == 190800262909276162
assert t2.id == 195415832510201856
assert t3.id == 201239221502099456
| {
"content_hash": "514ed674be0e182881bf0835290091e5",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 53,
"avg_line_length": 23.61111111111111,
"alnum_prop": 0.6964705882352941,
"repo_name": "dimazest/poultry",
"id": "c0c17f50674f99a06cd9353be1483840b67d0927",
"size": "425",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_producers.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "73285"
}
],
"symlink_target": ""
} |
from chat import app
from flask.ext.mongoengine import MongoEngine
from pymongo import MongoClient
# client = MongoClient('localhost', 27017)
# db = MongoEngine(app)
| {
"content_hash": "11fc4ed534a71b0045c4a80518c87dc0",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 45,
"avg_line_length": 27.833333333333332,
"alnum_prop": 0.7904191616766467,
"repo_name": "fengsp/chat",
"id": "4fd99ff2932b223a1192faddedc4109740c4c152",
"size": "167",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "chat/models.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "8242"
},
{
"name": "JavaScript",
"bytes": "2677"
},
{
"name": "Python",
"bytes": "3498"
}
],
"symlink_target": ""
} |
from django.test import TestCase
from push_notifications.api.rest_framework import (
APNSDeviceSerializer, GCMDeviceSerializer, ValidationError
)
GCM_DRF_INVALID_HEX_ERROR = {"device_id": [u"Device ID is not a valid hex number"]}
GCM_DRF_OUT_OF_RANGE_ERROR = {"device_id": [u"Device ID is out of range"]}
class APNSDeviceSerializerTestCase(TestCase):
def test_validation(self):
# valid data - 32 bytes upper case
serializer = APNSDeviceSerializer(data={
"registration_id": "AEAEAEAEAEAEAEAEAEAEAEAEAEAEAEAEAEAEAEAEAEAEAEAEAEAEAEAEAEAEAEAE",
"name": "Apple iPhone 6+",
"device_id": "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF",
"application_id": "XXXXXXXXXXXXXXXXXXXX",
})
self.assertTrue(serializer.is_valid())
# valid data - 32 bytes lower case
serializer = APNSDeviceSerializer(data={
"registration_id": "aeaeaeaeaeaeaeaeaeaeaeaeaeaeaeaeaeaeaeaeaeaeaeaeaeaeaeaeaeaeaeae",
"name": "Apple iPhone 6+",
"device_id": "ffffffffffffffffffffffffffffffff",
"application_id": "XXXXXXXXXXXXXXXXXXXX",
})
self.assertTrue(serializer.is_valid())
# valid data - 100 bytes upper case
serializer = APNSDeviceSerializer(data={
"registration_id": "AE" * 100,
"name": "Apple iPhone 6+",
"device_id": "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF",
})
self.assertTrue(serializer.is_valid())
# valid data - 100 bytes lower case
serializer = APNSDeviceSerializer(data={
"registration_id": "ae" * 100,
"name": "Apple iPhone 6+",
"device_id": "ffffffffffffffffffffffffffffffff",
})
self.assertTrue(serializer.is_valid())
# invalid data - device_id, registration_id
serializer = APNSDeviceSerializer(data={
"registration_id": "invalid device token contains no hex",
"name": "Apple iPhone 6+",
"device_id": "ffffffffffffffffffffffffffffake",
"application_id": "XXXXXXXXXXXXXXXXXXXX",
})
self.assertFalse(serializer.is_valid())
class GCMDeviceSerializerTestCase(TestCase):
def test_device_id_validation_pass(self):
serializer = GCMDeviceSerializer(data={
"registration_id": "foobar",
"name": "Galaxy Note 3",
"device_id": "0x1031af3b",
"application_id": "XXXXXXXXXXXXXXXXXXXX",
})
self.assertTrue(serializer.is_valid())
def test_registration_id_unique(self):
"""Validate that a duplicate registration id raises a validation error."""
# add a device
serializer = GCMDeviceSerializer(data={
"registration_id": "foobar",
"name": "Galaxy Note 3",
"device_id": "0x1031af3b",
"application_id": "XXXXXXXXXXXXXXXXXXXX",
})
serializer.is_valid(raise_exception=True)
obj = serializer.save()
# ensure updating the same object works
serializer = GCMDeviceSerializer(obj, data={
"registration_id": "foobar",
"name": "Galaxy Note 5",
"device_id": "0x1031af3b",
"application_id": "XXXXXXXXXXXXXXXXXXXX",
})
serializer.is_valid(raise_exception=True)
obj = serializer.save()
# try to add a new device with the same token
serializer = GCMDeviceSerializer(data={
"registration_id": "foobar",
"name": "Galaxy Note 3",
"device_id": "0xdeadbeaf",
"application_id": "XXXXXXXXXXXXXXXXXXXX",
})
with self.assertRaises(ValidationError):
serializer.is_valid(raise_exception=True)
def test_device_id_validation_fail_bad_hex(self):
serializer = GCMDeviceSerializer(data={
"registration_id": "foobar",
"name": "Galaxy Note 3",
"device_id": "0x10r",
"application_id": "XXXXXXXXXXXXXXXXXXXX",
})
self.assertFalse(serializer.is_valid())
self.assertEqual(serializer.errors, GCM_DRF_INVALID_HEX_ERROR)
def test_device_id_validation_fail_out_of_range(self):
serializer = GCMDeviceSerializer(data={
"registration_id": "foobar",
"name": "Galaxy Note 3",
"device_id": "10000000000000000", # 2**64
"application_id": "XXXXXXXXXXXXXXXXXXXX",
})
self.assertFalse(serializer.is_valid())
self.assertEqual(serializer.errors, GCM_DRF_OUT_OF_RANGE_ERROR)
def test_device_id_validation_value_between_signed_unsigned_64b_int_maximums(self):
"""
2**63 < 0xe87a4e72d634997c < 2**64
"""
serializer = GCMDeviceSerializer(data={
"registration_id": "foobar",
"name": "Nexus 5",
"device_id": "e87a4e72d634997c",
"application_id": "XXXXXXXXXXXXXXXXXXXX",
})
self.assertTrue(serializer.is_valid())
| {
"content_hash": "6322d8ba196b26c7270725fb16ea6816",
"timestamp": "",
"source": "github",
"line_count": 132,
"max_line_length": 89,
"avg_line_length": 32.29545454545455,
"alnum_prop": 0.7112362186253812,
"repo_name": "jleclanche/django-push-notifications",
"id": "5844ea8bc5e17eb3297c629357dc018be8138cc4",
"size": "4263",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "tests/test_rest_framework.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "120014"
}
],
"symlink_target": ""
} |
"""Archive manipulation library for the Docker rules."""
import os
from StringIO import StringIO
import tarfile
class SimpleArFile(object):
"""A simple AR file reader.
This enables reading AR files (System V variant) as described
in https://en.wikipedia.org/wiki/Ar_(Unix).
The standard usage of this class is:
with SimpleArFile(filename) as ar:
nextFile = ar.next()
while nextFile:
print nextFile.filename
nextFile = ar.next()
Upon error, this class will raise an ArError exception.
"""
# TODO(dmarting): We should use a standard library instead but python 2.7
# does not have an AR reading library.
class ArError(Exception):
pass
class SimpleArFileEntry(object):
"""Represent one entry in a AR archive.
Attributes:
filename: the filename of the entry, as described in the archive.
timestamp: the timestamp of the file entry.
owner_id, group_id: numeric id of the user and group owning the file.
mode: unix permission mode of the file
size: size of the file
data: the content of the file.
"""
def __init__(self, f):
self.filename = f.read(16).strip()
if self.filename.endswith('/'): # SysV variant
self.filename = self.filename[:-1]
self.timestamp = int(f.read(12).strip())
self.owner_id = int(f.read(6).strip())
self.group_id = int(f.read(6).strip())
self.mode = int(f.read(8).strip(), 8)
self.size = int(f.read(10).strip())
if f.read(2) != '\x60\x0a':
raise self.ArError('Invalid AR file header')
self.data = f.read(self.size)
MAGIC_STRING = '!<arch>\n'
def __init__(self, filename):
self.filename = filename
def __enter__(self):
self.f = open(self.filename, 'rb')
if self.f.read(len(self.MAGIC_STRING)) != self.MAGIC_STRING:
raise self.ArError('Not a ar file: ' + self.filename)
return self
def __exit__(self, t, v, traceback):
self.f.close()
def next(self):
"""Read the next file. Returns None when reaching the end of file."""
if self.f.tell() == os.fstat(self.f.fileno()).st_size:
return None
return self.SimpleArFileEntry(self.f)
class TarFileWriter(object):
"""A wrapper to write tar files."""
def __init__(self, name):
self.tar = tarfile.open(name=name, mode='w')
def __enter__(self):
return self
def __exit__(self, t, v, traceback):
self.close()
def add_file(self, name, kind=tarfile.REGTYPE, content=None, link=None,
file_content=None, uid=0, gid=0, uname='', gname='', mtime=0,
mode=None):
"""Add a file to the current tar.
Args:
name: the name of the file to add.
kind: the type of the file to add, see tarfile.*TYPE.
content: a textual content to put in the file.
link: if the file is a link, the destination of the link.
file_content: file to read the content from. Provide either this
one or `content` to specify the content for the file.
uid: owner user identifier.
gid: owner group identifier.
uname: owner user names.
gname: owner group names.
mtime: modification time to put in the archive.
mode: unix permission mode of the file; defaults to 0644 for regular
files and 0755 otherwise.
"""
if not name.startswith('.') and not name.startswith('/'):
name = './' + name
tarinfo = tarfile.TarInfo(name)
tarinfo.mtime = mtime
tarinfo.uid = uid
tarinfo.gid = gid
tarinfo.uname = uname
tarinfo.gname = gname
tarinfo.type = kind
if mode is None:
tarinfo.mode = 0644 if kind == tarfile.REGTYPE else 0755
else:
tarinfo.mode = mode
if link:
tarinfo.linkname = link
if content:
tarinfo.size = len(content)
self.tar.addfile(tarinfo, StringIO(content))
elif file_content:
with open(file_content, 'rb') as f:
tarinfo.size = os.fstat(f.fileno()).st_size
self.tar.addfile(tarinfo, f)
else:
self.tar.addfile(tarinfo)
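# A minimal usage sketch for add_file (file names below are illustrative):
#   with TarFileWriter('layer.tar') as out:
#       out.add_file('etc/motd', content='hello\n')
#       out.add_file('bin/tool', kind=tarfile.SYMTYPE, link='/bin/true')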
def add_tar(self, tar, rootuid=None, rootgid=None,
numeric=False, name_filter=None):
"""Merge a tar content into the current tar, stripping timestamp.
Args:
tar: the name of tar to extract and put content into the current tar.
rootuid: user id that we will pretend is root (replaced by uid 0).
rootgid: group id that we will pretend is root (replaced by gid 0).
numeric: set to true to strip out name of owners (and just use the
numeric values).
name_filter: filter out files by name. If not None, this function will
be called for each file to add, given the name, and should return True
if the file is to be added to the final tar and False otherwise.
"""
compression = os.path.splitext(tar)[-1][1:]
if compression == 'tgz':
compression = 'gz'
elif compression == 'bzip2':
compression = 'bz2'
elif compression not in ['gz', 'bz2']:
# Unfortunately xz format isn't supported in py 2.7 :(
compression = ''
with tarfile.open(name=tar, mode='r:' + compression) as intar:
for tarinfo in intar:
if name_filter is None or name_filter(tarinfo.name):
tarinfo.mtime = 0
if rootuid is not None and tarinfo.uid == rootuid:
tarinfo.uid = 0
tarinfo.uname = 'root'
if rootgid is not None and tarinfo.gid == rootgid:
tarinfo.gid = 0
tarinfo.gname = 'root'
if numeric:
tarinfo.uname = ''
tarinfo.gname = ''
name = tarinfo.name
if not name.startswith('/') and not name.startswith('.'):
tarinfo.name = './' + name
if tarinfo.isfile():
self.tar.addfile(tarinfo, intar.extractfile(tarinfo.name))
else:
self.tar.addfile(tarinfo)
def close(self):
"""Close the output tar file.
This class should not be used anymore after calling this method.
"""
self.tar.close()
| {
"content_hash": "766b74fa1798d0f1be66d9327507dc4a",
"timestamp": "",
"source": "github",
"line_count": 181,
"max_line_length": 79,
"avg_line_length": 33.05524861878453,
"alnum_prop": 0.6224302189537022,
"repo_name": "charlieaustin/bazel",
"id": "0f9c4288578f999a5dd5c933d956169c6aa19dc5",
"size": "6578",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tools/build_defs/docker/archive.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "28572"
},
{
"name": "C++",
"bytes": "246268"
},
{
"name": "CSS",
"bytes": "2220"
},
{
"name": "HTML",
"bytes": "24592"
},
{
"name": "Java",
"bytes": "11869988"
},
{
"name": "JavaScript",
"bytes": "12819"
},
{
"name": "Objective-C",
"bytes": "22814"
},
{
"name": "Protocol Buffer",
"bytes": "58611"
},
{
"name": "Python",
"bytes": "86136"
},
{
"name": "Shell",
"bytes": "255244"
}
],
"symlink_target": ""
} |
from datasketch.hyperloglog import HyperLogLog, HyperLogLogPlusPlus
from datasketch.minhash import MinHash
from datasketch.b_bit_minhash import bBitMinHash
from datasketch.lsh import MinHashLSH
from datasketch.inverted_index import InvertedIndex
from datasketch.weighted_minhash import WeightedMinHash, WeightedMinHashGenerator
from datasketch.lshforest import MinHashLSHForest
from datasketch.lshensemble import MinHashLSHEnsemble
from datasketch.lean_minhash import LeanMinHash
# Alias
WeightedMinHashLSH = MinHashLSH
WeightedMinHashLSHForest = MinHashLSHForest
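# Illustrative sketch of the weighted aliases (sizes and threshold are
# example values; assumes the datasketch API exposed above):
#   gen = WeightedMinHashGenerator(100, sample_size=128)
#   lsh = WeightedMinHashLSH(threshold=0.8, num_perm=128)
#   lsh.insert("v1", gen.minhash(weights))  # weights: length-100 vector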
| {
"content_hash": "4907b6bb0affa2a91e1182f6197c7ba0",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 81,
"avg_line_length": 43.46153846153846,
"alnum_prop": 0.8831858407079646,
"repo_name": "arbazkhan002/datasketch",
"id": "6607870f5bb35fd381c774221dada144d20b04b0",
"size": "565",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "datasketch/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "7801"
},
{
"name": "Python",
"bytes": "239691"
}
],
"symlink_target": ""
} |
from pandac.PandaModules import *
from toontown.toonbase.ToonBaseGlobal import *
from direct.directnotify import DirectNotifyGlobal
from toontown.hood import Place
from direct.showbase import DirectObject
from direct.fsm import StateData
from direct.fsm import ClassicFSM, State
from direct.fsm import State
from direct.task import Task
from otp.distributed.TelemetryLimiter import RotationLimitToH, TLGatherAllAvs
from toontown.toonbase import ToontownGlobals
from toontown.toonbase import TTLocalizer
class House(Place.Place):
notify = DirectNotifyGlobal.directNotify.newCategory('House')
def __init__(self, loader, avId, parentFSMState, doneEvent):
Place.Place.__init__(self, loader, doneEvent)
self.id = ToontownGlobals.MyEstate
self.ownersAvId = avId
self.dnaFile = 'phase_7/models/modules/toon_interior'
self.isInterior = 1
self.tfaDoneEvent = 'tfaDoneEvent'
self.oldStyle = None
self.fsm = ClassicFSM.ClassicFSM('House', [State.State('start', self.enterStart, self.exitStart, ['doorIn', 'teleportIn', 'tutorial']),
State.State('walk', self.enterWalk, self.exitWalk, ['sit',
'stickerBook',
'doorOut',
'DFA',
'teleportOut',
'quest',
'purchase',
'closet',
'banking',
'phone',
'stopped']),
State.State('sit', self.enterSit, self.exitSit, ['walk']),
State.State('stickerBook', self.enterStickerBook, self.exitStickerBook, ['walk',
'DFA',
'sit',
'doorOut',
'teleportOut',
'quest',
'purchase',
'closet',
'banking',
'phone',
'stopped']),
State.State('DFA', self.enterDFA, self.exitDFA, ['DFAReject', 'teleportOut', 'doorOut']),
State.State('DFAReject', self.enterDFAReject, self.exitDFAReject, ['walk']),
State.State('doorIn', self.enterDoorIn, self.exitDoorIn, ['walk']),
State.State('doorOut', self.enterDoorOut, self.exitDoorOut, ['walk']),
State.State('teleportIn', self.enterTeleportIn, self.exitTeleportIn, ['walk']),
State.State('teleportOut', self.enterTeleportOut, self.exitTeleportOut, ['teleportIn']),
State.State('quest', self.enterQuest, self.exitQuest, ['walk', 'doorOut']),
State.State('tutorial', self.enterTutorial, self.exitTutorial, ['walk', 'quest']),
State.State('purchase', self.enterPurchase, self.exitPurchase, ['walk', 'doorOut']),
State.State('closet', self.enterCloset, self.exitCloset, ['walk']),
State.State('banking', self.enterBanking, self.exitBanking, ['walk']),
State.State('phone', self.enterPhone, self.exitPhone, ['walk']),
State.State('stopped', self.enterStopped, self.exitStopped, ['walk']),
State.State('final', self.enterFinal, self.exitFinal, ['start', 'teleportIn'])], 'start', 'final')
self.parentFSMState = parentFSMState
return
def load(self):
Place.Place.load(self)
self.parentFSMState.addChild(self.fsm)
def unload(self):
Place.Place.unload(self)
self.parentFSMState.removeChild(self.fsm)
del self.parentFSMState
del self.fsm
ModelPool.garbageCollect()
TexturePool.garbageCollect()
def enter(self, requestStatus):
self.zoneId = requestStatus['zoneId']
self.fsm.enterInitialState()
messenger.send('enterHouse')
self.accept('doorDoneEvent', self.handleDoorDoneEvent)
self.accept('DistributedDoor_doorTrigger', self.handleDoorTrigger)
self._telemLimiter = TLGatherAllAvs('House', RotationLimitToH)
NametagGlobals.setWant2dNametags(True)
self.fsm.request(requestStatus['how'], [requestStatus])
def exit(self):
self.ignoreAll()
if hasattr(self, 'fsm'):
self.fsm.requestFinalState()
self._telemLimiter.destroy()
del self._telemLimiter
messenger.send('exitHouse')
NametagGlobals.setWant2dNametags(False)
def setState(self, state):
if hasattr(self, 'fsm'):
self.fsm.request(state)
def getZoneId(self):
return self.zoneId
def enterTutorial(self, requestStatus):
self.fsm.request('walk')
base.localAvatar.b_setParent(ToontownGlobals.SPRender)
globalClock.tick()
base.transitions.irisIn()
messenger.send('enterTutorialInterior')
def exitTutorial(self):
pass
def enterTeleportIn(self, requestStatus):
base.localAvatar.setPosHpr(2.5, 11.5, ToontownGlobals.FloorOffset, 45.0, 0.0, 0.0)
Place.Place.enterTeleportIn(self, requestStatus)
def enterTeleportOut(self, requestStatus):
Place.Place.enterTeleportOut(self, requestStatus, self.__teleportOutDone)
def __teleportOutDone(self, requestStatus):
if hasattr(self, 'fsm'):
self.fsm.requestFinalState()
self.notify.debug('House: teleportOutDone: requestStatus = %s' % requestStatus)
hoodId = requestStatus['hoodId']
zoneId = requestStatus['zoneId']
avId = requestStatus['avId']
shardId = requestStatus['shardId']
if hoodId == ToontownGlobals.MyEstate and zoneId == self.getZoneId():
self.fsm.request('teleportIn', [requestStatus])
elif hoodId == ToontownGlobals.MyEstate:
self.getEstateZoneAndGoHome(requestStatus)
else:
self.doneStatus = requestStatus
messenger.send(self.doneEvent, [self.doneStatus])
def goHomeFailed(self, task):
self.notifyUserGoHomeFailed()
self.ignore('setLocalEstateZone')
self.doneStatus['avId'] = -1
self.doneStatus['zoneId'] = self.getZoneId()
self.fsm.request('teleportIn', [self.doneStatus])
return Task.done
def exitTeleportOut(self):
Place.Place.exitTeleportOut(self)
def enterPurchase(self):
Place.Place.enterPurchase(self)
def exitPurchase(self):
Place.Place.exitPurchase(self)
def enterCloset(self):
base.localAvatar.b_setAnimState('neutral', 1)
self.accept('teleportQuery', self.handleTeleportQuery)
base.localAvatar.setTeleportAvailable(1)
base.localAvatar.laffMeter.start()
base.localAvatar.obscureMoveFurnitureButton(1)
base.localAvatar.startSleepWatch(self.__handleFallingAsleepCloset)
self.enablePeriodTimer()
def __handleFallingAsleepCloset(self, arg):
if hasattr(self, 'fsm'):
self.fsm.request('walk')
messenger.send('closetAsleep')
base.localAvatar.forceGotoSleep()
def exitCloset(self):
base.localAvatar.setTeleportAvailable(0)
self.ignore('teleportQuery')
base.localAvatar.laffMeter.stop()
base.localAvatar.obscureMoveFurnitureButton(-1)
base.localAvatar.stopSleepWatch()
self.disablePeriodTimer()
def enterBanking(self):
Place.Place.enterBanking(self)
def exitBanking(self):
Place.Place.exitBanking(self)
| {
"content_hash": "1ac5ab08b48b0284329a7b604d77f4a2",
"timestamp": "",
"source": "github",
"line_count": 181,
"max_line_length": 143,
"avg_line_length": 39.353591160220994,
"alnum_prop": 0.6528148252140952,
"repo_name": "linktlh/Toontown-journey",
"id": "ae6e27f39dec92352b8afe899690eca81e239154",
"size": "7123",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "toontown/estate/House.py",
"mode": "33261",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
"""Test module for starting the broker."""
import os
import sys
from tempfile import mkdtemp
from shutil import rmtree
from subprocess import Popen, PIPE
import unittest
if __name__ == "__main__":
import utils
utils.import_depends()
from aquilon.config import Config
class TestBrokerStart(unittest.TestCase):
config = None
@classmethod
def setUpClass(cls):
cls.config = Config()
def run_command(self, command, **kwargs):
p = Popen(command, stdout=PIPE, stderr=PIPE, **kwargs)
out, err = p.communicate()
self.assertEqual(p.returncode, 0,
"Command '%s' returned %d:"
"\nSTDOUT:\n@@@\n'%s'\n@@@\n"
"\nSTDERR:\n@@@\n'%s'\n@@@\n"
% (command, p.returncode, out, err))
def teststart(self):
# FIXME: Either remove any old pidfiles, or ignore it as a warning
# from stderr... or IMHO (daqscott) if pid files exist and are knc or
# python processes, kill -9 the pids and delete the files (with a
# warning message it tickles you)
aqd = os.path.join(self.config.get("broker", "srcdir"),
"lib", "aquilon", "unittest_patches.py")
pidfile = os.path.join(self.config.get("broker", "rundir"), "aqd.pid")
logfile = self.config.get("broker", "logfile")
# Specify aqd and options...
args = [sys.executable, aqd,
"--pidfile", pidfile, "--logfile", logfile]
if self.config.has_option("unittest", "profile"):
if self.config.getboolean("unittest", "profile"):
args.append("--profile")
args.append(os.path.join(self.config.get("broker", "logdir"),
"aqd.profile"))
args.append("--profiler=cProfile")
args.append("--savestats")
# And then aqd and options...
args.extend(["aqd", "--config", self.config.baseconfig])
if self.config.has_option("unittest", "coverage"):
if self.config.getboolean("unittest", "coverage"):
args.append("--coveragedir")
dir = os.path.join(self.config.get("broker", "logdir"), "coverage")
args.append(dir)
coveragerc = os.path.join(self.config.get("broker", "srcdir"),
"tests", "coverage.rc")
args.append("--coveragerc")
args.append(coveragerc)
self.run_command(args)
# FIXME: Check that it is listening on the correct port(s)...
# FIXME: If it fails, either cat the log file, or tell the user to try
# running '%s -bn aqd --config %s'%(aqd, self.config.baseconfig)
def testeventsstart(self):
# pidfile = os.path.join(self.config.get('broker', 'rundir'), 'read_events.pid')
read_events = os.path.join(self.config.get('broker', 'srcdir'),
'tests', 'read_events.py')
args = [sys.executable, read_events, '--store', '--daemon',
'--config', self.config.baseconfig]
self.run_command(args)
def testclonetemplateking(self):
source = self.config.get("unittest", "template_base")
if not source:
source = os.path.join(self.config.get("broker", "srcdir"),
"tests/templates")
dest = self.config.get("broker", "kingdir")
rmtree(dest, ignore_errors=True)
env = {}
env["PATH"] = os.environ.get("PATH", "")
git = self.config.lookup_tool("git")
if source.startswith("git://"):
# Easy case - just clone the source repository
start = None
if self.config.has_option("unittest", "template_alternate_prod"):
start = self.config.get("unittest", "template_alternate_prod").strip()
if not start:
start = "prod"
self.run_command([git, "clone", "--bare", "--branch", start,
"--single-branch", source, dest], env=env)
if start != "prod":
self.run_command([git, "branch", "-m", start, "prod"],
env=env, cwd=dest)
else:
# The source is just a directory somewhere. Create a temporary git
# repository which we can then clone
tmpdir = mkdtemp()
self.run_command(["rsync", "-aH", source + "/", tmpdir + "/"],
env=env)
self.run_command([git, "init"], cwd=tmpdir, env=env)
self.run_command([git, "add", "-A"], cwd=tmpdir, env=env)
self.run_command([git, "commit", "-m", "Initial commit"],
cwd=tmpdir, env=env)
# Create the prod branch
self.run_command([git, "checkout", "-b", "prod"], cwd=tmpdir, env=env)
self.run_command([git, "clone", "--bare", tmpdir, dest], env=env)
rmtree(tmpdir, ignore_errors=True)
if self.config.has_option("broker", "trash_branch"):
trash_branch = self.config.get("broker", "trash_branch")
self.run_command([git, "branch", trash_branch, "prod"],
env=env, cwd=dest)
# Set the default branch
self.run_command([git, "symbolic-ref", "HEAD", "refs/heads/prod"],
env=env, cwd=dest)
def testdisabletemplatetests(self):
kingdir = self.config.get("broker", "kingdir")
rundir = self.config.get("broker", "rundir")
git = self.config.lookup_tool("git")
env = {}
env["PATH"] = os.environ.get("PATH", "")
tempdir = mkdtemp(prefix="fixup", dir=rundir)
self.run_command([git, "clone", "--shared", kingdir, "template-king",
"--branch", "prod"], cwd=tempdir, env=env)
repodir = os.path.join(tempdir, "template-king")
if os.path.exists(os.path.join(repodir, "t", "Makefile")):
self.run_command([git, "rm", "-f", os.path.join("t", "Makefile")],
cwd=repodir, env=env)
self.run_command([git, "commit", "-m", "Removed t/Makefile"],
cwd=repodir, env=env)
for branch in ['prod']:
self.run_command([git, "push", "origin", "prod:%s" % branch],
cwd=repodir, env=env)
rmtree(tempdir, ignore_errors=True)
def testsetuppanclinks(self):
# Some tests want multiple versions of panc.jar available. Nothing
# depends on the behavior being different, so a couple symlinks will do
# the job
real_panc = self.config.get("unittest", "real_panc_location")
fake_panc_default = self.config.get("panc", "pan_compiler")
fake_panc_utpanc = self.config.get("panc", "pan_compiler",
vars={'version': 'utpanc'})
fake_panc_dir = os.path.dirname(fake_panc_default)
if not os.path.exists(fake_panc_dir):
os.makedirs(fake_panc_dir, 0o755)
for dst in (fake_panc_default, fake_panc_utpanc):
try:
os.unlink(dst)
except OSError:
pass
os.symlink(real_panc, dst)
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(TestBrokerStart)
unittest.TextTestRunner(verbosity=2).run(suite)
| {
"content_hash": "72638570f6de43a198a3b08d92ce823b",
"timestamp": "",
"source": "github",
"line_count": 185,
"max_line_length": 88,
"avg_line_length": 40.68648648648649,
"alnum_prop": 0.5354058721934369,
"repo_name": "guillaume-philippon/aquilon",
"id": "afd233cf5b218a92f4ed6e3dab642dc7bca021b1",
"size": "8277",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/broker/test_start.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "DIGITAL Command Language",
"bytes": "3791"
},
{
"name": "Makefile",
"bytes": "5024"
},
{
"name": "Mako",
"bytes": "3996"
},
{
"name": "PLSQL",
"bytes": "69088"
},
{
"name": "Perl",
"bytes": "5030"
},
{
"name": "Python",
"bytes": "4257490"
},
{
"name": "SQLPL",
"bytes": "869"
},
{
"name": "Shell",
"bytes": "22083"
}
],
"symlink_target": ""
} |
import asyncio
import base64
import datetime
import re
import uuid
from contextlib import contextmanager
from typing import Any, Dict, Iterator, List, Optional, Tuple, Union
from unittest import mock, skipUnless
from urllib import parse
import aioapns
import orjson
import responses
from django.conf import settings
from django.db import transaction
from django.db.models import F, Q
from django.http.response import ResponseHeaders
from django.test import override_settings
from django.utils.crypto import get_random_string
from django.utils.timezone import now
from requests.exceptions import ConnectionError
from requests.models import PreparedRequest
from analytics.lib.counts import CountStat, LoggingCountStat
from analytics.models import InstallationCount, RealmCount
from zerver.actions.message_delete import do_delete_messages
from zerver.actions.message_flags import do_mark_stream_messages_as_read, do_update_message_flags
from zerver.actions.user_settings import do_regenerate_api_key
from zerver.lib.avatar import absolute_avatar_url
from zerver.lib.exceptions import JsonableError
from zerver.lib.push_notifications import (
APNsContext,
DeviceToken,
UserPushIndentityCompat,
b64_to_hex,
get_apns_badge_count,
get_apns_badge_count_future,
get_apns_context,
get_message_payload_apns,
get_message_payload_gcm,
get_mobile_push_content,
handle_push_notification,
handle_remove_push_notification,
hex_to_b64,
modernize_apns_payload,
parse_gcm_options,
send_android_push_notification_to_user,
send_apple_push_notification,
send_notifications_to_bouncer,
)
from zerver.lib.remote_server import (
PushNotificationBouncerException,
PushNotificationBouncerRetryLaterError,
build_analytics_data,
send_analytics_to_remote_server,
send_to_push_bouncer,
)
from zerver.lib.response import json_response_from_error
from zerver.lib.soft_deactivation import do_soft_deactivate_users
from zerver.lib.test_classes import ZulipTestCase
from zerver.lib.test_helpers import mock_queue_publish
from zerver.lib.timestamp import datetime_to_timestamp
from zerver.lib.user_groups import create_user_group
from zerver.models import (
Message,
NotificationTriggers,
PushDeviceToken,
RealmAuditLog,
Recipient,
Stream,
Subscription,
UserMessage,
get_client,
get_display_recipient,
get_realm,
get_stream,
get_user_profile_by_id,
)
from zilencer.models import RemoteZulipServerAuditLog
if settings.ZILENCER_ENABLED:
from zilencer.models import (
RemoteInstallationCount,
RemotePushDeviceToken,
RemoteRealmAuditLog,
RemoteRealmCount,
RemoteZulipServer,
)
@skipUnless(settings.ZILENCER_ENABLED, "requires zilencer")
class BouncerTestCase(ZulipTestCase):
def setUp(self) -> None:
self.server_uuid = "6cde5f7a-1f7e-4978-9716-49f69ebfc9fe"
self.server = RemoteZulipServer(
uuid=self.server_uuid,
api_key="magic_secret_api_key",
hostname="demo.example.com",
last_updated=now(),
)
self.server.save()
super().setUp()
def tearDown(self) -> None:
RemoteZulipServer.objects.filter(uuid=self.server_uuid).delete()
super().tearDown()
def request_callback(self, request: PreparedRequest) -> Tuple[int, ResponseHeaders, bytes]:
assert isinstance(request.body, str) or request.body is None
params: Dict[str, List[str]] = parse.parse_qs(request.body)
# In Python 3, the values of the dict from `parse_qs` are
# in a list, because there might be multiple values.
# But since we are sending values with no repeated keys,
# we can safely pick the first value.
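# For example, parse_qs("token=abc&kind=2") returns
# {"token": ["abc"], "kind": ["2"]}, which the comprehension below
# flattens to {"token": "abc", "kind": "2"}.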
data = {k: v[0] for k, v in params.items()}
assert request.url is not None # allow mypy to infer url is present.
assert settings.PUSH_NOTIFICATION_BOUNCER_URL is not None
local_url = request.url.replace(settings.PUSH_NOTIFICATION_BOUNCER_URL, "")
if request.method == "POST":
result = self.uuid_post(self.server_uuid, local_url, data, subdomain="")
elif request.method == "GET":
result = self.uuid_get(self.server_uuid, local_url, data, subdomain="")
return (result.status_code, result.headers, result.content)
def add_mock_response(self) -> None:
# Match any endpoint with the PUSH_NOTIFICATION_BOUNCER_URL.
assert settings.PUSH_NOTIFICATION_BOUNCER_URL is not None
COMPILED_URL = re.compile(settings.PUSH_NOTIFICATION_BOUNCER_URL + ".*")
responses.add_callback(responses.POST, COMPILED_URL, callback=self.request_callback)
responses.add_callback(responses.GET, COMPILED_URL, callback=self.request_callback)
def get_generic_payload(self, method: str = "register") -> Dict[str, Any]:
user_id = 10
token = "111222"
token_kind = PushDeviceToken.GCM
return {"user_id": user_id, "token": token, "token_kind": token_kind}
class PushBouncerNotificationTest(BouncerTestCase):
DEFAULT_SUBDOMAIN = ""
def test_unregister_remote_push_user_params(self) -> None:
token = "111222"
token_kind = PushDeviceToken.GCM
endpoint = "/api/v1/remotes/push/unregister"
result = self.uuid_post(self.server_uuid, endpoint, {"token_kind": token_kind})
self.assert_json_error(result, "Missing 'token' argument")
result = self.uuid_post(self.server_uuid, endpoint, {"token": token})
self.assert_json_error(result, "Missing 'token_kind' argument")
# We need the root ('') subdomain to be in use for this next
# test, since the push bouncer API is only available there:
hamlet = self.example_user("hamlet")
realm = get_realm("zulip")
realm.string_id = ""
realm.save()
result = self.api_post(
hamlet,
endpoint,
dict(user_id=15, token=token, token_kind=token_kind),
subdomain="",
)
self.assert_json_error(result, "Must validate with valid Zulip server API key")
# Try with deactivated remote servers
self.server.deactivated = True
self.server.save()
result = self.uuid_post(self.server_uuid, endpoint, self.get_generic_payload("unregister"))
self.assert_json_error_contains(
result,
"The mobile push notification service registration for your server has been deactivated",
401,
)
def test_register_remote_push_user_params(self) -> None:
token = "111222"
user_id = 11
token_kind = PushDeviceToken.GCM
endpoint = "/api/v1/remotes/push/register"
result = self.uuid_post(
self.server_uuid, endpoint, {"user_id": user_id, "token_kind": token_kind}
)
self.assert_json_error(result, "Missing 'token' argument")
result = self.uuid_post(self.server_uuid, endpoint, {"user_id": user_id, "token": token})
self.assert_json_error(result, "Missing 'token_kind' argument")
result = self.uuid_post(
self.server_uuid, endpoint, {"token": token, "token_kind": token_kind}
)
self.assert_json_error(result, "Missing user_id or user_uuid")
result = self.uuid_post(
self.server_uuid,
endpoint,
{"user_id": user_id, "user_uuid": "xxx", "token": token, "token_kind": token_kind},
)
self.assert_json_error(result, "Specify only one of user_id or user_uuid")
result = self.uuid_post(
self.server_uuid, endpoint, {"user_id": user_id, "token": token, "token_kind": 17}
)
self.assert_json_error(result, "Invalid token type")
hamlet = self.example_user("hamlet")
# We need the root ('') subdomain to be in use for this next
# test, since the push bouncer API is only available there:
realm = get_realm("zulip")
realm.string_id = ""
realm.save()
result = self.api_post(
hamlet,
endpoint,
dict(user_id=user_id, token_kind=token_kind, token=token),
)
self.assert_json_error(result, "Must validate with valid Zulip server API key")
result = self.uuid_post(
self.server_uuid,
endpoint,
dict(user_id=user_id, token_kind=token_kind, token=token),
subdomain="zulip",
)
self.assert_json_error(
result, "Invalid subdomain for push notifications bouncer", status_code=401
)
        # We do a bit of hackery on the API_KEYS cache just to simplify
        # sending an incorrect API key.
self.API_KEYS[self.server_uuid] = "invalid"
result = self.uuid_post(
self.server_uuid, endpoint, dict(user_id=user_id, token_kind=token_kind, token=token)
)
self.assert_json_error(
result,
"Zulip server auth failure: key does not match role 6cde5f7a-1f7e-4978-9716-49f69ebfc9fe",
status_code=401,
)
del self.API_KEYS[self.server_uuid]
self.API_KEYS["invalid_uuid"] = "invalid"
result = self.uuid_post(
"invalid_uuid",
endpoint,
dict(user_id=user_id, token_kind=token_kind, token=token),
subdomain="zulip",
)
self.assert_json_error(
result,
"Zulip server auth failure: invalid_uuid is not registered -- did you run `manage.py register_server`?",
status_code=401,
)
del self.API_KEYS["invalid_uuid"]
credentials_uuid = str(uuid.uuid4())
credentials = "{}:{}".format(credentials_uuid, "invalid")
api_auth = "Basic " + base64.b64encode(credentials.encode()).decode()
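        # Sketch of the resulting header, with hypothetical credentials:
        #     base64.b64encode(b"user:pass").decode() == "dXNlcjpwYXNz"
        #     HTTP_AUTHORIZATION = "Basic dXNlcjpwYXNz"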
result = self.client_post(
endpoint,
{"user_id": user_id, "token_kind": token_kind, "token": token},
HTTP_AUTHORIZATION=api_auth,
)
self.assert_json_error(
result,
f"Zulip server auth failure: {credentials_uuid} is not registered -- did you run `manage.py register_server`?",
status_code=401,
)
# Try with deactivated remote servers
self.server.deactivated = True
self.server.save()
result = self.uuid_post(self.server_uuid, endpoint, self.get_generic_payload("register"))
self.assert_json_error_contains(
result,
"The mobile push notification service registration for your server has been deactivated",
401,
)
def test_remote_push_user_endpoints(self) -> None:
endpoints = [
("/api/v1/remotes/push/register", "register"),
("/api/v1/remotes/push/unregister", "unregister"),
]
for endpoint, method in endpoints:
payload = self.get_generic_payload(method)
            # Verify that a correct request succeeds.
result = self.uuid_post(self.server_uuid, endpoint, payload)
self.assert_json_success(result)
remote_tokens = RemotePushDeviceToken.objects.filter(token=payload["token"])
token_count = 1 if method == "register" else 0
self.assert_length(remote_tokens, token_count)
# Try adding/removing tokens that are too big...
broken_token = "x" * 5000 # too big
payload["token"] = broken_token
result = self.uuid_post(self.server_uuid, endpoint, payload)
self.assert_json_error(result, "Empty or invalid length token")
def test_send_notification_endpoint(self) -> None:
hamlet = self.example_user("hamlet")
server = RemoteZulipServer.objects.get(uuid=self.server_uuid)
token = "aaaa"
android_tokens = []
for i in ["aa", "bb"]:
android_tokens.append(
RemotePushDeviceToken.objects.create(
kind=RemotePushDeviceToken.GCM,
token=hex_to_b64(token + i),
user_id=hamlet.id,
server=server,
)
)
apple_token = RemotePushDeviceToken.objects.create(
kind=RemotePushDeviceToken.APNS,
token=hex_to_b64(token),
user_id=hamlet.id,
server=server,
)
many_ids = ",".join(str(i) for i in range(1, 250))
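        # The bouncer truncates "remove" payloads to the newest 200 message
        # IDs, which is why the assertions below expect ids 50..249 rather
        # than the full 1..249 sent here.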
payload = {
"user_id": hamlet.id,
"gcm_payload": {"event": "remove", "zulip_message_ids": many_ids},
"apns_payload": {
"badge": 0,
"custom": {"zulip": {"event": "remove", "zulip_message_ids": many_ids}},
},
"gcm_options": {},
}
with mock.patch(
"zilencer.views.send_android_push_notification"
) as android_push, mock.patch(
"zilencer.views.send_apple_push_notification"
) as apple_push, self.assertLogs(
"zilencer.views", level="INFO"
) as logger:
result = self.uuid_post(
self.server_uuid,
"/api/v1/remotes/push/notify",
payload,
content_type="application/json",
)
data = self.assert_json_success(result)
self.assertEqual(
{"result": "success", "msg": "", "total_android_devices": 2, "total_apple_devices": 1},
data,
)
self.assertEqual(
logger.output,
[
"INFO:zilencer.views:"
f"Sending mobile push notifications for remote user 6cde5f7a-1f7e-4978-9716-49f69ebfc9fe:<id:{hamlet.id}>: "
"2 via FCM devices, 1 via APNs devices"
],
)
user_identity = UserPushIndentityCompat(user_id=hamlet.id)
apple_push.assert_called_once_with(
user_identity,
[apple_token],
{
"badge": 0,
"custom": {
"zulip": {
"event": "remove",
"zulip_message_ids": ",".join(str(i) for i in range(50, 250)),
}
},
},
remote=server,
)
android_push.assert_called_once_with(
user_identity,
list(reversed(android_tokens)),
{"event": "remove", "zulip_message_ids": ",".join(str(i) for i in range(50, 250))},
{},
remote=server,
)
def test_remote_push_unregister_all(self) -> None:
payload = self.get_generic_payload("register")
        # Verify that a correct request succeeds.
result = self.uuid_post(self.server_uuid, "/api/v1/remotes/push/register", payload)
self.assert_json_success(result)
remote_tokens = RemotePushDeviceToken.objects.filter(token=payload["token"])
self.assert_length(remote_tokens, 1)
result = self.uuid_post(
self.server_uuid, "/api/v1/remotes/push/unregister/all", dict(user_id=10)
)
self.assert_json_success(result)
remote_tokens = RemotePushDeviceToken.objects.filter(token=payload["token"])
self.assert_length(remote_tokens, 0)
def test_invalid_apns_token(self) -> None:
endpoints = [
("/api/v1/remotes/push/register", "apple-token"),
]
for endpoint, method in endpoints:
payload = {
"user_id": 10,
"token": "xyz uses non-hex characters",
"token_kind": PushDeviceToken.APNS,
}
result = self.uuid_post(self.server_uuid, endpoint, payload)
self.assert_json_error(result, "Invalid APNS token")
@override_settings(PUSH_NOTIFICATION_BOUNCER_URL="https://push.zulip.org.example.com")
@responses.activate
def test_push_bouncer_api(self) -> None:
"""This is a variant of the below test_push_api, but using the full
push notification bouncer flow
"""
self.add_mock_response()
user = self.example_user("cordelia")
self.login_user(user)
server = RemoteZulipServer.objects.get(uuid=self.server_uuid)
endpoints = [
("/json/users/me/apns_device_token", "apple-tokenaz", RemotePushDeviceToken.APNS),
("/json/users/me/android_gcm_reg_id", "android-token", RemotePushDeviceToken.GCM),
]
# Test error handling
for endpoint, token, kind in endpoints:
# Try adding/removing tokens that are too big...
broken_token = "a" * 5000 # too big
result = self.client_post(
endpoint, {"token": broken_token, "token_kind": kind}, subdomain="zulip"
)
self.assert_json_error(result, "Empty or invalid length token")
result = self.client_delete(
endpoint, {"token": broken_token, "token_kind": kind}, subdomain="zulip"
)
self.assert_json_error(result, "Empty or invalid length token")
# Try to remove a non-existent token...
result = self.client_delete(
endpoint, {"token": "abcd1234", "token_kind": kind}, subdomain="zulip"
)
self.assert_json_error(result, "Token does not exist")
assert settings.PUSH_NOTIFICATION_BOUNCER_URL is not None
URL = settings.PUSH_NOTIFICATION_BOUNCER_URL + "/api/v1/remotes/push/register"
with responses.RequestsMock() as resp, self.assertLogs(level="ERROR") as error_log:
resp.add(responses.POST, URL, body=ConnectionError(), status=502)
result = self.client_post(endpoint, {"token": token}, subdomain="zulip")
self.assert_json_error(
result,
"ConnectionError while trying to connect to push notification bouncer",
502,
)
self.assertIn(
f"ERROR:django.request:Bad Gateway: {endpoint}\nTraceback",
error_log.output[0],
)
with responses.RequestsMock() as resp, self.assertLogs(level="WARNING") as warn_log:
resp.add(responses.POST, URL, body=orjson.dumps({"msg": "error"}), status=500)
result = self.client_post(endpoint, {"token": token}, subdomain="zulip")
self.assert_json_error(result, "Received 500 from push notification bouncer", 502)
self.assertEqual(
warn_log.output[0],
"WARNING:root:Received 500 from push notification bouncer",
)
self.assertIn(
f"ERROR:django.request:Bad Gateway: {endpoint}\nTraceback", warn_log.output[1]
)
# Add tokens
for endpoint, token, kind in endpoints:
# Test that we can push twice
result = self.client_post(endpoint, {"token": token}, subdomain="zulip")
self.assert_json_success(result)
result = self.client_post(endpoint, {"token": token}, subdomain="zulip")
self.assert_json_success(result)
tokens = list(
RemotePushDeviceToken.objects.filter(
user_uuid=user.uuid, token=token, server=server
)
)
self.assert_length(tokens, 1)
self.assertEqual(tokens[0].token, token)
# User should have tokens for both devices now.
tokens = list(RemotePushDeviceToken.objects.filter(user_uuid=user.uuid, server=server))
self.assert_length(tokens, 2)
# Remove tokens
for endpoint, token, kind in endpoints:
result = self.client_delete(endpoint, {"token": token}, subdomain="zulip")
self.assert_json_success(result)
tokens = list(
RemotePushDeviceToken.objects.filter(
user_uuid=user.uuid, token=token, server=server
)
)
self.assert_length(tokens, 0)
# Re-add copies of those tokens
for endpoint, token, kind in endpoints:
result = self.client_post(endpoint, {"token": token}, subdomain="zulip")
self.assert_json_success(result)
tokens = list(RemotePushDeviceToken.objects.filter(user_uuid=user.uuid, server=server))
self.assert_length(tokens, 2)
# Now we want to remove them using the bouncer after an API key change.
# First we test error handling in case of issues with the bouncer:
with mock.patch(
"zerver.worker.queue_processors.clear_push_device_tokens",
side_effect=PushNotificationBouncerRetryLaterError("test"),
), mock.patch("zerver.worker.queue_processors.retry_event") as mock_retry:
do_regenerate_api_key(user, user)
mock_retry.assert_called()
        # We didn't manage to communicate with the bouncer, so the tokens are still there:
tokens = list(RemotePushDeviceToken.objects.filter(user_uuid=user.uuid, server=server))
self.assert_length(tokens, 2)
# Now we successfully remove them:
do_regenerate_api_key(user, user)
tokens = list(RemotePushDeviceToken.objects.filter(user_uuid=user.uuid, server=server))
self.assert_length(tokens, 0)
class AnalyticsBouncerTest(BouncerTestCase):
TIME_ZERO = datetime.datetime(1988, 3, 14, tzinfo=datetime.timezone.utc)
@override_settings(PUSH_NOTIFICATION_BOUNCER_URL="https://push.zulip.org.example.com")
@responses.activate
def test_analytics_api(self) -> None:
"""This is a variant of the below test_push_api, but using the full
push notification bouncer flow
"""
assert settings.PUSH_NOTIFICATION_BOUNCER_URL is not None
ANALYTICS_URL = settings.PUSH_NOTIFICATION_BOUNCER_URL + "/api/v1/remotes/server/analytics"
ANALYTICS_STATUS_URL = ANALYTICS_URL + "/status"
user = self.example_user("hamlet")
end_time = self.TIME_ZERO
with responses.RequestsMock() as resp, self.assertLogs(level="WARNING") as mock_warning:
resp.add(responses.GET, ANALYTICS_STATUS_URL, body=ConnectionError())
send_analytics_to_remote_server()
self.assertIn(
"WARNING:root:ConnectionError while trying to connect to push notification bouncer\nTraceback ",
mock_warning.output[0],
)
self.assertTrue(resp.assert_call_count(ANALYTICS_STATUS_URL, 1))
self.add_mock_response()
# Send any existing data over, so that we can start the test with a "clean" slate
audit_log = RealmAuditLog.objects.all().order_by("id").last()
assert audit_log is not None
audit_log_max_id = audit_log.id
send_analytics_to_remote_server()
self.assertTrue(responses.assert_call_count(ANALYTICS_STATUS_URL, 1))
remote_audit_log_count = RemoteRealmAuditLog.objects.count()
self.assertEqual(RemoteRealmCount.objects.count(), 0)
self.assertEqual(RemoteInstallationCount.objects.count(), 0)
def check_counts(
analytics_status_mock_request_call_count: int,
analytics_mock_request_call_count: int,
remote_realm_count: int,
remote_installation_count: int,
remote_realm_audit_log: int,
) -> None:
self.assertTrue(
responses.assert_call_count(
ANALYTICS_STATUS_URL, analytics_status_mock_request_call_count
)
)
self.assertTrue(
responses.assert_call_count(ANALYTICS_URL, analytics_mock_request_call_count)
)
self.assertEqual(RemoteRealmCount.objects.count(), remote_realm_count)
self.assertEqual(RemoteInstallationCount.objects.count(), remote_installation_count)
self.assertEqual(
RemoteRealmAuditLog.objects.count(), remote_audit_log_count + remote_realm_audit_log
)
        # Create some rows we'll send to the remote server.
realm_stat = LoggingCountStat("invites_sent::day", RealmCount, CountStat.DAY)
RealmCount.objects.create(
realm=user.realm, property=realm_stat.property, end_time=end_time, value=5
)
InstallationCount.objects.create(
property=realm_stat.property,
end_time=end_time,
value=5,
# We set a subgroup here to work around:
# https://github.com/zulip/zulip/issues/12362
subgroup="test_subgroup",
)
# Event type in SYNCED_BILLING_EVENTS -- should be included
RealmAuditLog.objects.create(
realm=user.realm,
modified_user=user,
event_type=RealmAuditLog.USER_CREATED,
event_time=end_time,
extra_data="data",
)
# Event type not in SYNCED_BILLING_EVENTS -- should not be included
RealmAuditLog.objects.create(
realm=user.realm,
modified_user=user,
event_type=RealmAuditLog.REALM_LOGO_CHANGED,
event_time=end_time,
extra_data="data",
)
self.assertEqual(RealmCount.objects.count(), 1)
self.assertEqual(InstallationCount.objects.count(), 1)
self.assertEqual(RealmAuditLog.objects.filter(id__gt=audit_log_max_id).count(), 2)
send_analytics_to_remote_server()
check_counts(2, 2, 1, 1, 1)
# Test having no new rows
send_analytics_to_remote_server()
check_counts(3, 2, 1, 1, 1)
# Test only having new RealmCount rows
RealmCount.objects.create(
realm=user.realm,
property=realm_stat.property,
end_time=end_time + datetime.timedelta(days=1),
value=6,
)
RealmCount.objects.create(
realm=user.realm,
property=realm_stat.property,
end_time=end_time + datetime.timedelta(days=2),
value=9,
)
send_analytics_to_remote_server()
check_counts(4, 3, 3, 1, 1)
# Test only having new InstallationCount rows
InstallationCount.objects.create(
property=realm_stat.property, end_time=end_time + datetime.timedelta(days=1), value=6
)
send_analytics_to_remote_server()
check_counts(5, 4, 3, 2, 1)
# Test only having new RealmAuditLog rows
# Non-synced event
RealmAuditLog.objects.create(
realm=user.realm,
modified_user=user,
event_type=RealmAuditLog.REALM_LOGO_CHANGED,
event_time=end_time,
extra_data="data",
)
send_analytics_to_remote_server()
check_counts(6, 4, 3, 2, 1)
# Synced event
RealmAuditLog.objects.create(
realm=user.realm,
modified_user=user,
event_type=RealmAuditLog.USER_REACTIVATED,
event_time=end_time,
extra_data="data",
)
send_analytics_to_remote_server()
check_counts(7, 5, 3, 2, 2)
(realm_count_data, installation_count_data, realmauditlog_data) = build_analytics_data(
RealmCount.objects.all(), InstallationCount.objects.all(), RealmAuditLog.objects.all()
)
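        # Re-posting rows that have already been synced moves remote_id
        # backwards, which the bouncer rejects as out-of-order data: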
result = self.uuid_post(
self.server_uuid,
"/api/v1/remotes/server/analytics",
{
"realm_counts": orjson.dumps(realm_count_data).decode(),
"installation_counts": orjson.dumps(installation_count_data).decode(),
"realmauditlog_rows": orjson.dumps(realmauditlog_data).decode(),
},
subdomain="",
)
self.assert_json_error(result, "Data is out of order.")
with mock.patch("zilencer.views.validate_incoming_table_data"), self.assertLogs(
level="WARNING"
) as warn_log:
            # We need to wrap this in a transaction so that the IntegrityError
            # raised inside doesn't break the outer unittest transaction.
with transaction.atomic():
result = self.uuid_post(
self.server_uuid,
"/api/v1/remotes/server/analytics",
{
"realm_counts": orjson.dumps(realm_count_data).decode(),
"installation_counts": orjson.dumps(installation_count_data).decode(),
"realmauditlog_rows": orjson.dumps(realmauditlog_data).decode(),
},
subdomain="",
)
self.assert_json_error(result, "Invalid data.")
self.assertEqual(
warn_log.output,
[
"WARNING:root:Invalid data saving zilencer_remoteinstallationcount for server demo.example.com/6cde5f7a-1f7e-4978-9716-49f69ebfc9fe"
],
)
@override_settings(PUSH_NOTIFICATION_BOUNCER_URL="https://push.zulip.org.example.com")
@responses.activate
def test_analytics_api_invalid(self) -> None:
"""This is a variant of the below test_push_api, but using the full
push notification bouncer flow
"""
self.add_mock_response()
user = self.example_user("hamlet")
end_time = self.TIME_ZERO
realm_stat = LoggingCountStat("invalid count stat", RealmCount, CountStat.DAY)
RealmCount.objects.create(
realm=user.realm, property=realm_stat.property, end_time=end_time, value=5
)
self.assertEqual(RealmCount.objects.count(), 1)
self.assertEqual(RemoteRealmCount.objects.count(), 0)
with self.assertLogs(level="WARNING") as m:
send_analytics_to_remote_server()
self.assertEqual(m.output, ["WARNING:root:Invalid property invalid count stat"])
self.assertEqual(RemoteRealmCount.objects.count(), 0)
# Servers on Zulip 2.0.6 and earlier only send realm_counts and installation_counts data,
# and don't send realmauditlog_rows. Make sure that continues to work.
@override_settings(PUSH_NOTIFICATION_BOUNCER_URL="https://push.zulip.org.example.com")
@responses.activate
def test_old_two_table_format(self) -> None:
self.add_mock_response()
# Send fixture generated with Zulip 2.0 code
send_to_push_bouncer(
"POST",
"server/analytics",
{
"realm_counts": '[{"id":1,"property":"invites_sent::day","subgroup":null,"end_time":574300800.0,"value":5,"realm":2}]',
"installation_counts": "[]",
"version": '"2.0.6+git"',
},
)
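        # Note that each field above is itself a JSON-encoded string (e.g.
        # version is the JSON string '"2.0.6+git"'), matching how 2.0.x
        # servers serialized these POST fields.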
assert settings.PUSH_NOTIFICATION_BOUNCER_URL is not None
ANALYTICS_URL = settings.PUSH_NOTIFICATION_BOUNCER_URL + "/api/v1/remotes/server/analytics"
self.assertTrue(responses.assert_call_count(ANALYTICS_URL, 1))
self.assertEqual(RemoteRealmCount.objects.count(), 1)
self.assertEqual(RemoteInstallationCount.objects.count(), 0)
self.assertEqual(RemoteRealmAuditLog.objects.count(), 0)
# Make sure we aren't sending data we don't mean to, even if we don't store it.
@override_settings(PUSH_NOTIFICATION_BOUNCER_URL="https://push.zulip.org.example.com")
@responses.activate
def test_only_sending_intended_realmauditlog_data(self) -> None:
self.add_mock_response()
user = self.example_user("hamlet")
# Event type in SYNCED_BILLING_EVENTS -- should be included
RealmAuditLog.objects.create(
realm=user.realm,
modified_user=user,
event_type=RealmAuditLog.USER_REACTIVATED,
event_time=self.TIME_ZERO,
extra_data="data",
)
# Event type not in SYNCED_BILLING_EVENTS -- should not be included
RealmAuditLog.objects.create(
realm=user.realm,
modified_user=user,
event_type=RealmAuditLog.REALM_LOGO_CHANGED,
event_time=self.TIME_ZERO,
extra_data="data",
)
# send_analytics_to_remote_server calls send_to_push_bouncer twice.
# We need to distinguish the first and second calls.
first_call = True
def check_for_unwanted_data(*args: Any) -> Any:
nonlocal first_call
if first_call:
first_call = False
else:
# Test that we're respecting SYNCED_BILLING_EVENTS
self.assertIn(f'"event_type":{RealmAuditLog.USER_REACTIVATED}', str(args))
self.assertNotIn(f'"event_type":{RealmAuditLog.REALM_LOGO_CHANGED}', str(args))
# Test that we're respecting REALMAUDITLOG_PUSHED_FIELDS
self.assertIn("backfilled", str(args))
self.assertNotIn("modified_user", str(args))
return send_to_push_bouncer(*args)
with mock.patch(
"zerver.lib.remote_server.send_to_push_bouncer", side_effect=check_for_unwanted_data
):
send_analytics_to_remote_server()
@override_settings(PUSH_NOTIFICATION_BOUNCER_URL="https://push.zulip.org.example.com")
@responses.activate
def test_realmauditlog_data_mapping(self) -> None:
self.add_mock_response()
user = self.example_user("hamlet")
log_entry = RealmAuditLog.objects.create(
realm=user.realm,
modified_user=user,
backfilled=True,
event_type=RealmAuditLog.USER_REACTIVATED,
event_time=self.TIME_ZERO,
extra_data="data",
)
send_analytics_to_remote_server()
remote_log_entry = RemoteRealmAuditLog.objects.order_by("id").last()
assert remote_log_entry is not None
self.assertEqual(str(remote_log_entry.server.uuid), self.server_uuid)
self.assertEqual(remote_log_entry.remote_id, log_entry.id)
self.assertEqual(remote_log_entry.event_time, self.TIME_ZERO)
self.assertEqual(remote_log_entry.backfilled, True)
self.assertEqual(remote_log_entry.extra_data, "data")
self.assertEqual(remote_log_entry.event_type, RealmAuditLog.USER_REACTIVATED)
class PushNotificationTest(BouncerTestCase):
def setUp(self) -> None:
super().setUp()
self.user_profile = self.example_user("hamlet")
self.sending_client = get_client("test")
self.sender = self.example_user("hamlet")
def get_message(self, type: int, type_id: int = 100) -> Message:
recipient, _ = Recipient.objects.get_or_create(
type_id=type_id,
type=type,
)
message = Message(
sender=self.sender,
recipient=recipient,
content="This is test content",
rendered_content="This is test content",
date_sent=now(),
sending_client=self.sending_client,
)
message.set_topic_name("Test topic")
message.save()
return message
@contextmanager
def mock_apns(self) -> Iterator[APNsContext]:
apns_context = APNsContext(apns=mock.Mock(), loop=asyncio.new_event_loop())
try:
with mock.patch("zerver.lib.push_notifications.get_apns_context") as mock_get:
mock_get.return_value = apns_context
yield apns_context
finally:
apns_context.loop.close()
def setup_apns_tokens(self) -> None:
self.tokens = ["aaaa", "bbbb"]
for token in self.tokens:
PushDeviceToken.objects.create(
kind=PushDeviceToken.APNS,
token=hex_to_b64(token),
user=self.user_profile,
ios_app_id=settings.ZULIP_IOS_APP_ID,
)
self.remote_tokens = [("cccc", "ffff")]
for id_token, uuid_token in self.remote_tokens:
# We want to set up both types of RemotePushDeviceToken here:
            # the legacy one with user_id and the new one with user_uuid.
# This allows tests to work with either, without needing to
# do their own setup.
RemotePushDeviceToken.objects.create(
kind=RemotePushDeviceToken.APNS,
token=hex_to_b64(id_token),
user_id=self.user_profile.id,
server=RemoteZulipServer.objects.get(uuid=self.server_uuid),
)
RemotePushDeviceToken.objects.create(
kind=RemotePushDeviceToken.APNS,
token=hex_to_b64(uuid_token),
user_uuid=self.user_profile.uuid,
server=RemoteZulipServer.objects.get(uuid=self.server_uuid),
)
def setup_gcm_tokens(self) -> None:
self.gcm_tokens = ["1111", "2222"]
for token in self.gcm_tokens:
PushDeviceToken.objects.create(
kind=PushDeviceToken.GCM,
token=hex_to_b64(token),
user=self.user_profile,
ios_app_id=None,
)
self.remote_gcm_tokens = [("dddd", "eeee")]
for id_token, uuid_token in self.remote_gcm_tokens:
RemotePushDeviceToken.objects.create(
kind=RemotePushDeviceToken.GCM,
token=hex_to_b64(id_token),
user_id=self.user_profile.id,
server=RemoteZulipServer.objects.get(uuid=self.server_uuid),
)
RemotePushDeviceToken.objects.create(
kind=RemotePushDeviceToken.GCM,
token=hex_to_b64(uuid_token),
user_uuid=self.user_profile.uuid,
server=RemoteZulipServer.objects.get(uuid=self.server_uuid),
)
class HandlePushNotificationTest(PushNotificationTest):
DEFAULT_SUBDOMAIN = ""
def request_callback(self, request: PreparedRequest) -> Tuple[int, ResponseHeaders, bytes]:
assert request.url is not None # allow mypy to infer url is present.
assert settings.PUSH_NOTIFICATION_BOUNCER_URL is not None
local_url = request.url.replace(settings.PUSH_NOTIFICATION_BOUNCER_URL, "")
assert isinstance(request.body, bytes)
result = self.uuid_post(
self.server_uuid, local_url, request.body, content_type="application/json"
)
return (result.status_code, result.headers, result.content)
@override_settings(PUSH_NOTIFICATION_BOUNCER_URL="https://push.zulip.org.example.com")
@responses.activate
def test_end_to_end(self) -> None:
self.add_mock_response()
self.setup_apns_tokens()
self.setup_gcm_tokens()
message = self.get_message(Recipient.PERSONAL, type_id=1)
UserMessage.objects.create(
user_profile=self.user_profile,
message=message,
)
missed_message = {
"message_id": message.id,
"trigger": "private_message",
}
with mock.patch(
"zerver.lib.push_notifications.gcm_client"
) as mock_gcm, self.mock_apns() as apns_context, self.assertLogs(
"zerver.lib.push_notifications", level="INFO"
) as pn_logger, self.assertLogs(
"zilencer.views", level="INFO"
) as views_logger:
apns_devices = [
(b64_to_hex(device.token), device.ios_app_id, device.token)
for device in RemotePushDeviceToken.objects.filter(kind=PushDeviceToken.APNS)
]
gcm_devices = [
(b64_to_hex(device.token), device.ios_app_id, device.token)
for device in RemotePushDeviceToken.objects.filter(kind=PushDeviceToken.GCM)
]
mock_gcm.json_request.return_value = {
"success": {device[2]: message.id for device in gcm_devices}
}
apns_context.apns.send_notification = mock.AsyncMock()
apns_context.apns.send_notification.return_value.is_successful = True
handle_push_notification(self.user_profile.id, missed_message)
self.assertEqual(
views_logger.output,
[
"INFO:zilencer.views:"
f"Sending mobile push notifications for remote user 6cde5f7a-1f7e-4978-9716-49f69ebfc9fe:<id:{str(self.user_profile.id)}><uuid:{str(self.user_profile.uuid)}>: "
f"{len(gcm_devices)} via FCM devices, {len(apns_devices)} via APNs devices"
],
)
for _, _, token in apns_devices:
self.assertIn(
"INFO:zerver.lib.push_notifications:"
f"APNs: Success sending for user <id:{str(self.user_profile.id)}><uuid:{str(self.user_profile.uuid)}> to device {token}",
pn_logger.output,
)
for _, _, token in gcm_devices:
self.assertIn(
"INFO:zerver.lib.push_notifications:" f"GCM: Sent {token} as {message.id}",
pn_logger.output,
)
@override_settings(PUSH_NOTIFICATION_BOUNCER_URL="https://push.zulip.org.example.com")
@responses.activate
def test_unregistered_client(self) -> None:
self.add_mock_response()
self.setup_apns_tokens()
self.setup_gcm_tokens()
message = self.get_message(Recipient.PERSONAL, type_id=1)
UserMessage.objects.create(
user_profile=self.user_profile,
message=message,
)
missed_message = {
"message_id": message.id,
"trigger": "private_message",
}
with mock.patch(
"zerver.lib.push_notifications.gcm_client"
) as mock_gcm, self.mock_apns() as apns_context, self.assertLogs(
"zerver.lib.push_notifications", level="INFO"
) as pn_logger, self.assertLogs(
"zilencer.views", level="INFO"
) as views_logger:
apns_devices = [
(b64_to_hex(device.token), device.ios_app_id, device.token)
for device in RemotePushDeviceToken.objects.filter(kind=PushDeviceToken.APNS)
]
gcm_devices = [
(b64_to_hex(device.token), device.ios_app_id, device.token)
for device in RemotePushDeviceToken.objects.filter(kind=PushDeviceToken.GCM)
]
mock_gcm.json_request.return_value = {"success": {gcm_devices[0][2]: message.id}}
apns_context.apns.send_notification = mock.AsyncMock()
apns_context.apns.send_notification.return_value.is_successful = False
apns_context.apns.send_notification.return_value.description = "Unregistered"
handle_push_notification(self.user_profile.id, missed_message)
self.assertEqual(
views_logger.output,
[
"INFO:zilencer.views:"
f"Sending mobile push notifications for remote user 6cde5f7a-1f7e-4978-9716-49f69ebfc9fe:<id:{str(self.user_profile.id)}><uuid:{str(self.user_profile.uuid)}>: "
f"{len(gcm_devices)} via FCM devices, {len(apns_devices)} via APNs devices"
],
)
for _, _, token in apns_devices:
self.assertIn(
"INFO:zerver.lib.push_notifications:"
f"APNs: Removing invalid/expired token {token} (Unregistered)",
pn_logger.output,
)
self.assertEqual(
RemotePushDeviceToken.objects.filter(kind=PushDeviceToken.APNS).count(), 0
)
@override_settings(PUSH_NOTIFICATION_BOUNCER_URL="https://push.zulip.org.example.com")
@responses.activate
def test_connection_error(self) -> None:
self.setup_apns_tokens()
self.setup_gcm_tokens()
message = self.get_message(Recipient.PERSONAL, type_id=1)
UserMessage.objects.create(
user_profile=self.user_profile,
message=message,
)
missed_message = {
"user_profile_id": self.user_profile.id,
"message_id": message.id,
"trigger": "private_message",
}
assert settings.PUSH_NOTIFICATION_BOUNCER_URL is not None
URL = settings.PUSH_NOTIFICATION_BOUNCER_URL + "/api/v1/remotes/push/notify"
responses.add(responses.POST, URL, body=ConnectionError())
with mock.patch("zerver.lib.push_notifications.gcm_client") as mock_gcm:
gcm_devices = [
(b64_to_hex(device.token), device.ios_app_id, device.token)
for device in RemotePushDeviceToken.objects.filter(kind=PushDeviceToken.GCM)
]
mock_gcm.json_request.return_value = {"success": {gcm_devices[0][2]: message.id}}
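            # The ConnectionError registered above surfaces as
            # PushNotificationBouncerRetryLaterError, which signals the queue
            # worker to retry the event later: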
with self.assertRaises(PushNotificationBouncerRetryLaterError):
handle_push_notification(self.user_profile.id, missed_message)
@mock.patch("zerver.lib.push_notifications.push_notifications_enabled", return_value=True)
def test_read_message(self, mock_push_notifications: mock.MagicMock) -> None:
user_profile = self.example_user("hamlet")
message = self.get_message(Recipient.PERSONAL, type_id=1)
usermessage = UserMessage.objects.create(
user_profile=user_profile,
message=message,
)
missed_message = {
"message_id": message.id,
"trigger": "private_message",
}
# If the message is unread, we should send push notifications.
with mock.patch(
"zerver.lib.push_notifications.send_apple_push_notification"
) as mock_send_apple, mock.patch(
"zerver.lib.push_notifications.send_android_push_notification"
) as mock_send_android:
handle_push_notification(user_profile.id, missed_message)
mock_send_apple.assert_called_once()
mock_send_android.assert_called_once()
# If the message has been read, don't send push notifications.
usermessage.flags.read = True
usermessage.save()
with mock.patch(
"zerver.lib.push_notifications.send_apple_push_notification"
) as mock_send_apple, mock.patch(
"zerver.lib.push_notifications.send_android_push_notification"
) as mock_send_android:
handle_push_notification(user_profile.id, missed_message)
mock_send_apple.assert_not_called()
mock_send_android.assert_not_called()
def test_deleted_message(self) -> None:
"""Simulates the race where message is deleted before handling push notifications"""
user_profile = self.example_user("hamlet")
message = self.get_message(Recipient.PERSONAL, type_id=1)
UserMessage.objects.create(
user_profile=user_profile,
flags=UserMessage.flags.read,
message=message,
)
missed_message = {
"message_id": message.id,
"trigger": "private_message",
}
# Now, delete the message the normal way
do_delete_messages(user_profile.realm, [message])
        # This mock.patch() should become assertNoLogs once Python 3.10+
        # (which adds that feature) can be assumed.
with mock.patch(
"zerver.lib.push_notifications.uses_notification_bouncer"
) as mock_check, mock.patch("logging.error") as mock_logging_error, mock.patch(
"zerver.lib.push_notifications.push_notifications_enabled", return_value=True
) as mock_push_notifications:
handle_push_notification(user_profile.id, missed_message)
mock_push_notifications.assert_called_once()
            # Check that we didn't proceed further and didn't log anything.
mock_check.assert_not_called()
mock_logging_error.assert_not_called()
def test_missing_message(self) -> None:
"""Simulates the race where message is missing when handling push notifications"""
user_profile = self.example_user("hamlet")
message = self.get_message(Recipient.PERSONAL, type_id=1)
UserMessage.objects.create(
user_profile=user_profile,
flags=UserMessage.flags.read,
message=message,
)
missed_message = {
"message_id": message.id,
"trigger": "private_message",
}
# Now delete the message forcefully, so it just doesn't exist.
message.delete()
# This should log an error
with mock.patch(
"zerver.lib.push_notifications.uses_notification_bouncer"
) as mock_check, self.assertLogs(level="INFO") as mock_logging_info, mock.patch(
"zerver.lib.push_notifications.push_notifications_enabled", return_value=True
) as mock_push_notifications:
handle_push_notification(user_profile.id, missed_message)
mock_push_notifications.assert_called_once()
            # Check that we didn't proceed further.
mock_check.assert_not_called()
self.assertEqual(
mock_logging_info.output,
[
f"INFO:root:Unexpected message access failure handling push notifications: {user_profile.id} {missed_message['message_id']}"
],
)
def test_send_notifications_to_bouncer(self) -> None:
user_profile = self.example_user("hamlet")
message = self.get_message(Recipient.PERSONAL, type_id=1)
UserMessage.objects.create(
user_profile=user_profile,
message=message,
)
missed_message = {
"message_id": message.id,
"trigger": "private_message",
}
with self.settings(PUSH_NOTIFICATION_BOUNCER_URL=True), mock.patch(
"zerver.lib.push_notifications.get_message_payload_apns", return_value={"apns": True}
), mock.patch(
"zerver.lib.push_notifications.get_message_payload_gcm",
return_value=({"gcm": True}, {}),
), mock.patch(
"zerver.lib.push_notifications.send_notifications_to_bouncer", return_value=(3, 5)
) as mock_send, self.assertLogs(
"zerver.lib.push_notifications", level="INFO"
) as mock_logging_info:
handle_push_notification(user_profile.id, missed_message)
mock_send.assert_called_with(
user_profile.id,
{"apns": True},
{"gcm": True},
{},
)
self.assertEqual(
mock_logging_info.output,
[
f"INFO:zerver.lib.push_notifications:Sending push notifications to mobile clients for user {user_profile.id}",
f"INFO:zerver.lib.push_notifications:Sent mobile push notifications for user {user_profile.id} through bouncer: 3 via FCM devices, 5 via APNs devices",
],
)
def test_non_bouncer_push(self) -> None:
self.setup_apns_tokens()
self.setup_gcm_tokens()
message = self.get_message(Recipient.PERSONAL, type_id=1)
UserMessage.objects.create(
user_profile=self.user_profile,
message=message,
)
android_devices = list(
PushDeviceToken.objects.filter(user=self.user_profile, kind=PushDeviceToken.GCM)
)
apple_devices = list(
PushDeviceToken.objects.filter(user=self.user_profile, kind=PushDeviceToken.APNS)
)
missed_message = {
"message_id": message.id,
"trigger": "private_message",
}
with mock.patch(
"zerver.lib.push_notifications.get_message_payload_apns", return_value={"apns": True}
), mock.patch(
"zerver.lib.push_notifications.get_message_payload_gcm",
return_value=({"gcm": True}, {}),
), mock.patch(
"zerver.lib.push_notifications.send_apple_push_notification"
) as mock_send_apple, mock.patch(
"zerver.lib.push_notifications.send_android_push_notification"
) as mock_send_android, mock.patch(
"zerver.lib.push_notifications.push_notifications_enabled", return_value=True
) as mock_push_notifications:
handle_push_notification(self.user_profile.id, missed_message)
user_identity = UserPushIndentityCompat(user_id=self.user_profile.id)
mock_send_apple.assert_called_with(user_identity, apple_devices, {"apns": True})
mock_send_android.assert_called_with(user_identity, android_devices, {"gcm": True}, {})
mock_push_notifications.assert_called_once()
def test_send_remove_notifications_to_bouncer(self) -> None:
user_profile = self.example_user("hamlet")
message = self.get_message(Recipient.PERSONAL, type_id=1)
UserMessage.objects.create(
user_profile=user_profile,
message=message,
flags=UserMessage.flags.active_mobile_push_notification,
)
with self.settings(PUSH_NOTIFICATION_BOUNCER_URL=True), mock.patch(
"zerver.lib.push_notifications.send_notifications_to_bouncer"
) as mock_send:
handle_remove_push_notification(user_profile.id, [message.id])
mock_send.assert_called_with(
user_profile.id,
{
"badge": 0,
"custom": {
"zulip": {
"server": "testserver",
"realm_id": self.sender.realm.id,
"realm_uri": "http://zulip.testserver",
"user_id": self.user_profile.id,
"event": "remove",
"zulip_message_ids": str(message.id),
},
},
},
{
"server": "testserver",
"realm_id": self.sender.realm.id,
"realm_uri": "http://zulip.testserver",
"user_id": self.user_profile.id,
"event": "remove",
"zulip_message_ids": str(message.id),
"zulip_message_id": message.id,
},
{"priority": "normal"},
)
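            # Unlike new-message notifications, which are sent with
            # {"priority": "high"}, "remove" notifications use normal priority.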
user_message = UserMessage.objects.get(user_profile=self.user_profile, message=message)
self.assertEqual(user_message.flags.active_mobile_push_notification, False)
def test_non_bouncer_push_remove(self) -> None:
self.setup_apns_tokens()
self.setup_gcm_tokens()
message = self.get_message(Recipient.PERSONAL, type_id=1)
UserMessage.objects.create(
user_profile=self.user_profile,
message=message,
flags=UserMessage.flags.active_mobile_push_notification,
)
android_devices = list(
PushDeviceToken.objects.filter(user=self.user_profile, kind=PushDeviceToken.GCM)
)
apple_devices = list(
PushDeviceToken.objects.filter(user=self.user_profile, kind=PushDeviceToken.APNS)
)
with mock.patch(
"zerver.lib.push_notifications.push_notifications_enabled", return_value=True
) as mock_push_notifications, mock.patch(
"zerver.lib.push_notifications.send_android_push_notification"
) as mock_send_android, mock.patch(
"zerver.lib.push_notifications.send_apple_push_notification"
) as mock_send_apple:
handle_remove_push_notification(self.user_profile.id, [message.id])
mock_push_notifications.assert_called_once()
user_identity = UserPushIndentityCompat(user_id=self.user_profile.id)
mock_send_android.assert_called_with(
user_identity,
android_devices,
{
"server": "testserver",
"realm_id": self.sender.realm.id,
"realm_uri": "http://zulip.testserver",
"user_id": self.user_profile.id,
"event": "remove",
"zulip_message_ids": str(message.id),
"zulip_message_id": message.id,
},
{"priority": "normal"},
)
mock_send_apple.assert_called_with(
user_identity,
apple_devices,
{
"badge": 0,
"custom": {
"zulip": {
"server": "testserver",
"realm_id": self.sender.realm.id,
"realm_uri": "http://zulip.testserver",
"user_id": self.user_profile.id,
"event": "remove",
"zulip_message_ids": str(message.id),
}
},
},
)
user_message = UserMessage.objects.get(user_profile=self.user_profile, message=message)
self.assertEqual(user_message.flags.active_mobile_push_notification, False)
def test_user_message_does_not_exist(self) -> None:
"""This simulates a condition that should only be an error if the user is
not long-term idle; we fake it, though, in the sense that the user should
not have received the message in the first place"""
self.make_stream("public_stream")
sender = self.example_user("iago")
self.subscribe(sender, "public_stream")
message_id = self.send_stream_message(sender, "public_stream", "test")
missed_message = {"message_id": message_id}
with self.assertLogs("zerver.lib.push_notifications", level="ERROR") as logger, mock.patch(
"zerver.lib.push_notifications.push_notifications_enabled", return_value=True
) as mock_push_notifications:
handle_push_notification(self.user_profile.id, missed_message)
self.assertEqual(
"ERROR:zerver.lib.push_notifications:"
f"Could not find UserMessage with message_id {message_id} and user_id {self.user_profile.id}"
"\nNoneType: None", # This is an effect of using `exc_info=True` in the actual logger.
logger.output[0],
)
mock_push_notifications.assert_called_once()
def test_user_message_does_not_exist_remove(self) -> None:
"""This simulates a condition that should only be an error if the user is
not long-term idle; we fake it, though, in the sense that the user should
not have received the message in the first place"""
self.setup_apns_tokens()
self.setup_gcm_tokens()
self.make_stream("public_stream")
sender = self.example_user("iago")
self.subscribe(sender, "public_stream")
message_id = self.send_stream_message(sender, "public_stream", "test")
with mock.patch(
"zerver.lib.push_notifications.push_notifications_enabled", return_value=True
) as mock_push_notifications, mock.patch(
"zerver.lib.push_notifications.send_android_push_notification"
) as mock_send_android, mock.patch(
"zerver.lib.push_notifications.send_apple_push_notification"
) as mock_send_apple:
handle_remove_push_notification(self.user_profile.id, [message_id])
mock_push_notifications.assert_called_once()
mock_send_android.assert_called_once()
mock_send_apple.assert_called_once()
def test_user_message_soft_deactivated(self) -> None:
"""This simulates a condition that should only be an error if the user is
not long-term idle; we fake it, though, in the sense that the user should
not have received the message in the first place"""
self.setup_apns_tokens()
self.setup_gcm_tokens()
self.make_stream("public_stream")
sender = self.example_user("iago")
self.subscribe(self.user_profile, "public_stream")
self.subscribe(sender, "public_stream")
logger_string = "zulip.soft_deactivation"
with self.assertLogs(logger_string, level="INFO") as info_logs:
do_soft_deactivate_users([self.user_profile])
self.assertEqual(
info_logs.output,
[
f"INFO:{logger_string}:Soft deactivated user {self.user_profile.id}",
f"INFO:{logger_string}:Soft-deactivated batch of 1 users; 0 remain to process",
],
)
message_id = self.send_stream_message(sender, "public_stream", "test")
missed_message = {
"message_id": message_id,
"trigger": "stream_push_notify",
}
android_devices = list(
PushDeviceToken.objects.filter(user=self.user_profile, kind=PushDeviceToken.GCM)
)
apple_devices = list(
PushDeviceToken.objects.filter(user=self.user_profile, kind=PushDeviceToken.APNS)
)
with mock.patch(
"zerver.lib.push_notifications.get_message_payload_apns", return_value={"apns": True}
), mock.patch(
"zerver.lib.push_notifications.get_message_payload_gcm",
return_value=({"gcm": True}, {}),
), mock.patch(
"zerver.lib.push_notifications.send_apple_push_notification"
) as mock_send_apple, mock.patch(
"zerver.lib.push_notifications.send_android_push_notification"
) as mock_send_android, mock.patch(
"zerver.lib.push_notifications.logger.error"
) as mock_logger, mock.patch(
"zerver.lib.push_notifications.push_notifications_enabled", return_value=True
) as mock_push_notifications:
handle_push_notification(self.user_profile.id, missed_message)
mock_logger.assert_not_called()
user_identity = UserPushIndentityCompat(user_id=self.user_profile.id)
mock_send_apple.assert_called_with(user_identity, apple_devices, {"apns": True})
mock_send_android.assert_called_with(user_identity, android_devices, {"gcm": True}, {})
mock_push_notifications.assert_called_once()
@mock.patch("zerver.lib.push_notifications.push_notifications_enabled", return_value=True)
def test_user_push_soft_reactivate_soft_deactivated_user(
self, mock_push_notifications: mock.MagicMock
) -> None:
othello = self.example_user("othello")
cordelia = self.example_user("cordelia")
large_user_group = create_user_group(
"large_user_group", [self.user_profile, othello, cordelia], get_realm("zulip")
)
# Personal mention in a stream message should soft reactivate the user
with self.soft_deactivate_and_check_long_term_idle(self.user_profile, expected=False):
mention = f"@**{self.user_profile.full_name}**"
stream_mentioned_message_id = self.send_stream_message(othello, "Denmark", mention)
handle_push_notification(
self.user_profile.id,
{"message_id": stream_mentioned_message_id, "trigger": "mentioned"},
)
# Private message should soft reactivate the user
with self.soft_deactivate_and_check_long_term_idle(self.user_profile, expected=False):
# Soft reactivate the user by sending a personal message
personal_message_id = self.send_personal_message(othello, self.user_profile, "Message")
handle_push_notification(
self.user_profile.id,
{"message_id": personal_message_id, "trigger": "private_message"},
)
        # Wildcard mention should NOT soft reactivate the user
with self.soft_deactivate_and_check_long_term_idle(self.user_profile, expected=True):
            # Send a wildcard mention in a stream message
mention = "@**all**"
stream_mentioned_message_id = self.send_stream_message(othello, "Denmark", mention)
handle_push_notification(
self.user_profile.id,
{"message_id": stream_mentioned_message_id, "trigger": "wildcard_mentioned"},
)
# Group mention should NOT soft reactivate the user
with self.soft_deactivate_and_check_long_term_idle(self.user_profile, expected=True):
            # Send a user group mention in a stream message
mention = "@*large_user_group*"
stream_mentioned_message_id = self.send_stream_message(othello, "Denmark", mention)
handle_push_notification(
self.user_profile.id,
{
"message_id": stream_mentioned_message_id,
"trigger": "mentioned",
"mentioned_user_group_id": large_user_group.id,
},
)
@mock.patch("zerver.lib.push_notifications.logger.info")
@mock.patch("zerver.lib.push_notifications.push_notifications_enabled", return_value=True)
def test_user_push_notification_already_active(
self, mock_push_notifications: mock.MagicMock, mock_info: mock.MagicMock
) -> None:
user_profile = self.example_user("hamlet")
message = self.get_message(Recipient.PERSONAL, type_id=1)
UserMessage.objects.create(
user_profile=user_profile,
flags=UserMessage.flags.active_mobile_push_notification,
message=message,
)
missed_message = {
"message_id": message.id,
"trigger": "private_message",
}
handle_push_notification(user_profile.id, missed_message)
mock_push_notifications.assert_called_once()
        # Check that we didn't proceed further and the function returned early.
mock_info.assert_not_called()
class TestAPNs(PushNotificationTest):
def devices(self) -> List[DeviceToken]:
return list(
PushDeviceToken.objects.filter(user=self.user_profile, kind=PushDeviceToken.APNS)
)
def send(
self,
devices: Optional[List[Union[PushDeviceToken, RemotePushDeviceToken]]] = None,
payload_data: Dict[str, Any] = {},
) -> None:
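        # Note: the mutable {} default for payload_data is shared across
        # calls; that is harmless here because it is only forwarded, but pass
        # an explicit dict if a test ever needs to mutate it.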
send_apple_push_notification(
UserPushIndentityCompat(user_id=self.user_profile.id),
devices if devices is not None else self.devices(),
payload_data,
)
def test_get_apns_context(self) -> None:
"""This test is pretty hacky, and needs to carefully reset the state
it modifies in order to avoid leaking state that can lead to
nondeterministic results for other tests.
"""
import zerver.lib.push_notifications
zerver.lib.push_notifications.get_apns_context.cache_clear()
try:
with self.settings(APNS_CERT_FILE="/foo.pem"), mock.patch(
"ssl.SSLContext.load_cert_chain"
) as mock_load_cert_chain:
apns_context = get_apns_context()
assert apns_context is not None
try:
mock_load_cert_chain.assert_called_once_with("/foo.pem")
assert apns_context.apns.pool.loop == apns_context.loop
finally:
apns_context.loop.close()
finally:
# Reset the cache for `get_apns_context` so that we don't
# leak changes to the rest of the world.
zerver.lib.push_notifications.get_apns_context.cache_clear()
def test_not_configured(self) -> None:
self.setup_apns_tokens()
with mock.patch(
"zerver.lib.push_notifications.get_apns_context"
) as mock_get, self.assertLogs("zerver.lib.push_notifications", level="DEBUG") as logger:
mock_get.return_value = None
self.send()
notification_drop_log = (
"DEBUG:zerver.lib.push_notifications:"
"APNs: Dropping a notification because nothing configured. "
"Set PUSH_NOTIFICATION_BOUNCER_URL (or APNS_CERT_FILE)."
)
from zerver.lib.push_notifications import initialize_push_notifications
initialize_push_notifications()
mobile_notifications_not_configured_log = (
"WARNING:zerver.lib.push_notifications:"
"Mobile push notifications are not configured.\n "
"See https://zulip.readthedocs.io/en/latest/production/mobile-push-notifications.html"
)
self.assertEqual(
[notification_drop_log, mobile_notifications_not_configured_log], logger.output
)
def test_success(self) -> None:
self.setup_apns_tokens()
with self.mock_apns() as apns_context, self.assertLogs(
"zerver.lib.push_notifications", level="INFO"
) as logger:
apns_context.apns.send_notification = mock.AsyncMock()
apns_context.apns.send_notification.return_value.is_successful = True
self.send()
for device in self.devices():
self.assertIn(
f"INFO:zerver.lib.push_notifications:APNs: Success sending for user <id:{self.user_profile.id}> to device {device.token}",
logger.output,
)
def test_http_retry_eventually_fails(self) -> None:
self.setup_apns_tokens()
with self.mock_apns() as apns_context, self.assertLogs(
"zerver.lib.push_notifications", level="INFO"
) as logger:
apns_context.apns.send_notification = mock.AsyncMock(
side_effect=aioapns.exceptions.ConnectionError()
)
self.send(devices=self.devices()[0:1])
self.assertIn(
f"ERROR:zerver.lib.push_notifications:APNs: ConnectionError sending for user <id:{self.user_profile.id}> to device {self.devices()[0].token}; check certificate expiration",
logger.output,
)
def test_internal_server_error(self) -> None:
self.setup_apns_tokens()
with self.mock_apns() as apns_context, self.assertLogs(
"zerver.lib.push_notifications", level="INFO"
) as logger:
apns_context.apns.send_notification = mock.AsyncMock()
apns_context.apns.send_notification.return_value.is_successful = False
apns_context.apns.send_notification.return_value.description = "InternalServerError"
self.send(devices=self.devices()[0:1])
self.assertIn(
f"WARNING:zerver.lib.push_notifications:APNs: Failed to send for user <id:{self.user_profile.id}> to device {self.devices()[0].token}: InternalServerError",
logger.output,
)
def test_modernize_apns_payload(self) -> None:
payload = {
"alert": "Message from Hamlet",
"badge": 0,
"custom": {"zulip": {"message_ids": [3]}},
}
self.assertEqual(
modernize_apns_payload(
{"alert": "Message from Hamlet", "message_ids": [3], "badge": 0}
),
payload,
)
self.assertEqual(modernize_apns_payload(payload), payload)
@mock.patch("zerver.lib.push_notifications.push_notifications_enabled", return_value=True)
def test_apns_badge_count(self, mock_push_notifications: mock.MagicMock) -> None:
user_profile = self.example_user("othello")
# Test APNs badge count for personal messages.
message_ids = [
self.send_personal_message(self.sender, user_profile, "Content of message")
for i in range(3)
]
self.assertEqual(get_apns_badge_count(user_profile), 0)
self.assertEqual(get_apns_badge_count_future(user_profile), 3)
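        # get_apns_badge_count() is pinned to 0 (badge counts are currently
        # disabled), while get_apns_badge_count_future() reflects the intended
        # unread count.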
# Similarly, test APNs badge count for stream mention.
stream = self.subscribe(user_profile, "Denmark")
message_ids += [
self.send_stream_message(
self.sender, stream.name, "Hi, @**Othello, the Moor of Venice**"
)
for i in range(2)
]
self.assertEqual(get_apns_badge_count(user_profile), 0)
self.assertEqual(get_apns_badge_count_future(user_profile), 5)
num_messages = len(message_ids)
# Mark the messages as read and test whether
# the count decreases correctly.
for i, message_id in enumerate(message_ids):
do_update_message_flags(user_profile, "add", "read", [message_id])
self.assertEqual(get_apns_badge_count(user_profile), 0)
self.assertEqual(get_apns_badge_count_future(user_profile), num_messages - i - 1)
mock_push_notifications.assert_called()
class TestGetAPNsPayload(PushNotificationTest):
def test_get_message_payload_apns_personal_message(self) -> None:
user_profile = self.example_user("othello")
message_id = self.send_personal_message(
self.sender,
user_profile,
"Content of personal message",
)
message = Message.objects.get(id=message_id)
payload = get_message_payload_apns(
user_profile, message, NotificationTriggers.PRIVATE_MESSAGE
)
expected = {
"alert": {
"title": "King Hamlet",
"subtitle": "",
"body": message.content,
},
"badge": 0,
"sound": "default",
"custom": {
"zulip": {
"message_ids": [message.id],
"recipient_type": "private",
"sender_email": self.sender.email,
"sender_id": self.sender.id,
"server": settings.EXTERNAL_HOST,
"realm_id": self.sender.realm.id,
"realm_uri": self.sender.realm.uri,
"user_id": user_profile.id,
},
},
}
self.assertDictEqual(payload, expected)
@mock.patch("zerver.lib.push_notifications.push_notifications_enabled", return_value=True)
def test_get_message_payload_apns_huddle_message(
self, mock_push_notifications: mock.MagicMock
) -> None:
user_profile = self.example_user("othello")
message_id = self.send_huddle_message(
self.sender, [self.example_user("othello"), self.example_user("cordelia")]
)
message = Message.objects.get(id=message_id)
payload = get_message_payload_apns(
user_profile, message, NotificationTriggers.PRIVATE_MESSAGE
)
expected = {
"alert": {
"title": "Cordelia, Lear's daughter, King Hamlet, Othello, the Moor of Venice",
"subtitle": "King Hamlet:",
"body": message.content,
},
"sound": "default",
"badge": 0,
"custom": {
"zulip": {
"message_ids": [message.id],
"recipient_type": "private",
"pm_users": ",".join(
str(user_profile_id)
for user_profile_id in sorted(
s.user_profile_id
for s in Subscription.objects.filter(recipient=message.recipient)
)
),
"sender_email": self.sender.email,
"sender_id": self.sender.id,
"server": settings.EXTERNAL_HOST,
"realm_id": self.sender.realm.id,
"realm_uri": self.sender.realm.uri,
"user_id": user_profile.id,
},
},
}
self.assertDictEqual(payload, expected)
mock_push_notifications.assert_called()
def test_get_message_payload_apns_stream_message(self) -> None:
stream = Stream.objects.filter(name="Verona").get()
message = self.get_message(Recipient.STREAM, stream.id)
payload = get_message_payload_apns(self.sender, message, NotificationTriggers.STREAM_PUSH)
expected = {
"alert": {
"title": "#Verona > Test topic",
"subtitle": "King Hamlet:",
"body": message.content,
},
"sound": "default",
"badge": 0,
"custom": {
"zulip": {
"message_ids": [message.id],
"recipient_type": "stream",
"sender_email": self.sender.email,
"sender_id": self.sender.id,
"stream": get_display_recipient(message.recipient),
"stream_id": stream.id,
"topic": message.topic_name(),
"server": settings.EXTERNAL_HOST,
"realm_id": self.sender.realm.id,
"realm_uri": self.sender.realm.uri,
"user_id": self.sender.id,
},
},
}
self.assertDictEqual(payload, expected)
def test_get_message_payload_apns_stream_mention(self) -> None:
user_profile = self.example_user("othello")
stream = Stream.objects.filter(name="Verona").get()
message = self.get_message(Recipient.STREAM, stream.id)
payload = get_message_payload_apns(user_profile, message, NotificationTriggers.MENTION)
expected = {
"alert": {
"title": "#Verona > Test topic",
"subtitle": "King Hamlet mentioned you:",
"body": message.content,
},
"sound": "default",
"badge": 0,
"custom": {
"zulip": {
"message_ids": [message.id],
"recipient_type": "stream",
"sender_email": self.sender.email,
"sender_id": self.sender.id,
"stream": get_display_recipient(message.recipient),
"stream_id": stream.id,
"topic": message.topic_name(),
"server": settings.EXTERNAL_HOST,
"realm_id": self.sender.realm.id,
"realm_uri": self.sender.realm.uri,
"user_id": user_profile.id,
},
},
}
self.assertDictEqual(payload, expected)
def test_get_message_payload_apns_user_group_mention(self) -> None:
user_profile = self.example_user("othello")
user_group = create_user_group("test_user_group", [user_profile], get_realm("zulip"))
stream = Stream.objects.filter(name="Verona").get()
message = self.get_message(Recipient.STREAM, stream.id)
payload = get_message_payload_apns(
user_profile, message, NotificationTriggers.MENTION, user_group.id, user_group.name
)
expected = {
"alert": {
"title": "#Verona > Test topic",
"subtitle": "King Hamlet mentioned @test_user_group:",
"body": message.content,
},
"sound": "default",
"badge": 0,
"custom": {
"zulip": {
"message_ids": [message.id],
"recipient_type": "stream",
"sender_email": self.sender.email,
"sender_id": self.sender.id,
"stream": get_display_recipient(message.recipient),
"stream_id": stream.id,
"topic": message.topic_name(),
"server": settings.EXTERNAL_HOST,
"realm_id": self.sender.realm.id,
"realm_uri": self.sender.realm.uri,
"user_id": user_profile.id,
"mentioned_user_group_id": user_group.id,
"mentioned_user_group_name": user_group.name,
}
},
}
self.assertDictEqual(payload, expected)
def test_get_message_payload_apns_stream_wildcard_mention(self) -> None:
user_profile = self.example_user("othello")
stream = Stream.objects.filter(name="Verona").get()
message = self.get_message(Recipient.STREAM, stream.id)
payload = get_message_payload_apns(
user_profile, message, NotificationTriggers.WILDCARD_MENTION
)
expected = {
"alert": {
"title": "#Verona > Test topic",
"subtitle": "King Hamlet mentioned everyone:",
"body": message.content,
},
"sound": "default",
"badge": 0,
"custom": {
"zulip": {
"message_ids": [message.id],
"recipient_type": "stream",
"sender_email": self.sender.email,
"sender_id": self.sender.id,
"stream": get_display_recipient(message.recipient),
"stream_id": stream.id,
"topic": message.topic_name(),
"server": settings.EXTERNAL_HOST,
"realm_id": self.sender.realm.id,
"realm_uri": self.sender.realm.uri,
"user_id": user_profile.id,
},
},
}
self.assertDictEqual(payload, expected)
@override_settings(PUSH_NOTIFICATION_REDACT_CONTENT=True)
def test_get_message_payload_apns_redacted_content(self) -> None:
user_profile = self.example_user("othello")
message_id = self.send_huddle_message(
self.sender, [self.example_user("othello"), self.example_user("cordelia")]
)
message = Message.objects.get(id=message_id)
payload = get_message_payload_apns(
user_profile, message, NotificationTriggers.PRIVATE_MESSAGE
)
expected = {
"alert": {
"title": "Cordelia, Lear's daughter, King Hamlet, Othello, the Moor of Venice",
"subtitle": "King Hamlet:",
"body": "*This organization has disabled including message content in mobile push notifications*",
},
"sound": "default",
"badge": 0,
"custom": {
"zulip": {
"message_ids": [message.id],
"recipient_type": "private",
"pm_users": ",".join(
str(user_profile_id)
for user_profile_id in sorted(
s.user_profile_id
for s in Subscription.objects.filter(recipient=message.recipient)
)
),
"sender_email": self.sender.email,
"sender_id": self.sender.id,
"server": settings.EXTERNAL_HOST,
"realm_id": self.sender.realm.id,
"realm_uri": self.sender.realm.uri,
"user_id": user_profile.id,
},
},
}
self.assertDictEqual(payload, expected)
class TestGetGCMPayload(PushNotificationTest):
def _test_get_message_payload_gcm_mentions(
self,
trigger: str,
alert: str,
*,
mentioned_user_group_id: Optional[int] = None,
mentioned_user_group_name: Optional[str] = None,
) -> None:
stream = Stream.objects.filter(name="Verona").get()
message = self.get_message(Recipient.STREAM, stream.id)
message.content = "a" * 210
message.rendered_content = "a" * 210
message.save()
hamlet = self.example_user("hamlet")
payload, gcm_options = get_message_payload_gcm(
hamlet, message, trigger, mentioned_user_group_id, mentioned_user_group_name
)
expected_payload = {
"user_id": hamlet.id,
"event": "message",
"alert": alert,
"zulip_message_id": message.id,
"time": datetime_to_timestamp(message.date_sent),
"content": "a" * 200 + "…",
"content_truncated": True,
"server": settings.EXTERNAL_HOST,
"realm_id": hamlet.realm.id,
"realm_uri": hamlet.realm.uri,
"sender_id": hamlet.id,
"sender_email": hamlet.email,
"sender_full_name": "King Hamlet",
"sender_avatar_url": absolute_avatar_url(message.sender),
"recipient_type": "stream",
"stream": get_display_recipient(message.recipient),
"stream_id": stream.id,
"topic": message.topic_name(),
}
if mentioned_user_group_id is not None:
expected_payload["mentioned_user_group_id"] = mentioned_user_group_id
expected_payload["mentioned_user_group_name"] = mentioned_user_group_name
self.assertDictEqual(payload, expected_payload)
self.assertDictEqual(
gcm_options,
{
"priority": "high",
},
)
def test_get_message_payload_gcm_personal_mention(self) -> None:
self._test_get_message_payload_gcm_mentions(
"mentioned", "King Hamlet mentioned you in #Verona"
)
def test_get_message_payload_gcm_user_group_mention(self) -> None:
# Note that the @mobile_team user group doesn't actually
# exist; this test is just verifying the formatting logic.
self._test_get_message_payload_gcm_mentions(
"mentioned",
"King Hamlet mentioned @mobile_team in #Verona",
mentioned_user_group_id=3,
mentioned_user_group_name="mobile_team",
)
def test_get_message_payload_gcm_wildcard_mention(self) -> None:
self._test_get_message_payload_gcm_mentions(
"wildcard_mentioned", "King Hamlet mentioned everyone in #Verona"
)
def test_get_message_payload_gcm_private_message(self) -> None:
message = self.get_message(Recipient.PERSONAL, 1)
hamlet = self.example_user("hamlet")
payload, gcm_options = get_message_payload_gcm(
hamlet, message, NotificationTriggers.PRIVATE_MESSAGE
)
self.assertDictEqual(
payload,
{
"user_id": hamlet.id,
"event": "message",
"alert": "New private message from King Hamlet",
"zulip_message_id": message.id,
"time": datetime_to_timestamp(message.date_sent),
"content": message.content,
"content_truncated": False,
"server": settings.EXTERNAL_HOST,
"realm_id": hamlet.realm.id,
"realm_uri": hamlet.realm.uri,
"sender_id": hamlet.id,
"sender_email": hamlet.email,
"sender_full_name": "King Hamlet",
"sender_avatar_url": absolute_avatar_url(message.sender),
"recipient_type": "private",
},
)
self.assertDictEqual(
gcm_options,
{
"priority": "high",
},
)
def test_get_message_payload_gcm_stream_notifications(self) -> None:
stream = Stream.objects.get(name="Denmark")
message = self.get_message(Recipient.STREAM, stream.id)
hamlet = self.example_user("hamlet")
payload, gcm_options = get_message_payload_gcm(
hamlet, message, NotificationTriggers.STREAM_PUSH
)
self.assertDictEqual(
payload,
{
"user_id": hamlet.id,
"event": "message",
"alert": "New stream message from King Hamlet in #Denmark",
"zulip_message_id": message.id,
"time": datetime_to_timestamp(message.date_sent),
"content": message.content,
"content_truncated": False,
"server": settings.EXTERNAL_HOST,
"realm_id": hamlet.realm.id,
"realm_uri": hamlet.realm.uri,
"sender_id": hamlet.id,
"sender_email": hamlet.email,
"sender_full_name": "King Hamlet",
"sender_avatar_url": absolute_avatar_url(message.sender),
"recipient_type": "stream",
"topic": "Test topic",
"stream": "Denmark",
"stream_id": stream.id,
},
)
self.assertDictEqual(
gcm_options,
{
"priority": "high",
},
)
@override_settings(PUSH_NOTIFICATION_REDACT_CONTENT=True)
def test_get_message_payload_gcm_redacted_content(self) -> None:
stream = Stream.objects.get(name="Denmark")
message = self.get_message(Recipient.STREAM, stream.id)
hamlet = self.example_user("hamlet")
payload, gcm_options = get_message_payload_gcm(
hamlet, message, NotificationTriggers.STREAM_PUSH
)
self.assertDictEqual(
payload,
{
"user_id": hamlet.id,
"event": "message",
"alert": "New stream message from King Hamlet in #Denmark",
"zulip_message_id": message.id,
"time": datetime_to_timestamp(message.date_sent),
"content": "*This organization has disabled including message content in mobile push notifications*",
"content_truncated": False,
"server": settings.EXTERNAL_HOST,
"realm_id": hamlet.realm.id,
"realm_uri": hamlet.realm.uri,
"sender_id": hamlet.id,
"sender_email": hamlet.email,
"sender_full_name": "King Hamlet",
"sender_avatar_url": absolute_avatar_url(message.sender),
"recipient_type": "stream",
"topic": "Test topic",
"stream": "Denmark",
"stream_id": stream.id,
},
)
self.assertDictEqual(
gcm_options,
{
"priority": "high",
},
)
class TestSendNotificationsToBouncer(ZulipTestCase):
@mock.patch("zerver.lib.remote_server.send_to_push_bouncer")
def test_send_notifications_to_bouncer(self, mock_send: mock.MagicMock) -> None:
mock_send.return_value = {"total_android_devices": 1, "total_apple_devices": 3}
total_android_devices, total_apple_devices = send_notifications_to_bouncer(
1, {"apns": True}, {"gcm": True}, {}
)
post_data = {
"user_uuid": get_user_profile_by_id(1).uuid,
"user_id": 1,
"apns_payload": {"apns": True},
"gcm_payload": {"gcm": True},
"gcm_options": {},
}
mock_send.assert_called_with(
"POST",
"push/notify",
orjson.dumps(post_data),
extra_headers={"Content-type": "application/json"},
)
self.assertEqual(total_android_devices, 1)
self.assertEqual(total_apple_devices, 3)
@override_settings(PUSH_NOTIFICATION_BOUNCER_URL="https://push.zulip.org.example.com")
class TestSendToPushBouncer(ZulipTestCase):
def add_mock_response(
self, body: bytes = orjson.dumps({"msg": "error"}), status: int = 200
) -> None:
assert settings.PUSH_NOTIFICATION_BOUNCER_URL is not None
URL = settings.PUSH_NOTIFICATION_BOUNCER_URL + "/api/v1/remotes/register"
responses.add(responses.POST, URL, body=body, status=status)
@responses.activate
def test_500_error(self) -> None:
self.add_mock_response(status=500)
with self.assertLogs(level="WARNING") as m:
with self.assertRaises(PushNotificationBouncerRetryLaterError):
send_to_push_bouncer("POST", "register", {"data": "true"})
self.assertEqual(m.output, ["WARNING:root:Received 500 from push notification bouncer"])
@responses.activate
def test_400_error(self) -> None:
self.add_mock_response(status=400)
with self.assertRaises(JsonableError) as exc:
send_to_push_bouncer("POST", "register", {"msg": "true"})
self.assertEqual(exc.exception.msg, "error")
@responses.activate
def test_400_error_invalid_server_key(self) -> None:
from zilencer.auth import InvalidZulipServerError
# This is the exception our decorator uses for an invalid Zulip server
error_response = json_response_from_error(InvalidZulipServerError("testRole"))
self.add_mock_response(body=error_response.content, status=error_response.status_code)
with self.assertRaises(PushNotificationBouncerException) as exc:
send_to_push_bouncer("POST", "register", {"msg": "true"})
self.assertEqual(
str(exc.exception),
"Push notifications bouncer error: "
"Zulip server auth failure: testRole is not registered -- did you run `manage.py register_server`?",
)
@responses.activate
def test_400_error_when_content_is_not_serializable(self) -> None:
self.add_mock_response(body=b"/", status=400)
with self.assertRaises(orjson.JSONDecodeError):
send_to_push_bouncer("POST", "register", {"msg": "true"})
@responses.activate
def test_300_error(self) -> None:
self.add_mock_response(body=b"/", status=300)
with self.assertRaises(PushNotificationBouncerException) as exc:
send_to_push_bouncer("POST", "register", {"msg": "true"})
self.assertEqual(
str(exc.exception), "Push notification bouncer returned unexpected status code 300"
)
class TestPushApi(BouncerTestCase):
@responses.activate
def test_push_api_error_handling(self) -> None:
user = self.example_user("cordelia")
self.login_user(user)
endpoints = [
("/json/users/me/apns_device_token", "apple-tokenaz"),
("/json/users/me/android_gcm_reg_id", "android-token"),
]
# Test error handling
for endpoint, label in endpoints:
# Try adding/removing tokens that are too big...
broken_token = "a" * 5000 # too big
result = self.client_post(endpoint, {"token": broken_token})
self.assert_json_error(result, "Empty or invalid length token")
if label == "apple-tokenaz":
result = self.client_post(endpoint, {"token": "xyz has non-hex characters"})
self.assert_json_error(result, "Invalid APNS token")
result = self.client_delete(endpoint, {"token": broken_token})
self.assert_json_error(result, "Empty or invalid length token")
# Try to remove a non-existent token...
result = self.client_delete(endpoint, {"token": "abcd1234"})
self.assert_json_error(result, "Token does not exist")
# Use push notification bouncer and try to remove non-existing tokens.
with self.settings(
PUSH_NOTIFICATION_BOUNCER_URL="https://push.zulip.org.example.com"
), responses.RequestsMock() as resp:
assert settings.PUSH_NOTIFICATION_BOUNCER_URL is not None
URL = settings.PUSH_NOTIFICATION_BOUNCER_URL + "/api/v1/remotes/push/unregister"
resp.add_callback(responses.POST, URL, callback=self.request_callback)
result = self.client_delete(endpoint, {"token": "abcd1234"})
self.assert_json_error(result, "Token does not exist")
self.assertTrue(resp.assert_call_count(URL, 1))
@responses.activate
def test_push_api_add_and_remove_device_tokens(self) -> None:
user = self.example_user("cordelia")
self.login_user(user)
no_bouncer_requests = [
("/json/users/me/apns_device_token", "apple-tokenaa"),
("/json/users/me/android_gcm_reg_id", "android-token-1"),
]
bouncer_requests = [
("/json/users/me/apns_device_token", "apple-tokenbb"),
("/json/users/me/android_gcm_reg_id", "android-token-2"),
]
# Add tokens without using push notification bouncer.
for endpoint, token in no_bouncer_requests:
# Test that we can push twice.
result = self.client_post(endpoint, {"token": token})
self.assert_json_success(result)
result = self.client_post(endpoint, {"token": token})
self.assert_json_success(result)
tokens = list(PushDeviceToken.objects.filter(user=user, token=token))
self.assert_length(tokens, 1)
self.assertEqual(tokens[0].token, token)
with self.settings(PUSH_NOTIFICATION_BOUNCER_URL="https://push.zulip.org.example.com"):
self.add_mock_response()
# Enable push notification bouncer and add tokens.
for endpoint, token in bouncer_requests:
# Test that we can push twice.
result = self.client_post(endpoint, {"token": token})
self.assert_json_success(result)
result = self.client_post(endpoint, {"token": token})
self.assert_json_success(result)
tokens = list(PushDeviceToken.objects.filter(user=user, token=token))
self.assert_length(tokens, 1)
self.assertEqual(tokens[0].token, token)
remote_tokens = list(
RemotePushDeviceToken.objects.filter(user_uuid=user.uuid, token=token)
)
self.assert_length(remote_tokens, 1)
self.assertEqual(remote_tokens[0].token, token)
# PushDeviceToken will include all the device tokens.
token_values = list(PushDeviceToken.objects.values_list("token", flat=True))
self.assertEqual(
token_values, ["apple-tokenaa", "android-token-1", "apple-tokenbb", "android-token-2"]
)
# RemotePushDeviceToken will only include tokens of
# the devices using push notification bouncer.
remote_token_values = list(RemotePushDeviceToken.objects.values_list("token", flat=True))
self.assertEqual(remote_token_values, ["apple-tokenbb", "android-token-2"])
# Test removing tokens without using push notification bouncer.
for endpoint, token in no_bouncer_requests:
result = self.client_delete(endpoint, {"token": token})
self.assert_json_success(result)
tokens = list(PushDeviceToken.objects.filter(user=user, token=token))
self.assert_length(tokens, 0)
# Use push notification bouncer and test removing device tokens.
# Tokens will be removed both locally and remotely.
with self.settings(PUSH_NOTIFICATION_BOUNCER_URL="https://push.zulip.org.example.com"):
for endpoint, token in bouncer_requests:
result = self.client_delete(endpoint, {"token": token})
self.assert_json_success(result)
tokens = list(PushDeviceToken.objects.filter(user=user, token=token))
remote_tokens = list(
RemotePushDeviceToken.objects.filter(user_uuid=user.uuid, token=token)
)
self.assert_length(tokens, 0)
self.assert_length(remote_tokens, 0)
# Verify that the above process indeed removed all the tokens we created.
self.assertEqual(RemotePushDeviceToken.objects.all().count(), 0)
self.assertEqual(PushDeviceToken.objects.all().count(), 0)
class GCMParseOptionsTest(ZulipTestCase):
def test_invalid_option(self) -> None:
with self.assertRaises(JsonableError):
parse_gcm_options({"invalid": True}, {})
def test_invalid_priority_value(self) -> None:
with self.assertRaises(JsonableError):
parse_gcm_options({"priority": "invalid"}, {})
def test_default_priority(self) -> None:
self.assertEqual("high", parse_gcm_options({}, {"event": "message"}))
self.assertEqual("normal", parse_gcm_options({}, {"event": "remove"}))
self.assertEqual("normal", parse_gcm_options({}, {}))
def test_explicit_priority(self) -> None:
self.assertEqual("normal", parse_gcm_options({"priority": "normal"}, {}))
self.assertEqual("high", parse_gcm_options({"priority": "high"}, {}))
@mock.patch("zerver.lib.push_notifications.gcm_client")
class GCMSendTest(PushNotificationTest):
def setUp(self) -> None:
super().setUp()
self.setup_gcm_tokens()
def get_gcm_data(self, **kwargs: Any) -> Dict[str, Any]:
data = {
"key 1": "Data 1",
"key 2": "Data 2",
}
data.update(kwargs)
return data
def test_gcm_is_none(self, mock_gcm: mock.MagicMock) -> None:
mock_gcm.__bool__.return_value = False
with self.assertLogs("zerver.lib.push_notifications", level="DEBUG") as logger:
send_android_push_notification_to_user(self.user_profile, {}, {})
self.assertEqual(
"DEBUG:zerver.lib.push_notifications:"
"Skipping sending a GCM push notification since PUSH_NOTIFICATION_BOUNCER_URL "
"and ANDROID_GCM_API_KEY are both unset",
logger.output[0],
)
def test_json_request_raises_ioerror(self, mock_gcm: mock.MagicMock) -> None:
mock_gcm.json_request.side_effect = OSError("error")
with self.assertLogs("zerver.lib.push_notifications", level="WARNING") as logger:
send_android_push_notification_to_user(self.user_profile, {}, {})
self.assertIn(
"WARNING:zerver.lib.push_notifications:Error while pushing to GCM\nTraceback ",
logger.output[0],
)
@mock.patch("zerver.lib.push_notifications.logger.warning")
def test_success(self, mock_warning: mock.MagicMock, mock_gcm: mock.MagicMock) -> None:
res = {}
res["success"] = {token: ind for ind, token in enumerate(self.gcm_tokens)}
mock_gcm.json_request.return_value = res
data = self.get_gcm_data()
with self.assertLogs("zerver.lib.push_notifications", level="INFO") as logger:
send_android_push_notification_to_user(self.user_profile, data, {})
self.assert_length(logger.output, 3)
log_msg1 = f"INFO:zerver.lib.push_notifications:GCM: Sending notification for local user <id:{self.user_profile.id}> to 2 devices"
log_msg2 = f"INFO:zerver.lib.push_notifications:GCM: Sent {1111} as {0}"
log_msg3 = f"INFO:zerver.lib.push_notifications:GCM: Sent {2222} as {1}"
self.assertEqual([log_msg1, log_msg2, log_msg3], logger.output)
mock_warning.assert_not_called()
def test_canonical_equal(self, mock_gcm: mock.MagicMock) -> None:
res = {}
res["canonical"] = {1: 1}
mock_gcm.json_request.return_value = res
data = self.get_gcm_data()
with self.assertLogs("zerver.lib.push_notifications", level="WARNING") as logger:
send_android_push_notification_to_user(self.user_profile, data, {})
self.assertEqual(
f"WARNING:zerver.lib.push_notifications:GCM: Got canonical ref but it already matches our ID {1}!",
logger.output[0],
)
def test_canonical_pushdevice_not_present(self, mock_gcm: mock.MagicMock) -> None:
res = {}
t1 = hex_to_b64("1111")
t2 = hex_to_b64("3333")
res["canonical"] = {t1: t2}
mock_gcm.json_request.return_value = res
def get_count(hex_token: str) -> int:
token = hex_to_b64(hex_token)
return PushDeviceToken.objects.filter(token=token, kind=PushDeviceToken.GCM).count()
self.assertEqual(get_count("1111"), 1)
self.assertEqual(get_count("3333"), 0)
data = self.get_gcm_data()
with self.assertLogs("zerver.lib.push_notifications", level="WARNING") as logger:
send_android_push_notification_to_user(self.user_profile, data, {})
msg = f"WARNING:zerver.lib.push_notifications:GCM: Got canonical ref {t2} replacing {t1} but new ID not registered! Updating."
self.assertEqual(msg, logger.output[0])
self.assertEqual(get_count("1111"), 0)
self.assertEqual(get_count("3333"), 1)
def test_canonical_pushdevice_different(self, mock_gcm: mock.MagicMock) -> None:
res = {}
old_token = hex_to_b64("1111")
new_token = hex_to_b64("2222")
res["canonical"] = {old_token: new_token}
mock_gcm.json_request.return_value = res
def get_count(hex_token: str) -> int:
token = hex_to_b64(hex_token)
return PushDeviceToken.objects.filter(token=token, kind=PushDeviceToken.GCM).count()
self.assertEqual(get_count("1111"), 1)
self.assertEqual(get_count("2222"), 1)
data = self.get_gcm_data()
with self.assertLogs("zerver.lib.push_notifications", level="INFO") as logger:
send_android_push_notification_to_user(self.user_profile, data, {})
self.assertEqual(
f"INFO:zerver.lib.push_notifications:GCM: Sending notification for local user <id:{self.user_profile.id}> to 2 devices",
logger.output[0],
)
self.assertEqual(
f"INFO:zerver.lib.push_notifications:GCM: Got canonical ref {new_token}, dropping {old_token}",
logger.output[1],
)
self.assertEqual(get_count("1111"), 0)
self.assertEqual(get_count("2222"), 1)
def test_not_registered(self, mock_gcm: mock.MagicMock) -> None:
res = {}
token = hex_to_b64("1111")
res["errors"] = {"NotRegistered": [token]}
mock_gcm.json_request.return_value = res
def get_count(hex_token: str) -> int:
token = hex_to_b64(hex_token)
return PushDeviceToken.objects.filter(token=token, kind=PushDeviceToken.GCM).count()
self.assertEqual(get_count("1111"), 1)
data = self.get_gcm_data()
with self.assertLogs("zerver.lib.push_notifications", level="INFO") as logger:
send_android_push_notification_to_user(self.user_profile, data, {})
self.assertEqual(
f"INFO:zerver.lib.push_notifications:GCM: Sending notification for local user <id:{self.user_profile.id}> to 2 devices",
logger.output[0],
)
self.assertEqual(
f"INFO:zerver.lib.push_notifications:GCM: Removing {token}",
logger.output[1],
)
self.assertEqual(get_count("1111"), 0)
def test_failure(self, mock_gcm: mock.MagicMock) -> None:
res = {}
token = hex_to_b64("1111")
res["errors"] = {"Failed": [token]}
mock_gcm.json_request.return_value = res
data = self.get_gcm_data()
with self.assertLogs("zerver.lib.push_notifications", level="WARNING") as logger:
send_android_push_notification_to_user(self.user_profile, data, {})
msg = f"WARNING:zerver.lib.push_notifications:GCM: Delivery to {token} failed: Failed"
self.assertEqual(msg, logger.output[0])
class TestClearOnRead(ZulipTestCase):
def test_mark_stream_as_read(self) -> None:
n_msgs = 3
hamlet = self.example_user("hamlet")
hamlet.enable_stream_push_notifications = True
hamlet.save()
stream = self.subscribe(hamlet, "Denmark")
message_ids = [
self.send_stream_message(self.example_user("iago"), stream.name, f"yo {i}")
for i in range(n_msgs)
]
UserMessage.objects.filter(
user_profile_id=hamlet.id,
message_id__in=message_ids,
).update(flags=F("flags").bitor(UserMessage.flags.active_mobile_push_notification))
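        # The F-expression above performs the bit-OR in SQL rather than in
        # Python; roughly (a sketch, not the exact generated SQL):
        #   UPDATE zerver_usermessage
        #   SET flags = flags | <active_mobile_push_notification bit>
        #   WHERE user_profile_id = %s AND message_id IN (...)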
with mock_queue_publish("zerver.actions.message_flags.queue_json_publish") as mock_publish:
assert stream.recipient_id is not None
do_mark_stream_messages_as_read(hamlet, stream.recipient_id)
queue_items = [c[0][1] for c in mock_publish.call_args_list]
groups = [item["message_ids"] for item in queue_items]
self.assert_length(groups, 1)
self.assertEqual(sum(len(g) for g in groups), len(message_ids))
self.assertEqual({id for g in groups for id in g}, set(message_ids))
class TestPushNotificationsContent(ZulipTestCase):
def test_fixtures(self) -> None:
fixtures = orjson.loads(self.fixture_data("markdown_test_cases.json"))
tests = fixtures["regular_tests"]
for test in tests:
if "text_content" in test:
with self.subTest(markdown_test_case=test["name"]):
output = get_mobile_push_content(test["expected_output"])
self.assertEqual(output, test["text_content"])
def test_backend_only_fixtures(self) -> None:
realm = get_realm("zulip")
cordelia = self.example_user("cordelia")
stream = get_stream("Verona", realm)
fixtures = [
{
"name": "realm_emoji",
"rendered_content": f'<p>Testing <img alt=":green_tick:" class="emoji" src="/user_avatars/{realm.id}/emoji/green_tick.png" title="green tick"> realm emoji.</p>',
"expected_output": "Testing :green_tick: realm emoji.",
},
{
"name": "mentions",
"rendered_content": f'<p>Mentioning <span class="user-mention" data-user-id="{cordelia.id}">@Cordelia, Lear\'s daughter</span>.</p>',
"expected_output": "Mentioning @Cordelia, Lear's daughter.",
},
{
"name": "stream_names",
"rendered_content": f'<p>Testing stream names <a class="stream" data-stream-id="{stream.id}" href="/#narrow/stream/Verona">#Verona</a>.</p>',
"expected_output": "Testing stream names #Verona.",
},
]
for test in fixtures:
actual_output = get_mobile_push_content(test["rendered_content"])
self.assertEqual(actual_output, test["expected_output"])
@skipUnless(settings.ZILENCER_ENABLED, "requires zilencer")
class PushBouncerSignupTest(ZulipTestCase):
def test_deactivate_remote_server(self) -> None:
zulip_org_id = str(uuid.uuid4())
zulip_org_key = get_random_string(64)
request = dict(
zulip_org_id=zulip_org_id,
zulip_org_key=zulip_org_key,
hostname="example.com",
contact_email="server-admin@example.com",
)
result = self.client_post("/api/v1/remotes/server/register", request)
self.assert_json_success(result)
server = RemoteZulipServer.objects.get(uuid=zulip_org_id)
self.assertEqual(server.hostname, "example.com")
self.assertEqual(server.contact_email, "server-admin@example.com")
result = self.uuid_post(zulip_org_id, "/api/v1/remotes/server/deactivate", subdomain="")
self.assert_json_success(result)
server = RemoteZulipServer.objects.get(uuid=zulip_org_id)
remote_realm_audit_log = RemoteZulipServerAuditLog.objects.filter(
event_type=RealmAuditLog.REMOTE_SERVER_DEACTIVATED
).last()
assert remote_realm_audit_log is not None
self.assertTrue(server.deactivated)
# Now test that trying to deactivate again reports the right error.
result = self.uuid_post(
zulip_org_id, "/api/v1/remotes/server/deactivate", request, subdomain=""
)
self.assert_json_error(
result,
"The mobile push notification service registration for your server has been deactivated",
status_code=401,
)
def test_push_signup_invalid_host(self) -> None:
zulip_org_id = str(uuid.uuid4())
zulip_org_key = get_random_string(64)
request = dict(
zulip_org_id=zulip_org_id,
zulip_org_key=zulip_org_key,
hostname="invalid-host",
contact_email="server-admin@example.com",
)
result = self.client_post("/api/v1/remotes/server/register", request)
self.assert_json_error(result, "invalid-host is not a valid hostname")
def test_push_signup_invalid_email(self) -> None:
zulip_org_id = str(uuid.uuid4())
zulip_org_key = get_random_string(64)
request = dict(
zulip_org_id=zulip_org_id,
zulip_org_key=zulip_org_key,
hostname="example.com",
contact_email="server-admin",
)
result = self.client_post("/api/v1/remotes/server/register", request)
self.assert_json_error(result, "Enter a valid email address.")
def test_push_signup_invalid_zulip_org_id(self) -> None:
zulip_org_id = "x" * RemoteZulipServer.UUID_LENGTH
zulip_org_key = get_random_string(64)
request = dict(
zulip_org_id=zulip_org_id,
zulip_org_key=zulip_org_key,
hostname="example.com",
contact_email="server-admin@example.com",
)
result = self.client_post("/api/v1/remotes/server/register", request)
self.assert_json_error(result, "Invalid UUID")
# This looks mostly like a proper UUID, but isn't actually a valid UUIDv4,
# which makes it slip past a basic validation via initializing uuid.UUID with it.
# Thus we should test this scenario separately.
zulip_org_id = "18cedb98-5222-5f34-50a9-fc418e1ba972"
request["zulip_org_id"] = zulip_org_id
result = self.client_post("/api/v1/remotes/server/register", request)
self.assert_json_error(result, "Invalid UUID")
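    # A sketch of why the UUID above slips past naive parsing (assumption:
    # the weaker check merely constructs uuid.UUID from the string):
    #
    #     import uuid
    #     val = uuid.UUID("18cedb98-5222-5f34-50a9-fc418e1ba972")  # no exception
    #     val.version == 4  # False -- the variant/version bits are wrong,
    #                       # so an explicit UUIDv4 check still rejects it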
def test_push_signup_success(self) -> None:
zulip_org_id = str(uuid.uuid4())
zulip_org_key = get_random_string(64)
request = dict(
zulip_org_id=zulip_org_id,
zulip_org_key=zulip_org_key,
hostname="example.com",
contact_email="server-admin@example.com",
)
result = self.client_post("/api/v1/remotes/server/register", request)
self.assert_json_success(result)
server = RemoteZulipServer.objects.get(uuid=zulip_org_id)
self.assertEqual(server.hostname, "example.com")
self.assertEqual(server.contact_email, "server-admin@example.com")
# Update our hostname
request = dict(
zulip_org_id=zulip_org_id,
zulip_org_key=zulip_org_key,
hostname="zulip.example.com",
contact_email="server-admin@example.com",
)
result = self.client_post("/api/v1/remotes/server/register", request)
self.assert_json_success(result)
server = RemoteZulipServer.objects.get(uuid=zulip_org_id)
self.assertEqual(server.hostname, "zulip.example.com")
self.assertEqual(server.contact_email, "server-admin@example.com")
# Now test rotating our key
request = dict(
zulip_org_id=zulip_org_id,
zulip_org_key=zulip_org_key,
hostname="example.com",
contact_email="server-admin@example.com",
new_org_key=get_random_string(64),
)
result = self.client_post("/api/v1/remotes/server/register", request)
self.assert_json_success(result)
server = RemoteZulipServer.objects.get(uuid=zulip_org_id)
self.assertEqual(server.hostname, "example.com")
self.assertEqual(server.contact_email, "server-admin@example.com")
zulip_org_key = request["new_org_key"]
self.assertEqual(server.api_key, zulip_org_key)
# Update our hostname
request = dict(
zulip_org_id=zulip_org_id,
zulip_org_key=zulip_org_key,
hostname="zulip.example.com",
contact_email="new-server-admin@example.com",
)
result = self.client_post("/api/v1/remotes/server/register", request)
self.assert_json_success(result)
server = RemoteZulipServer.objects.get(uuid=zulip_org_id)
self.assertEqual(server.hostname, "zulip.example.com")
self.assertEqual(server.contact_email, "new-server-admin@example.com")
# Now test trying to double-create with a new random key fails
request = dict(
zulip_org_id=zulip_org_id,
zulip_org_key=get_random_string(64),
hostname="example.com",
contact_email="server-admin@example.com",
)
result = self.client_post("/api/v1/remotes/server/register", request)
self.assert_json_error(
result, f"Zulip server auth failure: key does not match role {zulip_org_id}"
)
class TestUserPushIndentityCompat(ZulipTestCase):
def test_filter_q(self) -> None:
user_identity_id = UserPushIndentityCompat(user_id=1)
user_identity_uuid = UserPushIndentityCompat(user_uuid="aaaa")
user_identity_both = UserPushIndentityCompat(user_id=1, user_uuid="aaaa")
self.assertEqual(user_identity_id.filter_q(), Q(user_id=1))
self.assertEqual(user_identity_uuid.filter_q(), Q(user_uuid="aaaa"))
self.assertEqual(user_identity_both.filter_q(), Q(user_uuid="aaaa") | Q(user_id=1))
def test_eq(self) -> None:
user_identity_a = UserPushIndentityCompat(user_id=1)
user_identity_b = UserPushIndentityCompat(user_id=1)
user_identity_c = UserPushIndentityCompat(user_id=2)
self.assertEqual(user_identity_a, user_identity_b)
self.assertNotEqual(user_identity_a, user_identity_c)
# An integer can't be equal to an instance of the class.
self.assertNotEqual(user_identity_a, 1)
| {
"content_hash": "d5c551150e0a769826241198160ae203",
"timestamp": "",
"source": "github",
"line_count": 2730,
"max_line_length": 188,
"avg_line_length": 43.37032967032967,
"alnum_prop": 0.5877315225378164,
"repo_name": "rht/zulip",
"id": "b8ea3699ce67ff4356e1e97e35751ac1e199f99c",
"size": "118403",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "zerver/tests/test_push_notifications.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "489438"
},
{
"name": "Dockerfile",
"bytes": "4025"
},
{
"name": "Emacs Lisp",
"bytes": "157"
},
{
"name": "HTML",
"bytes": "743287"
},
{
"name": "Handlebars",
"bytes": "374049"
},
{
"name": "JavaScript",
"bytes": "4000260"
},
{
"name": "Perl",
"bytes": "10163"
},
{
"name": "Puppet",
"bytes": "112128"
},
{
"name": "Python",
"bytes": "10160680"
},
{
"name": "Ruby",
"bytes": "3459"
},
{
"name": "Shell",
"bytes": "146797"
},
{
"name": "TypeScript",
"bytes": "284836"
}
],
"symlink_target": ""
} |
from django.contrib.auth.models import Permission
from django.core.urlresolvers import reverse
from django.utils.six.moves import http_client
from django_webtest import WebTest
from purl import URL
from oscar.core.compat import get_user_model
User = get_user_model()
def add_permissions(user, permissions):
"""
Grant permissions to the passed user
:param permissions: e.g. ['partner.dashboard_access']
"""
for permission in permissions:
app_label, __, codename = permission.partition('.')
perm = Permission.objects.get(content_type__app_label=app_label,
codename=codename)
user.user_permissions.add(perm)
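# Minimal usage sketch (model and permission names are illustrative):
#
#     user = User.objects.create_user(email='staff@example.com', password='pw')
#     add_permissions(user, ['partner.dashboard_access'])
#
# Note that Django caches permissions per user instance, so re-fetch the user
# before calling has_perm() if permissions were granted after the first check.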
class WebTestCase(WebTest):
is_staff = False
is_anonymous = False
is_superuser = False
username = 'testuser'
email = 'testuser@buymore.com'
password = 'somefancypassword'
permissions = []
def setUp(self):
self.user = None
if not self.is_anonymous:
self.user = self.create_user(
self.username, self.email, self.password)
self.user.is_staff = self.is_staff
add_permissions(self.user, self.permissions)
self.user.save()
def create_user(self, username=None, email=None, password=None):
"""
Create a user for use in a test.
        As usernames are optional in newer versions of Django, the username
        is only set if the User model actually has that field.
"""
kwargs = {'email': email, 'password': password}
if 'username' in User._meta.get_all_field_names():
kwargs['username'] = username
return User.objects.create_user(**kwargs)
def get(self, url, **kwargs):
kwargs.setdefault('user', self.user)
return self.app.get(url, **kwargs)
def post(self, url, **kwargs):
kwargs.setdefault('user', self.user)
return self.app.post(url, **kwargs)
# Custom assertions
def assertIsRedirect(self, response, expected_url=None):
self.assertTrue(response.status_code in (
http_client.FOUND, http_client.MOVED_PERMANENTLY))
if expected_url:
location = URL.from_string(response['Location'])
self.assertEqual(expected_url, location.path())
def assertIsNotRedirect(self, response):
self.assertIsOk(response)
self.assertTrue(response.status_code not in (
http_client.FOUND, http_client.MOVED_PERMANENTLY
))
def assertRedirectsTo(self, response, url_name, kwargs=None):
"""
Asserts that a response is a redirect to a given URL name.
"""
self.assertIsRedirect(response)
location = response.headers['Location']
for unwanted in ['http://localhost:80', 'http://testserver']:
location = location.replace(unwanted, '')
self.assertEqual(reverse(url_name, kwargs=kwargs), location)
def assertNoAccess(self, response):
self.assertContext(response)
self.assertTrue(response.status_code in (http_client.NOT_FOUND,
http_client.FORBIDDEN))
def assertIsOk(self, response):
self.assertEqual(http_client.OK, response.status_code)
def assertContext(self, response):
self.assertTrue(response.context is not None,
'No context was returned')
def assertInContext(self, response, key):
self.assertContext(response)
self.assertTrue(key in response.context,
"Context should contain a variable '%s'" % key)
| {
"content_hash": "ca9757f029d514e2a2e30ddffa8591e3",
"timestamp": "",
"source": "github",
"line_count": 105,
"max_line_length": 78,
"avg_line_length": 34.08571428571429,
"alnum_prop": 0.6267113718915899,
"repo_name": "eddiep1101/django-oscar",
"id": "955fd78346912da2c40c69e1c7226cfc51352766",
"size": "3579",
"binary": false,
"copies": "11",
"ref": "refs/heads/master",
"path": "src/oscar/test/testcases.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "544471"
},
{
"name": "HTML",
"bytes": "495069"
},
{
"name": "JavaScript",
"bytes": "433783"
},
{
"name": "Makefile",
"bytes": "2421"
},
{
"name": "Python",
"bytes": "1676682"
},
{
"name": "Shell",
"bytes": "3047"
},
{
"name": "XSLT",
"bytes": "24882"
}
],
"symlink_target": ""
} |
from django.conf import settings
from django.http import HttpResponse,HttpResponseRedirect
from django.utils.importlib import import_module
from django.template import Context
from django.template.loader import get_template
from django.shortcuts import get_object_or_404, render_to_response
from django.views.generic.simple import direct_to_template
def main_page(request):
template = get_template('main_page.html')
variables = Context({
'head_title': 'Django Bookmarks',
'page_title': 'Welcome to Django Bookmarks',
'page_body': 'Where you can store and share bookmarks!'
})
output = template.render(variables)
return HttpResponse(output)
def fb(request):
# try:
# user = User.objects.get(username=username)
# except:
# raise Http404('Requested user not found.')
# bookmarks = user.bookmark_set.all()
#return render_to_response('registration/blank.html')
return direct_to_template(request, 'registration/fblogin.html')
def image(request):
    # Use a context manager so the file handle is closed after reading.
    with open("static/images/logo.png", "rb") as f:
        image_data = f.read()
    return HttpResponse(image_data, mimetype="image/png")
def html5(request):
return render_to_response('html5/html5example.html') | {
"content_hash": "7193591f33da54345b7fc3ac6468edef",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 67,
"avg_line_length": 34.1764705882353,
"alnum_prop": 0.7564543889845095,
"repo_name": "westinedu/wrgroups",
"id": "3932327c903ae322b8bcb9ad4d8408947f8dd9e2",
"size": "1162",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "bookmarks/views.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "149426"
},
{
"name": "Python",
"bytes": "4241473"
},
{
"name": "Shell",
"bytes": "1355"
}
],
"symlink_target": ""
} |
from datetime import datetime
from couchdbkit import ResourceNotFound
from django.contrib import messages
from django.core.exceptions import SuspiciousOperation
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect, HttpResponseBadRequest, Http404
from django.utils.decorators import method_decorator
import json
from corehq.apps.export.custom_export_helpers import CustomExportHelper
from corehq.apps.reports.display import xmlns_to_name
from corehq.apps.reports.standard.export import ExcelExportReport, CaseExportReport
from corehq.apps.settings.views import BaseProjectDataView
from corehq.apps.users.decorators import require_permission
from corehq.apps.users.models import Permissions
from couchexport.models import SavedExportSchema, ExportSchema
from couchexport.schema import build_latest_schema
from dimagi.utils.decorators.memoized import memoized
from django.utils.translation import ugettext as _, ugettext_noop, ugettext_lazy
from dimagi.utils.web import json_response
require_form_export_permission = require_permission(
Permissions.view_report,
'corehq.apps.reports.standard.export.ExcelExportReport',
login_decorator=None
)
class BaseExportView(BaseProjectDataView):
template_name = 'export/customize_export.html'
export_type = None
is_async = True
@property
def parent_pages(self):
return [{
'title': self.report_class.name,
'url': self.export_home_url,
}]
@method_decorator(require_form_export_permission)
def dispatch(self, request, *args, **kwargs):
return super(BaseExportView, self).dispatch(request, *args, **kwargs)
@property
def export_helper(self):
raise NotImplementedError("You must implement export_helper!")
@property
def export_home_url(self):
return self.report_class.get_url(domain=self.domain)
@property
@memoized
def report_class(self):
try:
return {
'form': ExcelExportReport,
'case': CaseExportReport
}[self.export_type]
except KeyError:
raise SuspiciousOperation
@property
def page_context(self):
return self.export_helper.get_context()
def post(self, request, *args, **kwargs):
if self.is_async:
return json_response({
'redirect': self.export_home_url,
})
return HttpResponseRedirect(self.export_home_url)
class BaseCreateCustomExportView(BaseExportView):
# this view likely needs a lot more cleanup. will leave that for a later time...
@property
@memoized
def export_helper(self):
return CustomExportHelper.make(self.request, self.export_type, domain=self.domain)
def post(self, request, *args, **kwargs):
self.export_helper.update_custom_export()
messages.success(request, _("Custom export created!"))
return super(BaseCreateCustomExportView, self).post(request, *args, **kwargs)
def get(self, request, *args, **kwargs):
# just copying what was in the old django view here. don't want to mess too much with exports just yet.
try:
export_tag = [self.domain, json.loads(request.GET.get("export_tag", "null") or "null")]
except ValueError:
return HttpResponseBadRequest()
schema = build_latest_schema(export_tag)
if not schema and self.export_helper.export_type == "form":
schema = create_basic_form_checkpoint(export_tag)
if schema:
app_id = request.GET.get('app_id')
self.export_helper.custom_export = self.export_helper.ExportSchemaClass.default(
schema=schema,
name="%s: %s" % (
xmlns_to_name(self.domain, export_tag[1], app_id=app_id)
if self.export_helper.export_type == "form" else export_tag[1],
datetime.utcnow().strftime("%Y-%m-%d")
),
type=self.export_helper.export_type
)
if self.export_helper.export_type in ['form', 'case']:
self.export_helper.custom_export.app_id = app_id
return super(BaseCreateCustomExportView, self).get(request, *args, **kwargs)
messages.warning(request, _("<strong>No data found for that form "
"(%s).</strong> Submit some data before creating an export!") %
xmlns_to_name(self.domain, export_tag[1], app_id=None), extra_tags="html")
return HttpResponseRedirect(ExcelExportReport.get_url(domain=self.domain))
class CreateCustomFormExportView(BaseCreateCustomExportView):
urlname = 'custom_export_form'
page_title = ugettext_lazy("Create Custom Form Export")
export_type = 'form'
class CreateCustomCaseExportView(BaseCreateCustomExportView):
urlname = 'custom_export_case'
page_title = ugettext_lazy("Create Custom Case Export")
export_type = 'case'
class BaseModifyCustomExportView(BaseExportView):
@property
def page_url(self):
return reverse(self.urlname, args=[self.domain, self.export_id])
@property
def export_id(self):
return self.kwargs.get('export_id')
@property
@memoized
def export_helper(self):
try:
return CustomExportHelper.make(self.request, self.export_type, self.domain, self.export_id)
except ResourceNotFound:
raise Http404()
class BaseEditCustomExportView(BaseModifyCustomExportView):
def post(self, request, *args, **kwargs):
self.export_helper.update_custom_export()
messages.success(request, _("Custom export saved!"))
return super(BaseEditCustomExportView, self).post(request, *args, **kwargs)
class EditCustomFormExportView(BaseEditCustomExportView):
urlname = 'edit_custom_export_form'
page_title = ugettext_noop("Edit Form Custom Export")
export_type = 'form'
class EditCustomCaseExportView(BaseEditCustomExportView):
urlname = 'edit_custom_export_case'
page_title = ugettext_noop("Edit Case Custom Export")
export_type = 'case'
class DeleteCustomExportView(BaseModifyCustomExportView):
urlname = 'delete_custom_export'
http_method_names = ['post']
is_async = False
def post(self, request, *args, **kwargs):
try:
saved_export = SavedExportSchema.get(self.export_id)
except ResourceNotFound:
return HttpResponseRedirect(request.META['HTTP_REFERER'])
self.export_type = saved_export.type
saved_export.delete()
messages.success(request, _("Custom export was deleted."))
return super(DeleteCustomExportView, self).post(request, *args, **kwargs)
BASIC_FORM_SCHEMA = {
"doc_type": "string",
"domain": "string",
"xmlns": "string",
"form": {
"@xmlns": "string",
"@uiVersion": "string",
"@name": "string",
"#type": "string",
"meta": {
"@xmlns": "string",
"username": "string",
"instanceID": "string",
"userID": "string",
"timeEnd": "string",
"appVersion": {
"@xmlns": "string",
"#text": "string"
},
"timeStart": "string",
"deviceID": "string"
},
"@version": "string"
},
"partial_submission": "string",
"_rev": "string",
"#export_tag": [
"string"
],
"received_on": "string",
"app_id": "string",
"last_sync_token": "string",
"submit_ip": "string",
"computed_": {
},
"openrosa_headers": {
"HTTP_DATE": "string",
"HTTP_ACCEPT_LANGUAGE": "string",
"HTTP_X_OPENROSA_VERSION": "string"
},
"date_header": "string",
"path": "string",
"computed_modified_on_": "string",
"_id": "string"
}
def create_basic_form_checkpoint(index):
checkpoint = ExportSchema(seq="0", schema=BASIC_FORM_SCHEMA, timestamp=datetime.utcnow(), index=index)
checkpoint.save()
return checkpoint
| {
"content_hash": "62b6da2cf9b199354c852cbdc825294e",
"timestamp": "",
"source": "github",
"line_count": 236,
"max_line_length": 111,
"avg_line_length": 34.355932203389834,
"alnum_prop": 0.6429452392698569,
"repo_name": "gmimano/commcaretest",
"id": "544d5bab54bda45dedd203162d8d3041cd41e564",
"size": "8108",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "corehq/apps/export/views.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ActionScript",
"bytes": "15950"
},
{
"name": "CSS",
"bytes": "282577"
},
{
"name": "JavaScript",
"bytes": "2731012"
},
{
"name": "Python",
"bytes": "4738450"
},
{
"name": "Shell",
"bytes": "22454"
}
],
"symlink_target": ""
} |
import ipaddr
from enums import ServiceProtocol
class InterfaceACL:
def __init__(self, interface_name=None, acl_direction=None, acl_name=None):
self.acl_name = acl_name
self.acl_direction = acl_direction
        self.interface_name = interface_name
self.entries_post_intra_acl_filtering = None
self.intra_acl_interactions = None
self.entries_post_inter_acl_filtering = None
self.inter_acl_interactions = None
class ACL:
def __init__(self, acl_name=None, entry_list=None):
self.name = acl_name
self.entry_list = entry_list
self.entries_post_intra_acl_filtering = None
self.entries_post_inter_acl_filtering = None
self.intra_acl_interactions = None
self.inter_acl_interactions = None
@property
def Entries(self):
return self.entry_list
@Entries.setter
def Entries(self, entry_list):
self.entry_list = entry_list
@property
def EntriesPostIntraACLFiltering(self):
return self.entries_post_intra_acl_filtering
@property
def EntriesPostInterACLFiltering(self):
return self.entries_post_inter_acl_filtering
@EntriesPostIntraACLFiltering.setter
def EntriesPostIntraACLFiltering(self, entries_list):
self.entries_post_intra_acl_filtering = entries_list
@EntriesPostInterACLFiltering.setter
def EntriesPostInterACLFiltering(self, entries_list):
self.entries_post_inter_acl_filtering = entries_list
@property
def IntraACLInteractions(self):
return self.intra_acl_interactions
@IntraACLInteractions.setter
def IntraACLInteractions(self, entries_list):
self.intra_acl_interactions = entries_list
@property
def InterACLInteractions(self):
return self.inter_acl_interactions
@InterACLInteractions.setter
def InterACLInteractions(self, entries_list):
self.inter_acl_interactions = entries_list
class SecurityZone:
def __init__(self,zone_id,ipaddress_list,primary_firewall_name):
self.zone_id = zone_id
self.ipaddress_list = ipaddress_list
self.sub_elements = dict()
self.gateways = []
self.primary_firewall_name = primary_firewall_name
self.excluded_elements = dict()
def ContainsSubnetOrIpaddress(self,subnet_or_ip):
included = False
        if isinstance(subnet_or_ip, list):
            # All elements of the list must be included
            if len(subnet_or_ip) == 0:
                return False
            for ip1 in subnet_or_ip:
                # Reset for each element: a single contained element must not
                # let later, non-contained elements slip through.
                inclusive = False
                for ip2 in self.ipaddress_list:
                    if ip2.__contains__(ip1):
                        inclusive = True
                        break
                if not inclusive:
                    return False
            return True
        else:
            for ip in self.ipaddress_list:
                if subnet_or_ip != None:
                    if ip.__contains__(subnet_or_ip):
                        included = True
                        break
            return included
def AddSubElement(self,element,confirmed):
# Check whether element has been confidently excluded previously
if self.excluded_elements.has_key(element):
if self.excluded_elements[element]:
# Confidently excluded..cannot add
return
else:
# Not confidently excluded..how confident are we including it?
if confirmed:
# Remove exclusion
self.RemoveExcludedSubElement(element)
# Include element
if not self.sub_elements.has_key(element):
self.sub_elements[element]=confirmed
else:
if confirmed:
self.sub_elements[element]=confirmed
def ContainsSubElement(self, element):
return self.sub_elements.has_key(element)
def RemoveSubElement(self,element):
if self.sub_elements.has_key(element):
self.sub_elements.__delitem__(element)
def RemoveExcludedSubElement(self,element):
if self.excluded_elements.has_key(element):
self.excluded_elements.__delitem__(element)
def AddExludedSubElement(self, element, confirmed):
# Check whether element has been confidently included previously
if self.sub_elements.has_key(element):
if self.sub_elements[element]:
# Confidently included..cannot exclude
return
else:
# Not confidently included..how confident are we excluding it?
if confirmed:
# Remove inclusion
self.RemoveSubElement(element)
# Remove from sub-element list first
#if confirmed:
# self.RemoveSubElement(element)
# Add to exclusion list
if not self.excluded_elements.__contains__(element):
self.excluded_elements[element]= confirmed
else:
if confirmed:
self.excluded_elements[element] = confirmed
def AddGateway(self, gateway):
if not self.gateways.__contains__(gateway):
self.gateways.append(gateway)
@property
def Gateways(self):
return self.gateways
@property
def GatewayNames(self):
names=[]
for gateway in self.gateways:
names.append("gw %s"%gateway.ipaddress)
return names
class SecurityConduit:
def __init__(self, conduit_id, attached_zones):
self.conduit_id=conduit_id
self.attached_zones = attached_zones
self.interfaces = None
self.firewall_architecture = None
def SetFirewallArchitecture(self, architecture):
self.firewall_architecture = architecture
def GetFirewallArchitecture(self):
return self.firewall_architecture
def GetInterfaces(self):
return self.interfaces
def SetInterfaces(self, interfaces):
self.interfaces = interfaces
def GetAttachedZones(self):
return self.attached_zones
def GetId(self):
        return self.conduit_id
class ConduitFirewallArchitecture:
def __init__(self):
self.parallelFirewallPaths = []
def AddParallelFirewallPath(self, path):
if(not self.parallelFirewallPaths.__contains__(path)):
self.parallelFirewallPaths.append(path)
def GetParallelFirewallPaths(self):
return self.parallelFirewallPaths
def GetBoundaryFirewallsForZone(self, zone):
pass
class ACE:
def __init__(self, rule_action, protocol, source, source_port_filter, dest, dest_port_filter, icmp_type, rule_core=None):
self.rule_action = rule_action
self.protocol = protocol
self.source = source
self.source_port_filter = source_port_filter
self.dest = dest
self.dest_port_filter = dest_port_filter
self.rule_core = rule_core
self.icmp_type=icmp_type
@property
def RuleCore(self):
return self.rule_core
@property
def Action(self):
return self.rule_action
@property
def Protocol(self):
return self.protocol
@property
def Source(self):
return self.source
@property
def Dest(self):
return self.dest
@property
def SourcePortFilter(self):
return self.source_port_filter
@property
def DestPortFilter(self):
return self.dest_port_filter
class AtomicACE:
def __init__(self, rule_action, protocols, source_ip, source_ports, dest_ip, dest_ports, icmp_type, entry):
self.rule_action = rule_action
self.protocols = protocols
self.source_ip = source_ip
self.source_ports = source_ports
self.dest_ip = dest_ip
self.dest_ports = dest_ports
self.entry=entry
self.icmp_type=icmp_type
self.interaction_type=None
self.first_source_port = None
self.first_dest_port = None
if self.source_ports != None and len(self.source_ports)>0:
self.first_source_port = self.source_ports[0]
if self.dest_ports != None and len(self.dest_ports)>0:
self.first_dest_port = self.dest_ports[0]
@property
def Action(self):
return self.rule_action
@property
def Protocols(self):
return self.protocols
@property
def SourceIp(self):
return self.source_ip
@property
def DestIp(self):
return self.dest_ip
@property
def SourcePort(self):
return self.first_source_port
@property
def DestPort(self):
return self.first_dest_port
@property
def SourcePortFilter(self):
return self.source_ports
@property
def DestPortFilter(self):
return self.dest_ports
def Overlaps(self, acl_rule):
if acl_rule != None:
            if ((acl_rule.Protocols == self.Protocols and acl_rule.Action == self.Action) or
                ((acl_rule.Protocols.__contains__(ServiceProtocol.ip) or self.Protocols.__contains__(ServiceProtocol.ip)) and acl_rule.Action == self.Action)):
#Temp eigrp overlaps are heavy!!
'''
if acl_rule.Protocols.__contains__(ServiceProtocol.eigrp) or self.Protocols.__contains__(ServiceProtocol.eigrp):
print("eigrp overlaps")
return False'''
# intersections of the following properties must be non null
#..source ips
#..source ports
#..dest ips
#..dest ports
# debug
#print('acl_rule: %s %s %s %s %s %s'%(acl_rule.Action,acl_rule.Protocols,acl_rule.SourceIp,acl_rule.SourcePortFilter,acl_rule.DestIp,acl_rule.DestPortFilter))
#print('other_rule: %s %s %s %s %s %s'%(self.Action,self.Protocols,self.SourceIp,self.SourcePortFilter,self.DestIp,self.DestPortFilter))
if(self.IsIpaddressOverlap(acl_rule.SourceIp, self.SourceIp)and
self.IsPortOverlap(acl_rule.SourcePortFilter, self.SourcePortFilter) and
self.IsIpaddressOverlap(acl_rule.DestIp, self.DestIp) and
self.IsPortOverlap(acl_rule.DestPortFilter, self.DestPortFilter) and
acl_rule.icmp_type==self.icmp_type):
#print("Overlap detected1: source_ip- %s~%s dest_ip- %s~%s source_port- %s~%s dest_port- %s~%s"%(acl_rule.SourceIp,self.SourceIp,acl_rule.SourcePortFilter,self.SourcePortFilter, acl_rule.DestIp, self.DestIp, acl_rule.DestPortFilter, self.DestPortFilter))
return True
elif (self.IsIpaddressOverlap(acl_rule.SourceIp, self.SourceIp)and
self.IsIpaddressOverlap(acl_rule.DestIp, self.DestIp) and
((acl_rule.Protocols.__contains__(ServiceProtocol.ip) or self.Protocols.__contains__(ServiceProtocol.ip)) and acl_rule.Action==self.Action)):
#print("Overlap detected2: source_ip- %s~%s dest_ip- %s~%s source_port- %s~%s dest_port- %s~%s"%(acl_rule.SourceIp,self.SourceIp,acl_rule.SourcePortFilter,self.SourcePortFilter, acl_rule.DestIp, self.DestIp, acl_rule.DestPortFilter, self.DestPortFilter))
return True
#print("No Overlap.")
return False
def Conflicts(self, acl_rule):
if acl_rule != None:
            if ((acl_rule.Protocols == self.Protocols and acl_rule.Action != self.Action) or
                ((acl_rule.Protocols.__contains__(ServiceProtocol.ip) or self.Protocols.__contains__(ServiceProtocol.ip)) and acl_rule.Action != self.Action)):
#Temp eigrp overlaps are heavy!!
'''
if acl_rule.Protocols.__contains__(ServiceProtocol.eigrp) or self.Protocols.__contains__(ServiceProtocol.eigrp):
print("eigrp conflicts")
return False'''
# intersections of the following properties must be non null
#..source ips
#..source ports
#..dest ips
#..dest ports
if(self.IsIpaddressOverlap(acl_rule.SourceIp, self.SourceIp)and
self.IsPortOverlap(acl_rule.SourcePortFilter, self.SourcePortFilter) and
self.IsIpaddressOverlap(acl_rule.DestIp, self.DestIp) and
self.IsPortOverlap(acl_rule.DestPortFilter, self.DestPortFilter)and
acl_rule.icmp_type==self.icmp_type):
return True
return False
def IsShadowedBy(self, acl_rule):
if acl_rule != None:
if ((acl_rule.protocols == self.protocols) or
acl_rule.Protocols.__contains__(ServiceProtocol.ip)):
if (self.IsIpSubset(acl_rule.SourceIp, self.SourceIp) and
self.IsPortSubset(acl_rule.SourcePortFilter, self.SourcePortFilter) and
self.IsIpSubset(acl_rule.DestIp, self.DestIp) and
self.IsPortSubset(acl_rule.DestPortFilter, self.DestPortFilter)):
return True
return False
def IsIpaddressOverlap(self, subnet1, subnet2):
if subnet1 == None or subnet2==None: return False
temp1=subnet1
temp2=subnet2
if isinstance(subnet1, ipaddr.IPv4Address):
temp1 = ipaddr.IPv4Network(subnet1)
if isinstance(subnet2, ipaddr.IPv4Address):
temp2 = ipaddr.IPv4Network(subnet2)
return temp1.overlaps(temp2)
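    # Sketch of the overlap semantics relied on above, using the ipaddr
    # library imported at module top (addresses are illustrative):
    #
    #     a = ipaddr.IPv4Network('10.0.0.0/24')
    #     a.overlaps(ipaddr.IPv4Network('10.0.0.128/25'))  # True (subset)
    #     a.overlaps(ipaddr.IPv4Network('10.0.1.0/24'))    # False (disjoint)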
def IsPortOverlap(self, ports1, ports2):
#TODO: add-in range checking
#if ports1 != None and ports2 !=None:
return ports1 == ports2
#return False
def IsIpSubset(self, subnet1, subnet2):
# Checks whether subnet2 is contained wholly within subnet1
subnet1_address=subnet1
subnet2_address=subnet2
if isinstance(subnet1, ipaddr.IPv4Address):
subnet1_address=ipaddr.IPv4Network(subnet1)
if isinstance(subnet2, ipaddr.IPv4Address):
subnet2_address=ipaddr.IPv4Network(subnet2)
return subnet1_address.__contains__(subnet2_address)
def IsPortSubset(self, ports1, ports2):
#TODO: add-in range checking
return ports1 == ports2
class LowlevelACE:
def __init__(self, action, ip_protocols, source_ip_range, source_port_range, dest_ip_range, dest_port_range, icmp_type, entry_string):
# action - 1(permit), 2(deny)
# ip_protocols - [(start,end)] 0(ip), 1(icmp),...... 255(reserved) these are the standard ip protocol numbers
# source_ip_range, dest_ip_range - [(start,end)] ipv4 addresses
# source_port_range, dest_port_range - [(start,end)] 0-65535
# icmp_type - 0(echo_reply), 3(dest-unreachable)....255 (reserved)
self.action = action
self.ip_protocols = ip_protocols
self.source_ip_range=source_ip_range
self.source_port_range=source_port_range
self.dest_ip_range=dest_ip_range
self.dest_port_range=dest_port_range
self.icmp_type=icmp_type
self.entry_string = entry_string
@property
def Action(self):
return self.action
@property
def Protocols(self):
return self.ip_protocols
@property
def SourceIpRange(self):
return self.source_ip_range
@property
def SourcePortRange(self):
return self.source_port_range
@property
def DestIpRange(self):
return self.dest_ip_range
@property
def DestPortRange(self):
return self.dest_port_range
@property
def IcmpType(self):
return self.icmp_type
@property
def RuleEntry(self):
return self.entry_string
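# Construction sketch matching the LowlevelACE range conventions documented
# in its __init__ (all values illustrative; real entries come from the parser):
#
#     rule = LowlevelACE(
#         action=1,                        # permit
#         ip_protocols=[(6, 6)],           # tcp only
#         source_ip_range=[(ipaddr.IPv4Address('10.0.0.0'),
#                           ipaddr.IPv4Address('10.0.0.255'))],
#         source_port_range=[(0, 65535)],
#         dest_ip_range=[(ipaddr.IPv4Address('10.1.0.5'),
#                         ipaddr.IPv4Address('10.1.0.5'))],
#         dest_port_range=[(80, 80)],
#         icmp_type=None,
#         entry_string='permit tcp 10.0.0.0 0.0.0.255 host 10.1.0.5 eq 80',
#     )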
class RuleInteraction:
def __init__(self, rule):
self.current_rule = rule
self.type = None
self.net_effect = None
@property
def Rule(self):
return self.current_rule
@property
def Type(self):
return self.type
@property
def NetEffect(self):
return self.net_effect
@NetEffect.setter
def NetEffect(self, value):
self.net_effect = value
@Type.setter
def Type(self, value):
self.type = value
| {
"content_hash": "cfb0e7408185754a24c30c195a7c57a6",
"timestamp": "",
"source": "github",
"line_count": 476,
"max_line_length": 273,
"avg_line_length": 34.13025210084034,
"alnum_prop": 0.6185522590176044,
"repo_name": "dinesharanathunga/firewallconfigdecryptor",
"id": "9f7a27f0b05be857335243306386ca608ffee52a",
"size": "16246",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "firewallconfigdecryptor/security.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Perl",
"bytes": "4984"
},
{
"name": "Python",
"bytes": "271710"
},
{
"name": "Shell",
"bytes": "1"
}
],
"symlink_target": ""
} |
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
app = Flask(__name__, instance_relative_config=True)
app.config.from_object('wikiapi.config')
app.config.from_pyfile('application.cfg', silent=True)
db = SQLAlchemy(app)
from wikiapi import views, models
db.create_all() | {
"content_hash": "bb19bbfb8690992a2258522ddce61b9e",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 54,
"avg_line_length": 26,
"alnum_prop": 0.7762237762237763,
"repo_name": "Evgenus/wiki-rest-json-api-test",
"id": "bc3e60bbd1a2d51631f65d8c6968ac804b019c1b",
"size": "286",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "wikiapi/app.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "572"
},
{
"name": "Python",
"bytes": "18894"
}
],
"symlink_target": ""
} |
"""
Slovenian specific form helpers.
"""
from __future__ import absolute_import, unicode_literals
import datetime
import re
from django.core.validators import EMPTY_VALUES
from django.forms import ValidationError
from django.forms.fields import CharField, Select, ChoiceField
from django.utils.translation import ugettext_lazy as _
from .si_postalcodes import SI_POSTALCODES_CHOICES
class SIEMSOField(CharField):
"""
A form for validating Slovenian personal identification number.
Additionally stores gender, nationality and birthday to self.info dictionary.
"""
default_error_messages = {
'invalid': _('This field should contain exactly 13 digits.'),
'date': _('The first 7 digits of the EMSO must represent a valid past date.'),
'checksum': _('The EMSO is not valid.'),
}
emso_regex = re.compile(r'^(\d{2})(\d{2})(\d{3})(\d{2})(\d{3})(\d)$')
def clean(self, value):
super(SIEMSOField, self).clean(value)
if value in EMPTY_VALUES:
return ''
value = value.strip()
m = self.emso_regex.match(value)
if m is None:
raise ValidationError(self.default_error_messages['invalid'])
# Validate EMSO
s = 0
int_values = [int(i) for i in value]
for a, b in zip(int_values, list(range(7, 1, -1)) * 2):
s += a * b
chk = s % 11
if chk == 0:
K = 0
else:
K = 11 - chk
if K == 10 or int_values[-1] != K:
raise ValidationError(self.default_error_messages['checksum'])
# Extract extra info in the identification number
day, month, year, nationality, gender, chksum = [int(i) for i in m.groups()]
if year < 890:
year += 2000
else:
year += 1000
# validate birthday
try:
birthday = datetime.date(year, month, day)
except ValueError:
raise ValidationError(self.error_messages['date'])
if datetime.date.today() < birthday:
raise ValidationError(self.error_messages['date'])
self.info = {
'gender': 'male' if gender < 500 else 'female',
'birthdate': birthday,
'nationality': nationality,
}
return value
class SITaxNumberField(CharField):
"""
Slovenian tax number field.
Valid input is SIXXXXXXXX or XXXXXXXX where X is a number.
"""
default_error_messages = {
'invalid': _('Enter a valid tax number in form SIXXXXXXXX'),
}
sitax_regex = re.compile(r'^(?:SI)?([1-9]\d{7})$')
def clean(self, value):
super(SITaxNumberField, self).clean(value)
if value in EMPTY_VALUES:
return ''
value = value.strip()
m = self.sitax_regex.match(value)
if m is None:
raise ValidationError(self.default_error_messages['invalid'])
value = m.groups()[0]
# Validate Tax number
s = 0
int_values = [int(i) for i in value]
for a, b in zip(int_values, range(8, 1, -1)):
s += a * b
chk = 11 - (s % 11)
if chk == 10:
chk = 0
if int_values[-1] != chk:
raise ValidationError(self.default_error_messages['invalid'])
return value
class SIPostalCodeField(ChoiceField):
"""
Slovenian post codes field.
"""
def __init__(self, *args, **kwargs):
kwargs.setdefault('choices', SI_POSTALCODES_CHOICES)
super(SIPostalCodeField, self).__init__(*args, **kwargs)
class SIPostalCodeSelect(Select):
"""
A Select widget that uses Slovenian postal codes as its choices.
"""
def __init__(self, attrs=None):
super(SIPostalCodeSelect, self).__init__(attrs,
choices=SI_POSTALCODES_CHOICES)
class SIPhoneNumberField(CharField):
"""
Slovenian phone number field.
Phone number must contain at least local area code.
Country code can be present.
Examples:
* +38640XXXXXX
* 0038640XXXXXX
* 040XXXXXX
* 01XXXXXX
* 0590XXXXX
"""
default_error_messages = {
'invalid': _('Enter phone number in form +386XXXXXXXX or 0XXXXXXXX.'),
}
phone_regex = re.compile(r'^(?:(?:00|\+)386|0)(\d{7,8})$')
def clean(self, value):
super(SIPhoneNumberField, self).clean(value)
if value in EMPTY_VALUES:
return ''
value = value.replace(' ', '').replace('-', '').replace('/', '')
m = self.phone_regex.match(value)
if m is None:
raise ValidationError(self.default_error_messages['invalid'])
return m.groups()[0]
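# Illustrative usage sketch (assumes a configured Django settings module; the
# sample values were picked to satisfy the checksums above):
#
#     >>> SITaxNumberField().clean('SI50223054')
#     '50223054'
#     >>> SIPhoneNumberField().clean('+386 40 123 456')
#     '40123456'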
| {
"content_hash": "4b80fa8ee8a564ad2414637c873afe03",
"timestamp": "",
"source": "github",
"line_count": 170,
"max_line_length": 86,
"avg_line_length": 27.711764705882352,
"alnum_prop": 0.5773721078327319,
"repo_name": "yakky/django-localflavor",
"id": "fc15ed8dc3f8f894afaff7b72d9d79b6268f4463",
"size": "4711",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "localflavor/si/forms.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "573411"
},
{
"name": "Shell",
"bytes": "6725"
}
],
"symlink_target": ""
} |
"""
lassie.filters.generic
~~~~~~~~~~~~~~~~~~~~~~
This module contains data about generic type content to help Lassie filter for content.
"""
from ..compat import str
import re
GENERIC_MAPS = {
'meta': {
'generic': {
'pattern': re.compile(r"^(description|keywords|title)", re.I),
'map': {
'description': 'description',
'keywords': 'keywords',
'title': 'title',
},
'image_key': '',
'video_key': '',
'key': 'name',
},
},
'link': {
'favicon': {
'pattern': 'icon',
'key': 'rel',
'type': str('favicon'),
},
},
}
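# Quick illustration (not part of the library): the 'generic' meta pattern
# above matches the standard <meta name="..."> keys case-insensitively.
#
#     >>> bool(GENERIC_MAPS['meta']['generic']['pattern'].match('Description'))
#     True
#     >>> bool(GENERIC_MAPS['meta']['generic']['pattern'].match('og:title'))
#     False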
| {
"content_hash": "f2b1f87249e89f3e3cf77538606bc8cc",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 87,
"avg_line_length": 21.058823529411764,
"alnum_prop": 0.42178770949720673,
"repo_name": "jpadilla/lassie",
"id": "07a979f1a2aa0fb9022bd509d29d243ef3081f77",
"size": "741",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lassie/filters/generic.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "7571"
},
{
"name": "Python",
"bytes": "30813"
}
],
"symlink_target": ""
} |
{
"name": "Kubik CRM Customer User Linked",
"summary": "Add customer linked user to kubik contacts",
"version": "9.0.1.1.0",
"category": "Customer Relationship Management",
"website": "http://vileo.co.id",
"author": "Suhendar",
"contributors": [
'Suhendar',
],
"license": "AGPL-3",
'application': False,
'installable': True,
'auto_install': False,
"depends": [
"base","crm",
],
"data": [
"views/tab_page_linked_user.xml",
"views/res_partner.xml"
],
}
| {
"content_hash": "3ceb8d46159d030063a6f24a24b3051e",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 60,
"avg_line_length": 23.82608695652174,
"alnum_prop": 0.541970802919708,
"repo_name": "vileopratama/vitech",
"id": "73e51b88b76f38458cc4d37d948b26727191424f",
"size": "548",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/addons/partner_contact_linked_user_kubik/__openerp__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "9611"
},
{
"name": "CSS",
"bytes": "2125999"
},
{
"name": "HTML",
"bytes": "252393"
},
{
"name": "Java",
"bytes": "1840167"
},
{
"name": "JavaScript",
"bytes": "6176224"
},
{
"name": "Makefile",
"bytes": "19072"
},
{
"name": "Mako",
"bytes": "7659"
},
{
"name": "NSIS",
"bytes": "16782"
},
{
"name": "Python",
"bytes": "9438805"
},
{
"name": "Ruby",
"bytes": "220"
},
{
"name": "Shell",
"bytes": "22312"
},
{
"name": "Vim script",
"bytes": "406"
},
{
"name": "XSLT",
"bytes": "11489"
}
],
"symlink_target": ""
} |
"""
:module: watchdog.observers.inotify
:synopsis: ``inotify(7)`` based emitter implementation.
:author: Sebastien Martini <seb@dbzteam.org>
:author: Luke McCarthy <luke@iogopro.co.uk>
:author: yesudeep@google.com (Yesudeep Mangalapilly)
:author: Tim Cuthbertson <tim+github@gfxmonk.net>
:platforms: Linux 2.6.13+.
.. ADMONITION:: About system requirements
Recommended minimum kernel version: 2.6.25.
Quote from the inotify(7) man page:
"Inotify was merged into the 2.6.13 Linux kernel. The required library
interfaces were added to glibc in version 2.4. (IN_DONT_FOLLOW,
IN_MASK_ADD, and IN_ONLYDIR were only added in version 2.5.)"
Therefore, you must ensure the system is running at least these versions of
the appropriate libraries and the kernel.
.. ADMONITION:: About recursiveness, event order, and event coalescing
Quote from the inotify(7) man page:
If successive output inotify events produced on the inotify file
descriptor are identical (same wd, mask, cookie, and name) then they
are coalesced into a single event if the older event has not yet been
read (but see BUGS).
The events returned by reading from an inotify file descriptor form
an ordered queue. Thus, for example, it is guaranteed that when
renaming from one directory to another, events will be produced in
the correct order on the inotify file descriptor.
...
Inotify monitoring of directories is not recursive: to monitor
subdirectories under a directory, additional watches must be created.
This emitter implementation therefore automatically adds watches for
sub-directories if running in recursive mode.
Some extremely useful articles and documentation:
.. _inotify FAQ: http://inotify.aiken.cz/?section=inotify&page=faq&lang=en
.. _intro to inotify: http://www.linuxjournal.com/article/8478
"""
from __future__ import with_statement
import os
import threading
from .inotify_buffer import InotifyBuffer
from watchdog.observers.api import (
EventEmitter,
BaseObserver,
DEFAULT_EMITTER_TIMEOUT,
DEFAULT_OBSERVER_TIMEOUT
)
from watchdog.events import (
DirDeletedEvent,
DirModifiedEvent,
DirMovedEvent,
DirCreatedEvent,
FileDeletedEvent,
FileModifiedEvent,
FileMovedEvent,
FileCreatedEvent,
generate_sub_moved_events,
generate_sub_created_events,
)
from watchdog.utils import unicode_paths
class InotifyEmitter(EventEmitter):
"""
inotify(7)-based event emitter.
:param event_queue:
The event queue to fill with events.
:param watch:
A watch object representing the directory to monitor.
:type watch:
:class:`watchdog.observers.api.ObservedWatch`
:param timeout:
Read events blocking timeout (in seconds).
:type timeout:
``float``
"""
def __init__(self, event_queue, watch, timeout=DEFAULT_EMITTER_TIMEOUT):
EventEmitter.__init__(self, event_queue, watch, timeout)
self._lock = threading.Lock()
self._inotify = None
def on_thread_start(self):
path = unicode_paths.encode(self.watch.path)
self._inotify = InotifyBuffer(path, self.watch.is_recursive)
def on_thread_stop(self):
if self._inotify:
self._inotify.close()
def queue_events(self, timeout):
with self._lock:
event = self._inotify.read_event()
if event is None:
return
if isinstance(event, tuple):
move_from, move_to = event
src_path = self._decode_path(move_from.src_path)
dest_path = self._decode_path(move_to.src_path)
cls = DirMovedEvent if move_from.is_directory else FileMovedEvent
self.queue_event(cls(src_path, dest_path))
self.queue_event(DirModifiedEvent(os.path.dirname(src_path)))
self.queue_event(DirModifiedEvent(os.path.dirname(dest_path)))
if move_from.is_directory and self.watch.is_recursive:
for sub_event in generate_sub_moved_events(src_path, dest_path):
self.queue_event(sub_event)
return
src_path = self._decode_path(event.src_path)
if event.is_moved_to:
cls = DirCreatedEvent if event.is_directory else FileCreatedEvent
self.queue_event(cls(src_path))
self.queue_event(DirModifiedEvent(os.path.dirname(src_path)))
if event.is_directory and self.watch.is_recursive:
for sub_event in generate_sub_created_events(src_path):
self.queue_event(sub_event)
elif event.is_attrib:
cls = DirModifiedEvent if event.is_directory else FileModifiedEvent
self.queue_event(cls(src_path))
elif event.is_modify:
cls = DirModifiedEvent if event.is_directory else FileModifiedEvent
self.queue_event(cls(src_path))
elif event.is_delete_self:
cls = DirDeletedEvent if event.is_directory else FileDeletedEvent
self.queue_event(cls(src_path))
elif event.is_delete or event.is_moved_from:
cls = DirDeletedEvent if event.is_directory else FileDeletedEvent
self.queue_event(cls(src_path))
self.queue_event(DirModifiedEvent(os.path.dirname(src_path)))
elif event.is_create:
cls = DirCreatedEvent if event.is_directory else FileCreatedEvent
self.queue_event(cls(src_path))
self.queue_event(DirModifiedEvent(os.path.dirname(src_path)))
def _decode_path(self, path):
""" Decode path only if unicode string was passed to this emitter. """
if isinstance(self.watch.path, bytes):
return path
return unicode_paths.decode(path)
class InotifyObserver(BaseObserver):
"""
Observer thread that schedules watching directories and dispatches
calls to event handlers.
"""
def __init__(self, timeout=DEFAULT_OBSERVER_TIMEOUT):
BaseObserver.__init__(self, emitter_class=InotifyEmitter,
timeout=timeout)
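# Minimal usage sketch (assumes the standard watchdog public API; the stock
# LoggingEventHandler is used here only for illustration):
#
#     from watchdog.events import LoggingEventHandler
#
#     observer = InotifyObserver()
#     observer.schedule(LoggingEventHandler(), path='/tmp', recursive=True)
#     observer.start()
#     ...  # events are dispatched on a background thread
#     observer.stop()
#     observer.join()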
| {
"content_hash": "804bde6473350fd048665fff28c6754e",
"timestamp": "",
"source": "github",
"line_count": 168,
"max_line_length": 84,
"avg_line_length": 37.57142857142857,
"alnum_prop": 0.6506653992395437,
"repo_name": "piagarwal11/GDriveLinuxClient",
"id": "65fa7d51cd8cad94742a9f9875ad4d40ef20f10e",
"size": "6997",
"binary": false,
"copies": "16",
"ref": "refs/heads/master",
"path": "src/watchdog-0.8.2/build/lib.linux-x86_64-2.7/watchdog/observers/inotify.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "4519"
},
{
"name": "C",
"bytes": "53212"
},
{
"name": "HTML",
"bytes": "2538"
},
{
"name": "Makefile",
"bytes": "4602"
},
{
"name": "Python",
"bytes": "480993"
}
],
"symlink_target": ""
} |
"""cifar100_n dataset."""
from tensorflow_datasets import testing
from tensorflow_datasets.image_classification.cifar100_n import cifar100_n
class Cifar100NTest(testing.DatasetBuilderTestCase):
"""Tests for cifar100_n dataset."""
DATASET_CLASS = cifar100_n.Cifar100N
SPLITS = {
'train': 10, # Number of fake train examples
'test': 2, # Number of fake test examples
}
if __name__ == '__main__':
testing.test_main()
| {
"content_hash": "4fde5081a7a52361146dc5caa2933e3b",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 74,
"avg_line_length": 25.941176470588236,
"alnum_prop": 0.6961451247165533,
"repo_name": "tensorflow/datasets",
"id": "8f2627b66c7f5f8f2fce99a04fd40820f3aa6960",
"size": "1053",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tensorflow_datasets/image_classification/cifar100_n/cifar100_n_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Gherkin",
"bytes": "728"
},
{
"name": "JavaScript",
"bytes": "13369"
},
{
"name": "NewLisp",
"bytes": "13940"
},
{
"name": "Perl",
"bytes": "520"
},
{
"name": "Python",
"bytes": "5398856"
},
{
"name": "Roff",
"bytes": "22095"
},
{
"name": "Ruby",
"bytes": "25669"
},
{
"name": "Shell",
"bytes": "3895"
},
{
"name": "Smalltalk",
"bytes": "20604"
},
{
"name": "TeX",
"bytes": "759"
}
],
"symlink_target": ""
} |
from django.test import TestCase
from mozdns.address_record.models import AddressRecord
from mozdns.cname.models import CNAME
from mozdns.txt.models import TXT
from mozdns.mx.models import MX
from mozdns.srv.models import SRV
from mozdns.domain.models import Domain
from mozdns.nameserver.models import Nameserver
from mozdns.utils import ensure_label_domain, prune_tree
from mozdns.soa.models import SOA
from mozdns.tests.utils import create_fake_zone
class FullNameTests(TestCase):
def setUp(self):
Domain.objects.all().delete()
def test_basic_add_remove1(self):
c = Domain(name='com')
c.save()
self.assertFalse(c.purgeable)
f_c = Domain(name='foo.com')
s, _ = SOA.objects.get_or_create(primary="foo", contact="foo",
description="foo.zfoo.comom")
f_c.soa = s
f_c.save()
self.assertFalse(f_c.purgeable)
fqdn = "bar.x.y.z.foo.com"
label, the_domain = ensure_label_domain(fqdn)
self.assertEqual(label, "bar")
self.assertEqual(the_domain.name, "x.y.z.foo.com")
self.assertTrue(the_domain.purgeable)
self.assertEqual(the_domain.master_domain.name, "y.z.foo.com")
self.assertTrue(the_domain.master_domain.purgeable)
self.assertEqual(
the_domain.master_domain.master_domain.name, "z.foo.com")
self.assertTrue(the_domain.master_domain.master_domain.purgeable)
self.assertEqual(
the_domain.master_domain.master_domain.master_domain.name,
"foo.com")
self.assertFalse(
the_domain.master_domain.master_domain.master_domain.purgeable)
# Now call prune_tree on the_domain
self.assertTrue(prune_tree(the_domain))
self.assertFalse(Domain.objects.filter(name="x.y.z.foo.com"))
self.assertFalse(Domain.objects.filter(name="y.z.foo.com"))
self.assertFalse(Domain.objects.filter(name="z.foo.com"))
self.assertTrue(Domain.objects.filter(name="foo.com"))
# Make sure other domains can't be pruned
self.assertFalse(prune_tree(f_c))
self.assertTrue(Domain.objects.filter(name="foo.com"))
self.assertFalse(prune_tree(c))
self.assertTrue(Domain.objects.filter(name="com"))
def test_basic_add_remove2(self):
# Make sure that if a domain is set to not purgeable the prune stops at
# that domain.
c = Domain(name='edu')
c.save()
self.assertFalse(c.purgeable)
f_c = Domain(name='foo.edu')
s, _ = SOA.objects.get_or_create(primary="foo", contact="foo",
description="foo.edu")
f_c.soa = s
f_c.save()
self.assertFalse(f_c.purgeable)
fqdn = "bar.x.y.z.foo.edu"
label, the_domain = ensure_label_domain(fqdn)
self.assertEqual(label, "bar")
self.assertEqual(the_domain.name, "x.y.z.foo.edu")
self.assertTrue(the_domain.purgeable)
self.assertEqual(the_domain.master_domain.name, "y.z.foo.edu")
self.assertTrue(the_domain.master_domain.purgeable)
self.assertEqual(
the_domain.master_domain.master_domain.name, "z.foo.edu")
self.assertTrue(the_domain.master_domain.master_domain.purgeable)
self.assertEqual(
the_domain.master_domain.master_domain.master_domain.name,
"foo.edu")
self.assertFalse(
the_domain.master_domain.master_domain.master_domain.purgeable)
# See if purgeable stops prune
the_domain.purgeable = False
the_domain.save()
self.assertFalse(prune_tree(the_domain))
the_domain.purgeable = True
the_domain.save()
# Ok, reset
y_z = Domain.objects.get(name="y.z.foo.edu")
y_z.purgeable = False
y_z.save()
# Refresh the domain
the_domain = Domain.objects.get(pk=the_domain.pk)
# This should delete up to and stop at the domain "y.z.foo.edu"
self.assertTrue(prune_tree(the_domain))
self.assertFalse(Domain.objects.filter(name="x.y.z.foo.edu"))
self.assertTrue(Domain.objects.filter(name="y.z.foo.edu"))
self.assertTrue(Domain.objects.filter(name="z.foo.edu"))
self.assertTrue(Domain.objects.filter(name="foo.edu"))
# If we delete y.z.foo.edu and then call prune on z.foo.edu it should
# delete z.foo.edu
Domain.objects.get(name="y.z.foo.edu").delete()
self.assertTrue(prune_tree(Domain.objects.get(name="z.foo.edu")))
self.assertFalse(Domain.objects.filter(name="z.foo.edu"))
self.assertTrue(Domain.objects.filter(name="foo.edu"))
def test_basic_add_remove3(self):
# Make sure that if a domain is set to not purgeable the prune stops at
# that domain when a record exists in a domain
f_c = create_fake_zone("foo.foo", suffix="")
self.assertFalse(f_c.purgeable)
fqdn = "bar.x.y.z.foo.foo"
label, the_domain = ensure_label_domain(fqdn)
txt = TXT(label=label, domain=the_domain, txt_data="Nthing")
txt.save()
self.assertTrue(the_domain.purgeable)
# txt makes the domain un-purgeable.
self.assertFalse(prune_tree(the_domain))
txt.delete()
# The tree should have pruned itself
# Make sure stuff was deleted.
self.assertFalse(Domain.objects.filter(name="x.y.z.foo.foo"))
self.assertFalse(Domain.objects.filter(name="y.z.foo.foo"))
self.assertFalse(Domain.objects.filter(name="z.foo.foo"))
self.assertTrue(Domain.objects.filter(name="foo.foo"))
def test_basic_add_remove4(self):
# Move a record down the tree testing prune's ability to not delete
# stuff.
f_c = create_fake_zone("foo.goo", suffix="")
self.assertFalse(f_c.purgeable)
fqdn = "bar.x.y.z.foo.goo"
label, the_domain = ensure_label_domain(fqdn)
txt = TXT(label=label, domain=the_domain, txt_data="Nthing")
txt.save()
self.assertTrue(the_domain.purgeable)
# txt makes the domain un-purgeable.
self.assertFalse(prune_tree(the_domain))
txt.domain = the_domain.master_domain
the_next_domain = the_domain.master_domain
txt.save()
self.assertFalse(Domain.objects.filter(pk=the_domain.pk))
# We should be able to delete now.
self.assertTrue(prune_tree(the_domain))
the_domain = the_next_domain
# txt makes the domain un-purgeable. y.z.foo.goo
self.assertFalse(prune_tree(the_domain))
txt.domain = the_domain.master_domain
the_next_domain = the_domain.master_domain
txt.save()
self.assertFalse(Domain.objects.filter(pk=the_domain.pk))
# We should be able to delete now.
the_domain = the_next_domain
# txt makes the domain un-purgeable. z.foo.goo
self.assertFalse(prune_tree(the_domain))
txt.domain = the_domain.master_domain
the_next_domain = the_domain.master_domain
txt.save()
self.assertFalse(Domain.objects.filter(pk=the_domain.pk))
# We should be able to delete now.
the_domain = the_next_domain
# txt makes the domain un-purgeable. foo.goo
self.assertFalse(prune_tree(the_domain))
def test_basic_add_remove5(self):
# Make sure all record types block
f_c = create_fake_zone("foo.foo22", suffix="")
self.assertFalse(f_c.purgeable)
fqdn = "bar.x.y.z.foo.foo22"
label, the_domain = ensure_label_domain(fqdn)
txt = TXT(label=label, domain=the_domain, txt_data="Nthing")
txt.save()
self.assertFalse(prune_tree(the_domain))
txt.delete()
label, the_domain = ensure_label_domain(fqdn)
addr = AddressRecord(label=label, domain=the_domain,
ip_type='4', ip_str="10.2.3.4")
addr.save()
self.assertFalse(prune_tree(the_domain))
addr.delete()
label, the_domain = ensure_label_domain(fqdn)
mx = MX(label=label, domain=the_domain, server="foo", priority=4)
mx.save()
self.assertFalse(prune_tree(the_domain))
mx.delete()
label, the_domain = ensure_label_domain(fqdn)
ns = Nameserver(domain=the_domain, server="asdfasffoo")
ns.save()
self.assertFalse(prune_tree(the_domain))
ns.delete()
label, the_domain = ensure_label_domain(fqdn)
srv = SRV(
label='_' + label, domain=the_domain, target="foo", priority=4,
weight=4, port=34)
srv.save()
self.assertFalse(prune_tree(the_domain))
srv.delete()
def test_basic_add_remove6(self):
# Make sure CNAME record block
f_c = create_fake_zone("foo.foo1", suffix="")
f_c.save()
self.assertFalse(f_c.purgeable)
fqdn = "cname.x.y.z.foo.foo1"
label, the_domain = ensure_label_domain(fqdn)
cname = CNAME(label=label, domain=the_domain, target="foo")
cname.save()
self.assertFalse(prune_tree(the_domain))
cname.delete()
def test_basic_add_remove7(self):
# try a star record
f_c = create_fake_zone("foo.foo2", suffix="")
f_c.save()
self.assertFalse(f_c.purgeable)
fqdn = "*.x.y.z.foo.foo2"
label, the_domain = ensure_label_domain(fqdn)
self.assertEqual('*', label)
cname = CNAME(label=label, domain=the_domain, target="foo")
cname.save()
self.assertFalse(prune_tree(the_domain))
cname.delete()
def test_basic_add_remove8(self):
# Make sure a record's label is changed to '' when a domain with the
# same name as its fqdn is created.
f_c = create_fake_zone("foo.foo3", suffix="")
f_c.save()
self.assertFalse(f_c.purgeable)
fqdn = "www.x.y.z.foo.foo3"
label, the_domain = ensure_label_domain(fqdn)
self.assertEqual('www', label)
self.assertEqual('x.y.z.foo.foo3', the_domain.name)
self.assertTrue(the_domain.pk)
cname = CNAME(label=label, domain=the_domain, target="foo")
cname.save()
fqdn = "*.www.x.y.z.foo.foo3"
label2, the_domain2 = ensure_label_domain(fqdn)
cname = CNAME.objects.get(fqdn=cname.fqdn)
self.assertEqual('', cname.label)
self.assertEqual('www.x.y.z.foo.foo3', cname.domain.name)
self.assertEqual('*', label2)
self.assertEqual('www.x.y.z.foo.foo3', the_domain2.name)
def test_basic_add_remove9(self):
# Make sure all record types block
f_c = create_fake_zone("foo.foo22", suffix="")
self.assertFalse(f_c.purgeable)
fqdn = "y.z.foo.foo22"
label, the_domain = ensure_label_domain(fqdn)
addr = AddressRecord(label=label, domain=the_domain,
ip_type='4', ip_str="10.2.3.4")
addr.save()
self.assertFalse(prune_tree(the_domain))
f_c = create_fake_zone("y.z.foo.foo22", suffix="")
self.assertFalse(f_c.purgeable)
| {
"content_hash": "972966d0ab35aef161fccca1b25342cb",
"timestamp": "",
"source": "github",
"line_count": 284,
"max_line_length": 79,
"avg_line_length": 39.32394366197183,
"alnum_prop": 0.6169412607449857,
"repo_name": "rtucker-mozilla/inventory",
"id": "c72dbb8b3acb24741bdb4bfc078c83b16556797d",
"size": "11168",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "mozdns/domain/tests/full_name.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "5104"
},
{
"name": "CSS",
"bytes": "362837"
},
{
"name": "CoffeeScript",
"bytes": "9538"
},
{
"name": "HTML",
"bytes": "1195738"
},
{
"name": "JavaScript",
"bytes": "1530665"
},
{
"name": "Makefile",
"bytes": "14421"
},
{
"name": "PHP",
"bytes": "27273"
},
{
"name": "Python",
"bytes": "3642241"
},
{
"name": "Shell",
"bytes": "1783"
}
],
"symlink_target": ""
} |
"""Support for Sky Hub."""
from __future__ import annotations
import logging
from pyskyqhub.skyq_hub import SkyQHub
import voluptuous as vol
from homeassistant.components.device_tracker import (
DOMAIN,
PLATFORM_SCHEMA as PARENT_PLATFORM_SCHEMA,
DeviceScanner,
)
from homeassistant.const import CONF_HOST
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.typing import ConfigType
_LOGGER = logging.getLogger(__name__)
PLATFORM_SCHEMA = PARENT_PLATFORM_SCHEMA.extend({vol.Optional(CONF_HOST): cv.string})
async def async_get_scanner(
hass: HomeAssistant, config: ConfigType
) -> DeviceScanner | None:
"""Return a Sky Hub scanner if successful."""
host = config[DOMAIN].get(CONF_HOST, "192.168.1.254")
websession = async_get_clientsession(hass)
hub = SkyQHub(websession, host)
_LOGGER.debug("Initialising Sky Hub")
await hub.async_connect()
if hub.success_init:
scanner = SkyHubDeviceScanner(hub)
return scanner
return None
class SkyHubDeviceScanner(DeviceScanner):
"""This class queries a Sky Hub router."""
def __init__(self, hub):
"""Initialise the scanner."""
self._hub = hub
self.last_results = {}
async def async_scan_devices(self):
"""Scan for new devices and return a list with found device IDs."""
await self._async_update_info()
return [device.mac for device in self.last_results]
async def async_get_device_name(self, device):
"""Return the name of the given device."""
name = next(
(result.name for result in self.last_results if result.mac == device),
None,
)
return name
async def async_get_extra_attributes(self, device):
"""Get extra attributes of a device."""
device = next(
(result for result in self.last_results if result.mac == device), None
)
if device is None:
return {}
return device.asdict()
async def _async_update_info(self):
"""Ensure the information from the Sky Hub is up to date."""
_LOGGER.debug("Scanning")
if not (data := await self._hub.async_get_skyhub_data()):
return
self.last_results = data
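# Example configuration.yaml entry (illustrative; ``host`` falls back to
# 192.168.1.254 when omitted, as handled in async_get_scanner above):
#
#     device_tracker:
#       - platform: sky_hub
#         host: 192.168.1.254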
| {
"content_hash": "5de4c36b93211420d77fae3517f2d8e5",
"timestamp": "",
"source": "github",
"line_count": 80,
"max_line_length": 85,
"avg_line_length": 29.9375,
"alnum_prop": 0.6609603340292276,
"repo_name": "rohitranjan1991/home-assistant",
"id": "731baddcdb497007ce702a134758cdc00de54c29",
"size": "2395",
"binary": false,
"copies": "3",
"ref": "refs/heads/dev",
"path": "homeassistant/components/sky_hub/device_tracker.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1017265"
},
{
"name": "Python",
"bytes": "1051086"
},
{
"name": "Shell",
"bytes": "3946"
}
],
"symlink_target": ""
} |
import csv
import six
from copy import deepcopy
if six.PY3:
from io import StringIO
else:
from StringIO import StringIO
from .base import BaseSmartCSVTestCase
from .config import COLUMNS_1, IPHONE_DATA, IPAD_DATA, VALID_TEMPLATE_STR
import smartcsv
class ValidCSVWithHeaderTestCase(BaseSmartCSVTestCase):
def test_valid_csv_with_all_data_and_header(self):
"""Should be valid if all data is passed"""
csv_data = """
title,category,subcategory,currency,price,url,image_url
{iphone_data}
{ipad_data}
""".format(
iphone_data=VALID_TEMPLATE_STR.format(**IPHONE_DATA),
ipad_data=VALID_TEMPLATE_STR.format(**IPAD_DATA),
)
reader = smartcsv.reader(StringIO(csv_data), columns=COLUMNS_1)
iphone = next(reader)
ipad = next(reader)
self.assertRaises(StopIteration, lambda: list(next(reader)))
self.assertTrue(isinstance(iphone, dict) and isinstance(ipad, dict))
self.assertModelsEquals(iphone, IPHONE_DATA)
self.assertModelsEquals(ipad, IPAD_DATA)
def test_valid_csv_with_columns_names_with_whitespaces(self):
"""Should be valid if all data is passed"""
csv_data = """
title,category,subcategory,currency,price,url,Image URL
{iphone_data}
{ipad_data}
""".format(
iphone_data=VALID_TEMPLATE_STR.format(**IPHONE_DATA),
ipad_data=VALID_TEMPLATE_STR.format(**IPAD_DATA),
)
columns = COLUMNS_1[:]
image_column = columns[-1].copy()
image_column['name'] = "Image URL"
columns = columns[:-1] + [image_column]
reader = smartcsv.reader(StringIO(csv_data), columns=columns)
iphone = next(reader)
ipad = next(reader)
self.assertRaises(StopIteration, lambda: list(next(reader)))
self.assertTrue(
isinstance(iphone, dict) and isinstance(ipad, dict))
self.assertEqual(iphone['Image URL'], 'http://apple.com/iphone.jpg')
self.assertEqual(ipad['Image URL'], 'http://apple.com/ipad.jpg')
def test_valid_csv_with_blank_lines(self):
"""Should be valid even if there are blank lines"""
csv_data = """
title,category,subcategory,currency,price,url,image_url
{iphone_data}
{ipad_data}
""".format(
iphone_data=VALID_TEMPLATE_STR.format(**IPHONE_DATA),
ipad_data=VALID_TEMPLATE_STR.format(**IPAD_DATA),
)
reader = smartcsv.reader(StringIO(csv_data), columns=COLUMNS_1)
iphone = next(reader)
ipad = next(reader)
self.assertRaises(StopIteration, lambda: list(next(reader)))
self.assertTrue(
isinstance(iphone, dict) and isinstance(ipad, dict))
self.assertModelsEquals(iphone, IPHONE_DATA)
self.assertModelsEquals(ipad, IPAD_DATA)
def test_valid_and_white_spaces_for_values_are_stripped(self):
"""Should strip white spaces out of values by default"""
csv_data = """
title,category,subcategory,currency,price,url,image_url
{iphone_data}
{ipad_data}
""".format(
iphone_data=VALID_TEMPLATE_STR.format(**IPHONE_DATA),
ipad_data=VALID_TEMPLATE_STR.format(**IPAD_DATA),
)
reader = smartcsv.reader(StringIO(csv_data), columns=COLUMNS_1)
iphone = next(reader)
ipad = next(reader)
self.assertRaises(StopIteration, lambda: list(next(reader)))
self.assertTrue(
isinstance(iphone, dict) and isinstance(ipad, dict))
self.assertModelsEquals(iphone, IPHONE_DATA)
self.assertModelsEquals(ipad, IPAD_DATA)
def test_valid_and_white_spaces_for_values_are_not_stripped(self):
"""Should strip white spaces out of values"""
iphone_data = IPHONE_DATA.copy()
ipad_data = IPAD_DATA.copy()
iphone_data['category'] = " Phones "
ipad_data['price'] = " 599 "
csv_data = """
title,category,subcategory,currency,price,url,image_url
{iphone_data}
{ipad_data}
""".format(
iphone_data=VALID_TEMPLATE_STR.format(**iphone_data),
ipad_data=VALID_TEMPLATE_STR.format(**ipad_data),
)
reader = smartcsv.reader(
StringIO(csv_data), columns=COLUMNS_1, strip_white_spaces=False)
iphone = next(reader)
ipad = next(reader)
self.assertRaises(StopIteration, lambda: list(next(reader)))
self.assertTrue(
isinstance(iphone, dict) and isinstance(ipad, dict))
# First 4 spaces
iphone_data['title'] = " " + iphone_data['title']
self.assertModelsEquals(iphone, iphone_data)
self.assertModelsEquals(ipad, ipad_data)
def test_valid_csv_only_with_required_data(self):
"""Should be valid if only required data is passed"""
iphone_data = IPHONE_DATA.copy()
ipad_data = IPAD_DATA.copy()
iphone_data['subcategory'] = ""
iphone_data['image_url'] = ""
ipad_data['subcategory'] = ""
ipad_data['image_url'] = ""
csv_data = """
title,category,subcategory,currency,price,url,image_url
{iphone_data}
{ipad_data}
""".format(
iphone_data=VALID_TEMPLATE_STR.format(**iphone_data),
ipad_data=VALID_TEMPLATE_STR.format(**ipad_data),
)
reader = smartcsv.reader(StringIO(csv_data), columns=COLUMNS_1)
iphone = next(reader)
ipad = next(reader)
self.assertRaises(StopIteration, lambda: list(next(reader)))
self.assertTrue(isinstance(iphone, dict) and isinstance(ipad, dict))
self.assertModelsEquals(iphone, iphone_data)
self.assertModelsEquals(ipad, ipad_data)
def test_valid_csv_with_different_choices(self):
"""All choices for a field should be valid"""
iphone_data = IPHONE_DATA.copy()
ipad_data = IPAD_DATA.copy()
iphone_data['subcategory'] = ""
iphone_data['image_url'] = ""
ipad_data['subcategory'] = ""
ipad_data['image_url'] = ""
ipad_data['currency'] = "ARS"
csv_data = """
title,category,subcategory,currency,price,url,image_url
{iphone_data}
{ipad_data}
""".format(
iphone_data=VALID_TEMPLATE_STR.format(**iphone_data),
ipad_data=VALID_TEMPLATE_STR.format(**ipad_data),
)
reader = smartcsv.reader(StringIO(csv_data), columns=COLUMNS_1)
iphone = next(reader)
ipad = next(reader)
self.assertRaises(StopIteration, lambda: list(next(reader)))
self.assertTrue(isinstance(iphone, dict) and isinstance(ipad, dict))
self.assertModelsEquals(iphone, iphone_data)
self.assertModelsEquals(ipad, ipad_data)
def test_valid_csv_with_some_fields_not_required_empty(self):
"""Should be valid if some not required fields are filled and others don't"""
iphone_data = IPHONE_DATA.copy()
ipad_data = IPAD_DATA.copy()
iphone_data['subcategory'] = ""
iphone_data['image_url'] = ""
ipad_data['image_url'] = ""
csv_data = """
title,category,subcategory,currency,price,url,image_url
{iphone_data}
{ipad_data}
""".format(
iphone_data=VALID_TEMPLATE_STR.format(**iphone_data),
ipad_data=VALID_TEMPLATE_STR.format(**ipad_data),
)
reader = smartcsv.reader(StringIO(csv_data), columns=COLUMNS_1)
iphone = next(reader)
ipad = next(reader)
self.assertRaises(StopIteration, lambda: list(next(reader)))
self.assertTrue(isinstance(iphone, dict) and isinstance(ipad, dict))
self.assertEqual(iphone['title'], 'iPhone 5c blue')
self.assertEqual(ipad['title'], 'iPad mini')
self.assertEqual(iphone['subcategory'], '')
self.assertEqual(ipad['subcategory'], 'Apple')
def test_valid_csv_with_a_custom_dialect(self):
"""Should be valid if all data is passed"""
piped_template = VALID_TEMPLATE_STR.replace(',', '|')
csv_data = """
title|category|subcategory|currency|price|url|image_url
{iphone_data}
{ipad_data}
""".format(
iphone_data=piped_template.format(**IPHONE_DATA),
ipad_data=piped_template.format(**IPAD_DATA),
)
csv.register_dialect('pipes', delimiter='|')
reader = smartcsv.reader(
StringIO(csv_data), dialect='pipes', columns=COLUMNS_1)
iphone = next(reader)
ipad = next(reader)
self.assertRaises(StopIteration, lambda: list(next(reader)))
self.assertTrue(
isinstance(iphone, dict) and isinstance(ipad, dict))
self.assertModelsEquals(iphone, IPHONE_DATA)
self.assertModelsEquals(ipad, IPAD_DATA)
| {
"content_hash": "223248e928378ae7a7ba5d50d7f99b9b",
"timestamp": "",
"source": "github",
"line_count": 263,
"max_line_length": 85,
"avg_line_length": 33.26996197718631,
"alnum_prop": 0.624,
"repo_name": "santiagobasulto/smartcsv",
"id": "0515a04249880de37cdca32759382eaa37c50424",
"size": "8750",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_valid_csv.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "84287"
}
],
"symlink_target": ""
} |
import HTMLParser
import os
import re
from nltk import word_tokenize
html_parser = HTMLParser.HTMLParser()
# this is the path of the folder that will contain the tweet files
tweets_folder = os.path.join("D:", os.sep, "Documents", "PycharmProjects",
"easy_group_classifier", "text_files")
# checks if previous path exists, if not, it creates it
if not os.path.isdir(tweets_folder):
os.makedirs(tweets_folder)
# the name of the file to clean
word = "BBCTech"
# this is the name of the file that contains all the "unclean" tweets.
# it will be inside the previously mentioned tweets_folder
tweets_file = os.path.join(tweets_folder, "%s.txt" % word)
clean_tweets_file = os.path.join(tweets_folder, "%s_clean.txt" % word)
tweet_list = []
# read "unclean" tweets from file
with open(tweets_file, "rb") as f:
for line in f:
tweet_list.append(line.strip())
def remove_urls_n_bloat(text):
"""Remove any URL, mention and hashtag symbol included in the text."""
# this removes URLs, mentions and RT bloat
new_text = " ".join(
re.sub("(@[A-Za-z0-9_]+)|(\w+:\/\/\S+)|(^RT)|(:\s+)",
" ", text).split())
# this removes the hashtag symbols only from hashtaged words
newer_text = re.sub("(#[A-Za-z]+[A-Za-z0-9]*)|(#[0-9]+[A-Za-z]+)",
lambda m: m.group(0)[1:], new_text)
return newer_text
def remove_emoticons(text):
"""Remove emoticons, emoji and pictograph characters from the text."""
try:
# Wide UCS-4 build
myre = re.compile(u'['
u'\U0001F300-\U0001F64F'
u'\U0001F680-\U0001F6FF'
u'\u2600-\u26FF\u2700-\u27BF]+',
re.UNICODE)
except re.error:
# Narrow UCS-2 build
myre = re.compile(u'('
u'\ud83c[\udf00-\udfff]|'
u'\ud83d[\udc00-\ude4f\ude80-\udeff]|'
u'[\u2600-\u26FF\u2700-\u27BF])+',
re.UNICODE)
unicode_text = text.decode("utf-8")
new_text = myre.sub('', unicode_text).encode("utf-8")
return new_text
def escape_HTML_chars(text):
"""Escape any HTML characters present in the text."""
text = text.decode("utf-8")
new_text = html_parser.unescape(text).encode("utf-8")
return new_text
def separate_punctuation(text):
"""Separate punctuation marks in the text.
e.g. "I have a question, what is your name?"
--> "I have a question , what is your name ?"
"""
text = text.decode("utf-8")
word_tokens = word_tokenize(text, "english")
new_text = " ".join(word_tokens).encode("utf-8")
return new_text
def separate_joint_words(name):
"""Separate the remains of previously hashtaged words into
base words.
"""
s1 = re.sub('(.)([A-Z][a-z]+)', r'\1 \2', name)
return re.sub('([a-z0-9])([A-Z])', r'\1 \2', s1).lower()
clean_tweet_list = []
for tweet in tweet_list:
tweet = remove_urls_n_bloat(tweet)
tweet = remove_emoticons(tweet)
tweet = escape_HTML_chars(tweet)
tweet = separate_joint_words(tweet)
tweet = separate_punctuation(tweet)
clean_tweet_list.append(tweet)
# this is to get rid of repeated tweets
clean_tweet_list = list(set(clean_tweet_list))
# write the cleaned tweets to a file
with open(clean_tweets_file, "wb") as f:
for tweet in clean_tweet_list:
f.write("%s\n" % tweet)
| {
"content_hash": "ce82c53646ffb949e37879a17b2597fa",
"timestamp": "",
"source": "github",
"line_count": 117,
"max_line_length": 74,
"avg_line_length": 30.692307692307693,
"alnum_prop": 0.5753272069061542,
"repo_name": "jluukvg/text-classifier",
"id": "209ba912fd5f13e33d23ff3299ee4657e87c64e9",
"size": "3617",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/clean_tweets.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PHP",
"bytes": "21119"
},
{
"name": "Python",
"bytes": "15199"
}
],
"symlink_target": ""
} |
from mock import MagicMock
from django.utils import six
from django.test import TestCase
from django import template
from django.contrib.auth.models import User, Group, Permission
from django.core.urlresolvers import reverse
from django.core.files.uploadedfile import SimpleUploadedFile
from wagtail.tests.utils import unittest, WagtailTestUtils
from wagtail.wagtailimages.models import get_image_model
from wagtail.wagtailimages.formats import (
Format,
get_image_format,
register_image_format
)
from wagtail.wagtailimages.backends import get_image_backend
from wagtail.wagtailimages.backends.pillow import PillowBackend
def get_test_image_file():
from six import BytesIO
from PIL import Image
from django.core.files.images import ImageFile
f = BytesIO()
image = Image.new('RGB', (640, 480), 'white')
image.save(f, 'PNG')
return ImageFile(f, name='test.png')
Image = get_image_model()
class TestImage(TestCase):
def setUp(self):
# Create an image for running tests on
self.image = Image.objects.create(
title="Test image",
file=get_test_image_file(),
)
def test_is_portrait(self):
self.assertFalse(self.image.is_portrait())
def test_is_landscape(self):
self.assertTrue(self.image.is_landscape())
class TestImagePermissions(TestCase):
def setUp(self):
# Create some user accounts for testing permissions
self.user = User.objects.create_user(username='user', email='user@email.com', password='password')
self.owner = User.objects.create_user(username='owner', email='owner@email.com', password='password')
self.editor = User.objects.create_user(username='editor', email='editor@email.com', password='password')
self.editor.groups.add(Group.objects.get(name='Editors'))
self.administrator = User.objects.create_superuser(username='administrator', email='administrator@email.com', password='password')
# Owner user must have the add_image permission
self.owner.user_permissions.add(Permission.objects.get(codename='add_image'))
# Create an image for running tests on
self.image = Image.objects.create(
title="Test image",
uploaded_by_user=self.owner,
file=get_test_image_file(),
)
def test_administrator_can_edit(self):
self.assertTrue(self.image.is_editable_by_user(self.administrator))
def test_editor_can_edit(self):
self.assertTrue(self.image.is_editable_by_user(self.editor))
def test_owner_can_edit(self):
self.assertTrue(self.image.is_editable_by_user(self.owner))
def test_user_cant_edit(self):
self.assertFalse(self.image.is_editable_by_user(self.user))
class TestRenditions(TestCase):
def setUp(self):
# Create an image for running tests on
self.image = Image.objects.create(
title="Test image",
file=get_test_image_file(),
)
def test_default_backend(self):
# default backend should be pillow
backend = get_image_backend()
self.assertTrue(isinstance(backend, PillowBackend))
def test_minification(self):
rendition = self.image.get_rendition('width-400')
# Check size
self.assertEqual(rendition.width, 400)
self.assertEqual(rendition.height, 300)
def test_resize_to_max(self):
rendition = self.image.get_rendition('max-100x100')
# Check size
self.assertEqual(rendition.width, 100)
self.assertEqual(rendition.height, 75)
def test_resize_to_min(self):
rendition = self.image.get_rendition('min-120x120')
# Check size
self.assertEqual(rendition.width, 160)
self.assertEqual(rendition.height, 120)
def test_resize_to_original(self):
rendition = self.image.get_rendition('original')
# Check size
self.assertEqual(rendition.width, 640)
self.assertEqual(rendition.height, 480)
def test_cache(self):
# Get two renditions with the same filter
first_rendition = self.image.get_rendition('width-400')
second_rendition = self.image.get_rendition('width-400')
# Check that they are the same object
self.assertEqual(first_rendition, second_rendition)
class TestRenditionsWand(TestCase):
def setUp(self):
try:
import wand
except ImportError:
# skip these tests if Wand is not installed
raise unittest.SkipTest(
"Skipping image backend tests for wand, as wand is not installed")
# Create an image for running tests on
self.image = Image.objects.create(
title="Test image",
file=get_test_image_file(),
)
self.image.backend = 'wagtail.wagtailimages.backends.wand.WandBackend'
def test_minification(self):
rendition = self.image.get_rendition('width-400')
# Check size
self.assertEqual(rendition.width, 400)
self.assertEqual(rendition.height, 300)
def test_resize_to_max(self):
rendition = self.image.get_rendition('max-100x100')
# Check size
self.assertEqual(rendition.width, 100)
self.assertEqual(rendition.height, 75)
def test_resize_to_min(self):
rendition = self.image.get_rendition('min-120x120')
# Check size
self.assertEqual(rendition.width, 160)
self.assertEqual(rendition.height, 120)
def test_resize_to_original(self):
rendition = self.image.get_rendition('original')
# Check size
self.assertEqual(rendition.width, 640)
self.assertEqual(rendition.height, 480)
def test_cache(self):
# Get two renditions with the same filter
first_rendition = self.image.get_rendition('width-400')
second_rendition = self.image.get_rendition('width-400')
# Check that they are the same object
self.assertEqual(first_rendition, second_rendition)
class TestImageTag(TestCase):
def setUp(self):
# Create an image for running tests on
self.image = Image.objects.create(
title="Test image",
file=get_test_image_file(),
)
def render_image_tag(self, image, filter_spec):
temp = template.Template('{% load wagtailimages_tags %}{% image image_obj ' + filter_spec + '%}')
context = template.Context({'image_obj': image})
return temp.render(context)
def test_image_tag(self):
result = self.render_image_tag(self.image, 'width-400')
# Check that all the required HTML attributes are set
self.assertTrue('width="400"' in result)
self.assertTrue('height="300"' in result)
self.assertTrue('alt="Test image"' in result)
def render_image_tag_as(self, image, filter_spec):
temp = template.Template('{% load wagtailimages_tags %}{% image image_obj ' + filter_spec + ' as test_img %}<img {{ test_img.attrs }} />')
context = template.Context({'image_obj': image})
return temp.render(context)
def test_image_tag_attrs(self):
result = self.render_image_tag_as(self.image, 'width-400')
# Check that all the required HTML attributes are set
self.assertTrue('width="400"' in result)
self.assertTrue('height="300"' in result)
self.assertTrue('alt="Test image"' in result)
def render_image_tag_with_extra_attributes(self, image, title):
temp = template.Template('{% load wagtailimages_tags %}{% image image_obj width-400 class="photo" title=title|lower %}')
context = template.Context({'image_obj': image, 'title': title})
return temp.render(context)
def test_image_tag_with_extra_attributes(self):
result = self.render_image_tag_with_extra_attributes(self.image, 'My Wonderful Title')
# Check that all the required HTML attributes are set
self.assertTrue('width="400"' in result)
self.assertTrue('height="300"' in result)
self.assertTrue('class="photo"' in result)
self.assertTrue('title="my wonderful title"' in result)
## ===== ADMIN VIEWS =====
class TestImageIndexView(TestCase, WagtailTestUtils):
def setUp(self):
self.login()
def get(self, params={}):
return self.client.get(reverse('wagtailimages_index'), params)
def test_simple(self):
response = self.get()
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailimages/images/index.html')
def test_search(self):
response = self.get({'q': "Hello"})
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['query_string'], "Hello")
def test_pagination(self):
pages = ['0', '1', '-1', '9999', 'Not a page']
for page in pages:
response = self.get({'p': page})
self.assertEqual(response.status_code, 200)
def test_ordering(self):
orderings = ['title', '-created_at']
for ordering in orderings:
response = self.get({'ordering': ordering})
self.assertEqual(response.status_code, 200)
class TestImageAddView(TestCase, WagtailTestUtils):
def setUp(self):
self.login()
def get(self, params={}):
return self.client.get(reverse('wagtailimages_add_image'), params)
def post(self, post_data={}):
return self.client.post(reverse('wagtailimages_add_image'), post_data)
def test_simple(self):
response = self.get()
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailimages/images/add.html')
def test_add(self):
response = self.post({
'title': "Test image",
'file': SimpleUploadedFile('test.png', get_test_image_file().file.getvalue()),
})
# Should redirect back to index
self.assertRedirects(response, reverse('wagtailimages_index'))
# Check that the image was created
images = Image.objects.filter(title="Test image")
self.assertEqual(images.count(), 1)
# Test that size was populated correctly
image = images.first()
self.assertEqual(image.width, 640)
self.assertEqual(image.height, 480)
class TestImageEditView(TestCase, WagtailTestUtils):
def setUp(self):
self.login()
# Create an image to edit
self.image = Image.objects.create(
title="Test image",
file=get_test_image_file(),
)
def get(self, params={}):
return self.client.get(reverse('wagtailimages_edit_image', args=(self.image.id,)), params)
def post(self, post_data={}):
return self.client.post(reverse('wagtailimages_edit_image', args=(self.image.id,)), post_data)
def test_simple(self):
response = self.get()
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailimages/images/edit.html')
def test_edit(self):
response = self.post({
'title': "Edited",
})
# Should redirect back to index
self.assertRedirects(response, reverse('wagtailimages_index'))
# Check that the image was edited
image = Image.objects.get(id=self.image.id)
self.assertEqual(image.title, "Edited")
class TestImageDeleteView(TestCase, WagtailTestUtils):
def setUp(self):
self.login()
# Create an image to edit
self.image = Image.objects.create(
title="Test image",
file=get_test_image_file(),
)
def get(self, params={}):
return self.client.get(reverse('wagtailimages_delete_image', args=(self.image.id,)), params)
def post(self, post_data={}):
return self.client.post(reverse('wagtailimages_delete_image', args=(self.image.id,)), post_data)
def test_simple(self):
response = self.get()
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailimages/images/confirm_delete.html')
def test_delete(self):
response = self.post({
'hello': 'world'
})
# Should redirect back to index
self.assertRedirects(response, reverse('wagtailimages_index'))
# Check that the image was deleted
images = Image.objects.filter(title="Test image")
self.assertEqual(images.count(), 0)
class TestImageChooserView(TestCase, WagtailTestUtils):
def setUp(self):
self.login()
def get(self, params={}):
return self.client.get(reverse('wagtailimages_chooser'), params)
def test_simple(self):
response = self.get()
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailimages/chooser/chooser.html')
self.assertTemplateUsed(response, 'wagtailimages/chooser/chooser.js')
def test_search(self):
response = self.get({'q': "Hello"})
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['query_string'], "Hello")
def test_pagination(self):
pages = ['0', '1', '-1', '9999', 'Not a page']
for page in pages:
response = self.get({'p': page})
self.assertEqual(response.status_code, 200)
class TestImageChooserChosenView(TestCase, WagtailTestUtils):
def setUp(self):
self.login()
# Create an image to edit
self.image = Image.objects.create(
title="Test image",
file=get_test_image_file(),
)
def get(self, params={}):
return self.client.get(reverse('wagtailimages_image_chosen', args=(self.image.id,)), params)
def test_simple(self):
response = self.get()
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailimages/chooser/image_chosen.js')
# TODO: Test posting
class TestImageChooserUploadView(TestCase, WagtailTestUtils):
def setUp(self):
self.login()
def get(self, params={}):
return self.client.get(reverse('wagtailimages_chooser_upload'), params)
def test_simple(self):
response = self.get()
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'wagtailimages/chooser/chooser.html')
self.assertTemplateUsed(response, 'wagtailimages/chooser/chooser.js')
# TODO: Test uploading through chooser
class TestFormat(TestCase):
def setUp(self):
# test format
self.format = Format(
'test name',
'test label',
'test classnames',
'test filter spec'
)
# test image
self.image = MagicMock()
self.image.id = 0
def test_editor_attributes(self):
result = self.format.editor_attributes(
self.image,
'test alt text'
)
self.assertEqual(result,
'data-embedtype="image" data-id="0" data-format="test name" data-alt="test alt text" ')
def test_image_to_editor_html(self):
result = self.format.image_to_editor_html(
self.image,
'test alt text'
)
six.assertRegex(self, result,
'<img data-embedtype="image" data-id="0" data-format="test name" data-alt="test alt text" class="test classnames" src="[^"]+" width="1" height="1" alt="test alt text">',
)
def test_image_to_html_no_classnames(self):
self.format.classnames = None
result = self.format.image_to_html(self.image, 'test alt text')
six.assertRegex(self, result,
'<img src="[^"]+" width="1" height="1" alt="test alt text">'
)
self.format.classnames = 'test classnames'
def test_get_image_format(self):
register_image_format(self.format)
result = get_image_format('test name')
self.assertEqual(result, self.format)
| {
"content_hash": "e68edd762ffb0bf856a9a1cae3c20d80",
"timestamp": "",
"source": "github",
"line_count": 472,
"max_line_length": 181,
"avg_line_length": 33.92372881355932,
"alnum_prop": 0.6380214838870847,
"repo_name": "lojack/wagtail",
"id": "48a31ae89c651aa413a606f4bab7aaecf247a2c9",
"size": "16012",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "wagtail/wagtailimages/tests.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "127495"
},
{
"name": "D",
"bytes": "2012"
},
{
"name": "JavaScript",
"bytes": "41818"
},
{
"name": "Python",
"bytes": "831307"
},
{
"name": "Shell",
"bytes": "8483"
}
],
"symlink_target": ""
} |
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class appqoepolicy_binding(base_resource):
""" Binding class showing the resources that can be bound to appqoepolicy_binding.
"""
def __init__(self) :
self._name = ""
self.appqoepolicy_lbvserver_binding = []
@property
def name(self) :
""".<br/>Minimum length = 1.
"""
try :
return self._name
except Exception as e:
raise e
@name.setter
def name(self, name) :
""".<br/>Minimum length = 1
"""
try :
self._name = name
except Exception as e:
raise e
@property
def appqoepolicy_lbvserver_bindings(self) :
"""lbvserver that can be bound to appqoepolicy.
"""
try :
return self._appqoepolicy_lbvserver_binding
except Exception as e:
raise e
def _get_nitro_response(self, service, response) :
""" converts nitro response into object and returns the object array in case of get request.
"""
try :
result = service.payload_formatter.string_to_resource(appqoepolicy_binding_response, response, self.__class__.__name__)
if(result.errorcode != 0) :
if (result.errorcode == 444) :
service.clear_session(self)
if result.severity :
if (result.severity == "ERROR") :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
else :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
return result.appqoepolicy_binding
except Exception as e :
raise e
def _get_object_name(self) :
""" Returns the value of object identifier argument
"""
try :
if (self.name) :
return str(self.name)
return None
except Exception as e :
raise e
@classmethod
def get(self, service, name) :
""" Use this API to fetch appqoepolicy_binding resource.
"""
try :
if type(name) is not list :
obj = appqoepolicy_binding()
obj.name = name
response = obj.get_resource(service)
else :
response = []
if name and len(name) > 0 :
obj = [appqoepolicy_binding() for _ in range(len(name))]
# collect one response per requested name
response = [None] * len(name)
for i in range(len(name)) :
obj[i].name = name[i]
response[i] = obj[i].get_resource(service)
return response
except Exception as e:
raise e
class appqoepolicy_binding_response(base_response) :
def __init__(self, length=1) :
self.appqoepolicy_binding = []
self.errorcode = 0
self.message = ""
self.severity = ""
self.sessionid = ""
self.appqoepolicy_binding = [appqoepolicy_binding() for _ in range(length)]
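# Illustrative usage sketch (assumes an authenticated nitro_service session;
# the policy name is hypothetical):
#
#     binding = appqoepolicy_binding.get(ns_session, "appqoe_pol_1")
#     for lbv in binding.appqoepolicy_lbvserver_bindings:
#         print(lbv)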
| {
"content_hash": "50b42bc77bb0d3d1f85ae3851ad7b552",
"timestamp": "",
"source": "github",
"line_count": 98,
"max_line_length": 122,
"avg_line_length": 28.622448979591837,
"alnum_prop": 0.689126559714795,
"repo_name": "mahabs/nitro",
"id": "fdb1a7805e0260cee4050504533cf17a403f26f0",
"size": "3419",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nssrc/com/citrix/netscaler/nitro/resource/config/appqoe/appqoepolicy_binding.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "498"
},
{
"name": "Python",
"bytes": "10647176"
}
],
"symlink_target": ""
} |
from channels import Group
from channels.test import ChannelTestCase, WSClient, apply_routes
#TODO: use apply_routes here, these tests are wrong.
from msn import consumer
class MSNConsumerTest(ChannelTestCase):
def test_ws_connect(self):
client = WSClient()
default = 'turnex'
# Inject a message onto the channel to use in a consumer
#Channel("input").send({"value": 33})
# Run the consumer with the new Message object
#message = self.get_next_message("input", require=True)
#consumer.ws_connect(message)
# Verify there's a reply and that it's accurate
#result = self.get_next_message(message.reply_channel.name,
# require=True)
#self.assertIsNotNone(result)
client.send_and_consume('websocket.connect', path='/')
self.assertIsNone(client.receive())
Group(default).send({'text': 'ok'}, immediately=True)
self.assertEqual(client.receive(json=False), 'ok')
client.send_and_consume('websocket.receive',
text={'message': 'hey'},
path='/')
self.assertEqual(client.receive(), {'event': 'error', 'body': 'Stop Hacking.'})
client.send_and_consume('websocket.disconnect',
text={'message': 'hey'},
path='/')
| {
"content_hash": "cc41eae5b9a70ab815a4f06e7c09bd1d",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 87,
"avg_line_length": 39.333333333333336,
"alnum_prop": 0.5783898305084746,
"repo_name": "mleger45/turnex",
"id": "07fa66880af7f5c9537fb497b5824900827deba6",
"size": "1441",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "msn/tests/test_consumer.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "91270"
},
{
"name": "Dockerfile",
"bytes": "282"
},
{
"name": "JavaScript",
"bytes": "153971"
},
{
"name": "Jinja",
"bytes": "7295"
},
{
"name": "Procfile",
"bytes": "100"
},
{
"name": "Python",
"bytes": "38258"
},
{
"name": "Shell",
"bytes": "324"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
import frappe
import time
from frappe import _, msgprint
from frappe.utils import flt, cstr, now, get_datetime_str, file_lock
from frappe.utils.background_jobs import enqueue
from frappe.model.base_document import BaseDocument, get_controller
from frappe.model.naming import set_new_name
from werkzeug.exceptions import NotFound, Forbidden
import hashlib, json
from frappe.model import optional_fields
from frappe.utils.file_manager import save_url
# once_only validation
# methods
def get_doc(arg1, arg2=None):
"""returns a frappe.model.Document object.
:param arg1: Document dict or DocType name.
:param arg2: [optional] document name.
There are two ways to call `get_doc`
# will fetch the latest user object (with child table) from the database
user = get_doc("User", "test@example.com")
# create a new object
user = get_doc({
"doctype": "User",
"email_id": "test@example.com",
"user_roles": [
{"role": "System Manager"}
]
})
"""
if isinstance(arg1, BaseDocument):
return arg1
elif isinstance(arg1, basestring):
doctype = arg1
else:
doctype = arg1.get("doctype")
controller = get_controller(doctype)
if controller:
return controller(arg1, arg2)
raise ImportError, arg1
class Document(BaseDocument):
"""All controllers inherit from `Document`."""
def __init__(self, arg1, arg2=None):
"""Constructor.
:param arg1: DocType name as string or document **dict**
:param arg2: Document name, if `arg1` is DocType name.
If DocType name and document name are passed, the object will load
all values (including child documents) from the database.
"""
self.doctype = self.name = None
self._default_new_docs = {}
self.flags = frappe._dict()
if arg1 and isinstance(arg1, basestring):
if not arg2:
# single
self.doctype = self.name = arg1
else:
self.doctype = arg1
if isinstance(arg2, dict):
# filter
self.name = frappe.db.get_value(arg1, arg2, "name")
if self.name is None:
frappe.throw(_("{0} {1} not found").format(_(arg1), arg2), frappe.DoesNotExistError)
else:
self.name = arg2
self.load_from_db()
elif isinstance(arg1, dict):
super(Document, self).__init__(arg1)
self.init_valid_columns()
else:
# incorrect arguments. let's not proceed.
raise frappe.DataError("Document({0}, {1})".format(arg1, arg2))
def reload(self):
"""Reload document from database"""
self.load_from_db()
def load_from_db(self):
"""Load document and children from database and create properties
from fields"""
if not getattr(self, "_metaclass", False) and self.meta.issingle:
single_doc = frappe.db.get_singles_dict(self.doctype)
if not single_doc:
single_doc = frappe.new_doc(self.doctype).as_dict()
single_doc["name"] = self.doctype
del single_doc["__islocal"]
super(Document, self).__init__(single_doc)
self.init_valid_columns()
self._fix_numeric_types()
else:
d = frappe.db.get_value(self.doctype, self.name, "*", as_dict=1)
if not d:
frappe.throw(_("{0} {1} not found").format(_(self.doctype), self.name), frappe.DoesNotExistError)
super(Document, self).__init__(d)
if self.name=="DocType" and self.doctype=="DocType":
from frappe.model.meta import doctype_table_fields
table_fields = doctype_table_fields
else:
table_fields = self.meta.get_table_fields()
for df in table_fields:
children = frappe.db.get_values(df.options,
{"parent": self.name, "parenttype": self.doctype, "parentfield": df.fieldname},
"*", as_dict=True, order_by="idx asc")
if children:
self.set(df.fieldname, children)
else:
self.set(df.fieldname, [])
# sometimes __setup__ can depend on child values, hence calling again at the end
if hasattr(self, "__setup__"):
self.__setup__()
def get_latest(self):
if not getattr(self, "latest", None):
self.latest = frappe.get_doc(self.doctype, self.name)
return self.latest
def check_permission(self, permtype='read', permlabel=None):
"""Raise `frappe.PermissionError` if not permitted"""
if not self.has_permission(permtype):
self.raise_no_permission_to(permlabel or permtype)
def has_permission(self, permtype="read", verbose=False):
"""Call `frappe.has_permission` if `self.flags.ignore_permissions`
is not set.
:param permtype: one of `read`, `write`, `submit`, `cancel`, `delete`"""
if self.flags.ignore_permissions:
return True
return frappe.has_permission(self.doctype, permtype, self, verbose=verbose)
def raise_no_permission_to(self, perm_type):
"""Raise `frappe.PermissionError`."""
msg = _("No permission to {0} {1} {2}".format(perm_type, self.doctype, self.name or ""))
frappe.msgprint(msg)
raise frappe.PermissionError(msg)
def insert(self, ignore_permissions=None):
"""Insert the document in the database (as a new document).
This will check for user permissions and execute `before_insert`,
`validate`, `on_update`, `after_insert` methods if they are written.
:param ignore_permissions: Do not check permissions if True."""
if self.flags.in_print:
return
self.flags.email_alerts_executed = []
if ignore_permissions!=None:
self.flags.ignore_permissions = ignore_permissions
self.set("__islocal", True)
self.check_permission("create")
self._set_defaults()
self.set_user_and_timestamp()
self.set_docstatus()
self.check_if_latest()
self.run_method("before_insert")
self.set_new_name()
self.set_parent_in_children()
self.validate_higher_perm_levels()
self.flags.in_insert = True
self.run_before_save_methods()
self._validate()
self.set_docstatus()
self.flags.in_insert = False
# run validate, on update etc.
# parent
if getattr(self.meta, "issingle", 0):
self.update_single(self.get_valid_dict())
else:
self.db_insert()
# children
for d in self.get_all_children():
d.db_insert()
self.run_method("after_insert")
self.flags.in_insert = True
if self.get("amended_from"):
self.copy_attachments_from_amended_from()
self.run_post_save_methods()
self.flags.in_insert = False
# delete __islocal
if hasattr(self, "__islocal"):
delattr(self, "__islocal")
return self
def save(self, *args, **kwargs):
"""Wrapper for _save"""
return self._save(*args, **kwargs)
def _save(self, ignore_permissions=None):
"""Save the current document in the database in the **DocType**'s table or
`tabSingles` (for single types).
This will check for user permissions and execute
`validate` before updating, `on_update` after updating triggers.
:param ignore_permissions: Do not check permissions if True."""
if self.flags.in_print:
return
self.flags.email_alerts_executed = []
if ignore_permissions!=None:
self.flags.ignore_permissions = ignore_permissions
if self.get("__islocal") or not self.get("name"):
self.insert()
return
self.check_permission("write", "save")
self.set_user_and_timestamp()
self.set_docstatus()
self.check_if_latest()
self.set_parent_in_children()
self.validate_higher_perm_levels()
self.run_before_save_methods()
if self._action != "cancel":
self._validate()
if self._action == "update_after_submit":
self.validate_update_after_submit()
self.set_docstatus()
# parent
if self.meta.issingle:
self.update_single(self.get_valid_dict())
else:
self.db_update()
self.update_children()
self.run_post_save_methods()
return self
def copy_attachments_from_amended_from(self):
'''Copy attachments from `amended_from`'''
from frappe.desk.form.load import get_attachments
#loop through attachments
for attach_item in get_attachments(self.doctype, self.amended_from):
#save attachments to new doc
save_url(attach_item.file_url, attach_item.file_name, self.doctype, self.name, "Home/Attachments", attach_item.is_private)
def update_children(self):
'''update child tables'''
for df in self.meta.get_table_fields():
self.update_child_table(df.fieldname, df)
def update_child_table(self, fieldname, df=None):
'''sync child table for given fieldname'''
rows = []
if not df:
df = self.meta.get_field(fieldname)
for d in self.get(df.fieldname):
d.db_update()
rows.append(d.name)
if df.options in (self.flags.ignore_children_type or []):
# do not delete rows for this because of flags
# hack for docperm :(
return
if rows:
# select rows that do not match the ones in the document
deleted_rows = frappe.db.sql("""select name from `tab{0}` where parent=%s
and parenttype=%s and parentfield=%s
and name not in ({1})""".format(df.options, ','.join(['%s'] * len(rows))),
[self.name, self.doctype, fieldname] + rows)
if len(deleted_rows) > 0:
# delete rows that do not match the ones in the document
frappe.db.sql("""delete from `tab{0}` where name in ({1})""".format(df.options,
','.join(['%s'] * len(deleted_rows))), tuple(row[0] for row in deleted_rows))
else:
# no rows found, delete all rows
frappe.db.sql("""delete from `tab{0}` where parent=%s
and parenttype=%s and parentfield=%s""".format(df.options),
(self.name, self.doctype, fieldname))
def set_new_name(self):
"""Calls `frappe.naming.se_new_name` for parent and child docs."""
set_new_name(self)
# set name for children
for d in self.get_all_children():
set_new_name(d)
def set_title_field(self):
"""Set title field based on template"""
def get_values():
values = self.as_dict()
# format values
for key, value in values.iteritems():
if value==None:
values[key] = ""
return values
if self.meta.get("title_field")=="title":
df = self.meta.get_field(self.meta.title_field)
if df.options:
self.set(df.fieldname, df.options.format(**get_values()))
elif self.is_new() and not self.get(df.fieldname) and df.default:
# set default title for new transactions (if default)
self.set(df.fieldname, df.default.format(**get_values()))
def update_single(self, d):
"""Updates values for Single type Document in `tabSingles`."""
frappe.db.sql("""delete from tabSingles where doctype=%s""", self.doctype)
for field, value in d.iteritems():
if field != "doctype":
frappe.db.sql("""insert into tabSingles(doctype, field, value)
values (%s, %s, %s)""", (self.doctype, field, value))
if self.doctype in frappe.db.value_cache:
del frappe.db.value_cache[self.doctype]
def set_user_and_timestamp(self):
self._original_modified = self.modified
self.modified = now()
self.modified_by = frappe.session.user
if not self.creation:
self.creation = self.modified
if not self.owner:
self.owner = self.modified_by
for d in self.get_all_children():
d.modified = self.modified
d.modified_by = self.modified_by
if not d.owner:
d.owner = self.owner
if not d.creation:
d.creation = self.creation
frappe.flags.currently_saving.append((self.doctype, self.name))
def set_docstatus(self):
if self.docstatus==None:
self.docstatus=0
for d in self.get_all_children():
d.docstatus = self.docstatus
def _validate(self):
self._validate_mandatory()
self._validate_links()
self._validate_selects()
self._validate_constants()
self._validate_length()
self._extract_images_from_text_editor()
self._sanitize_content()
self._save_passwords()
children = self.get_all_children()
for d in children:
d._validate_selects()
d._validate_constants()
d._validate_length()
d._extract_images_from_text_editor()
d._sanitize_content()
d._save_passwords()
if self.is_new():
# don't set fields like _assign, _comments for new doc
for fieldname in optional_fields:
self.set(fieldname, None)
def apply_fieldlevel_read_permissions(self):
'''Remove values the user is not allowed to read (called when loading in desk)'''
has_higher_permlevel = False
for p in self.get_permissions():
if p.permlevel > 0:
has_higher_permlevel = True
break
if not has_higher_permlevel:
return
has_access_to = self.get_permlevel_access('read')
for df in self.meta.fields:
if df.permlevel and not df.permlevel in has_access_to:
self.set(df.fieldname, None)
for table_field in self.meta.get_table_fields():
for df in frappe.get_meta(table_field.options).fields or []:
if df.permlevel and not df.permlevel in has_access_to:
for child in self.get(table_field.fieldname) or []:
child.set(df.fieldname, None)
def validate_higher_perm_levels(self):
"""If the user does not have permissions at permlevel > 0, then reset the values to original / default"""
if self.flags.ignore_permissions or frappe.flags.in_install:
return
has_access_to = self.get_permlevel_access()
high_permlevel_fields = self.meta.get_high_permlevel_fields()
if high_permlevel_fields:
self.reset_values_if_no_permlevel_access(has_access_to, high_permlevel_fields)
# check for child tables
for df in self.meta.get_table_fields():
high_permlevel_fields = frappe.get_meta(df.options).meta.get_high_permlevel_fields()
if high_permlevel_fields:
for d in self.get(df.fieldname):
d.reset_values_if_no_permlevel_access(has_access_to, high_permlevel_fields)
def get_permlevel_access(self, permission_type='write'):
if not hasattr(self, "_has_access_to"):
user_roles = frappe.get_roles()
self._has_access_to = []
for perm in self.get_permissions():
if perm.role in user_roles and perm.permlevel > 0 and perm.get(permission_type):
if perm.permlevel not in self._has_access_to:
self._has_access_to.append(perm.permlevel)
return self._has_access_to
def has_permlevel_access_to(self, fieldname, df=None, permission_type='read'):
if not df:
df = self.meta.get_field(fieldname)
return df.permlevel in self.get_permlevel_access()
def get_permissions(self):
if self.meta.istable:
# use parent permissions
permissions = frappe.get_meta(self.parenttype).permissions
else:
permissions = self.meta.permissions
return permissions
def _set_defaults(self):
if frappe.flags.in_import:
return
new_doc = frappe.new_doc(self.doctype, as_dict=True)
self.update_if_missing(new_doc)
# children
for df in self.meta.get_table_fields():
new_doc = frappe.new_doc(df.options, as_dict=True)
value = self.get(df.fieldname)
if isinstance(value, list):
for d in value:
d.update_if_missing(new_doc)
def check_if_latest(self):
"""Checks if `modified` timestamp provided by document being updated is same as the
`modified` timestamp in the database. If there is a difference, the document has been
updated in the database after the current copy was read. Will throw an error if
timestamps don't match.
Will also validate document transitions (Save > Submit > Cancel) calling
`self.check_docstatus_transition`."""
conflict = False
self._action = "save"
if not self.get('__islocal'):
if self.meta.issingle:
modified = frappe.db.sql('''select value from tabSingles
where doctype=%s and field='modified' for update''', self.doctype)
modified = modified and modified[0][0]
if modified and modified != cstr(self._original_modified):
conflict = True
else:
tmp = frappe.db.sql("""select modified, docstatus from `tab{0}`
where name = %s for update""".format(self.doctype), self.name, as_dict=True)
if not tmp:
frappe.throw(_("Record does not exist"))
else:
tmp = tmp[0]
modified = cstr(tmp.modified)
if modified and modified != cstr(self._original_modified):
conflict = True
self.check_docstatus_transition(tmp.docstatus)
if conflict:
frappe.msgprint(_("Error: Document has been modified after you have opened it") \
+ (" (%s, %s). " % (modified, self.modified)) \
+ _("Please refresh to get the latest document"),
raise_exception=frappe.TimestampMismatchError)
else:
self.check_docstatus_transition(0)
def check_docstatus_transition(self, docstatus):
"""Ensures valid `docstatus` transition.
Valid transitions are (number in brackets is `docstatus`):
- Save (0) > Save (0)
- Save (0) > Submit (1)
- Submit (1) > Submit (1)
- Submit (1) > Cancel (2)
"""
if not self.docstatus:
self.docstatus = 0
if docstatus==0:
if self.docstatus==0:
self._action = "save"
elif self.docstatus==1:
self._action = "submit"
self.check_permission("submit")
else:
raise frappe.DocstatusTransitionError, _("Cannot change docstatus from 0 to 2")
elif docstatus==1:
if self.docstatus==1:
self._action = "update_after_submit"
self.check_permission("submit")
elif self.docstatus==2:
self._action = "cancel"
self.check_permission("cancel")
else:
raise frappe.DocstatusTransitionError, _("Cannot change docstatus from 1 to 0")
elif docstatus==2:
raise frappe.ValidationError, _("Cannot edit cancelled document")
def set_parent_in_children(self):
"""Updates `parent` and `parenttype` property in all children."""
for d in self.get_all_children():
d.parent = self.name
d.parenttype = self.doctype
def validate_update_after_submit(self):
if self.flags.ignore_validate_update_after_submit:
return
self._validate_update_after_submit()
for d in self.get_all_children():
if d.is_new() and self.meta.get_field(d.parentfield).allow_on_submit:
# in case of a new row, don't validate allow on submit, if table is allow on submit
continue
d._validate_update_after_submit()
# TODO check only allowed values are updated
def _validate_mandatory(self):
if self.flags.ignore_mandatory:
return
missing = self._get_missing_mandatory_fields()
for d in self.get_all_children():
missing.extend(d._get_missing_mandatory_fields())
if not missing:
return
for fieldname, msg in missing:
msgprint(msg)
if frappe.flags.print_messages:
print self.as_json().encode("utf-8")
raise frappe.MandatoryError('[{doctype}, {name}]: {fields}'.format(
fields=", ".join((each[0] for each in missing)),
doctype=self.doctype,
name=self.name))
def _validate_links(self):
if self.flags.ignore_links:
return
invalid_links, cancelled_links = self.get_invalid_links()
for d in self.get_all_children():
result = d.get_invalid_links(is_submittable=self.meta.is_submittable)
invalid_links.extend(result[0])
cancelled_links.extend(result[1])
if invalid_links:
msg = ", ".join((each[2] for each in invalid_links))
frappe.throw(_("Could not find {0}").format(msg),
frappe.LinkValidationError)
if cancelled_links:
msg = ", ".join((each[2] for each in cancelled_links))
frappe.throw(_("Cannot link cancelled document: {0}").format(msg),
frappe.CancelledLinkError)
def get_all_children(self, parenttype=None):
"""Returns all children documents from **Table** type field in a list."""
ret = []
for df in self.meta.get("fields", {"fieldtype": "Table"}):
if parenttype:
if df.options==parenttype:
return self.get(df.fieldname)
value = self.get(df.fieldname)
if isinstance(value, list):
ret.extend(value)
return ret
def run_method(self, method, *args, **kwargs):
"""run standard triggers, plus those in hooks"""
if "flags" in kwargs:
del kwargs["flags"]
if hasattr(self, method) and hasattr(getattr(self, method), "__call__"):
fn = lambda self, *args, **kwargs: getattr(self, method)(*args, **kwargs)
else:
# hack! to run hooks even if method does not exist
fn = lambda self, *args, **kwargs: None
fn.__name__ = method.encode("utf-8")
out = Document.hook(fn)(self, *args, **kwargs)
self.run_email_alerts(method)
return out
def run_trigger(self, method, *args, **kwargs):
return self.run_method(method, *args, **kwargs)
def run_email_alerts(self, method):
'''Run email alerts for this method'''
if frappe.flags.in_import or frappe.flags.in_patch or frappe.flags.in_install:
return
if self.flags.email_alerts_executed==None:
self.flags.email_alerts_executed = []
from frappe.email.doctype.email_alert.email_alert import evaluate_alert
if self.flags.email_alerts == None:
alerts = frappe.cache().hget('email_alerts', self.doctype)
if alerts==None:
alerts = frappe.get_all('Email Alert', fields=['name', 'event', 'method'],
filters={'enabled': 1, 'document_type': self.doctype})
frappe.cache().hset('email_alerts', self.doctype, alerts)
self.flags.email_alerts = alerts
if not self.flags.email_alerts:
return
def _evaluate_alert(alert):
if not alert.name in self.flags.email_alerts_executed:
evaluate_alert(self, alert.name, alert.event)
self.flags.email_alerts_executed.append(alert.name)
event_map = {
"on_update": "Save",
"after_insert": "New",
"on_submit": "Submit",
"on_cancel": "Cancel"
}
if not self.flags.in_insert:
# value change is not applicable in insert
event_map['validate'] = 'Value Change'
event_map['before_change'] = 'Value Change'
for alert in self.flags.email_alerts:
event = event_map.get(method, None)
if event and alert.event == event:
_evaluate_alert(alert)
elif alert.event=='Method' and method == alert.method:
_evaluate_alert(alert)
@staticmethod
def whitelist(f):
f.whitelisted = True
return f
@whitelist.__func__
def _submit(self):
"""Submit the document. Sets `docstatus` = 1, then saves."""
self.docstatus = 1
self.save()
@whitelist.__func__
def _cancel(self):
"""Cancel the document. Sets `docstatus` = 2, then saves."""
self.docstatus = 2
self.save()
@whitelist.__func__
def submit(self):
"""Submit the document. Sets `docstatus` = 1, then saves."""
self._submit()
@whitelist.__func__
def cancel(self):
"""Cancel the document. Sets `docstatus` = 2, then saves."""
self._cancel()
def delete(self):
"""Delete document."""
frappe.delete_doc(self.doctype, self.name, flags=self.flags)
def run_before_save_methods(self):
"""Run standard methods before `INSERT` or `UPDATE`. Standard Methods are:
- `validate`, `before_save` for **Save**.
- `validate`, `before_submit` for **Submit**.
- `before_cancel` for **Cancel**
- `before_update_after_submit` for **Update after Submit**
Will also update title_field if set"""
self.set_title_field()
self.reset_seen()
if self.flags.ignore_validate:
return
if self._action=="save":
self.run_method("validate")
self.run_method("before_save")
elif self._action=="submit":
self.run_method("validate")
self.run_method("before_submit")
elif self._action=="cancel":
self.run_method("before_cancel")
elif self._action=="update_after_submit":
self.run_method("before_update_after_submit")
def run_post_save_methods(self):
"""Run standard methods after `INSERT` or `UPDATE`. Standard Methods are:
- `on_update` for **Save**.
- `on_update`, `on_submit` for **Submit**.
- `on_cancel` for **Cancel**
- `update_after_submit` for **Update after Submit**"""
if self._action=="save":
self.run_method("on_update")
elif self._action=="submit":
self.run_method("on_update")
self.run_method("on_submit")
if not self.flags.ignore_submit_comment:
self.add_comment("Submitted")
elif self._action=="cancel":
self.run_method("on_cancel")
self.check_no_back_links_exist()
if not self.flags.ignore_submit_comment:
self.add_comment("Cancelled")
elif self._action=="update_after_submit":
self.run_method("on_update_after_submit")
self.run_method('on_change')
self.update_timeline_doc()
self.clear_cache()
self.notify_update()
if (self.doctype, self.name) in frappe.flags.currently_saving:
frappe.flags.currently_saving.remove((self.doctype, self.name))
self.latest = None
def clear_cache(self):
frappe.cache().hdel("last_modified", self.doctype)
def reset_seen(self):
'''Clear _seen property and set current user as seen'''
if getattr(self.meta, 'track_seen', False):
self._seen = json.dumps([frappe.session.user])
def notify_update(self):
"""Publish realtime that the current document is modified"""
frappe.publish_realtime("doc_update", {"modified": self.modified, "doctype": self.doctype, "name": self.name},
doctype=self.doctype, docname=self.name, after_commit=True)
if not self.meta.get("read_only") and not self.meta.get("issingle") and \
not self.meta.get("istable"):
frappe.publish_realtime("list_update", {"doctype": self.doctype}, after_commit=True)
def check_no_back_links_exist(self):
"""Check if document links to any active document before Cancel."""
from frappe.model.delete_doc import check_if_doc_is_linked, check_if_doc_is_dynamically_linked
if not self.flags.ignore_links:
check_if_doc_is_linked(self, method="Cancel")
check_if_doc_is_dynamically_linked(self, method="Cancel")
@staticmethod
def whitelist(f):
"""Decorator: Whitelist method to be called remotely via REST API."""
f.whitelisted = True
return f
@staticmethod
def hook(f):
"""Decorator: Make method `hookable` (i.e. extensible by another app).
Note: If each hooked method returns a value (dict), then all returns are
collated in one dict and returned. Ideally, don't return values in hookable
methods, set properties in the document."""
def add_to_return_value(self, new_return_value):
if isinstance(new_return_value, dict):
if not self.get("_return_value"):
self._return_value = {}
self._return_value.update(new_return_value)
else:
self._return_value = new_return_value or self.get("_return_value")
def compose(fn, *hooks):
def runner(self, method, *args, **kwargs):
add_to_return_value(self, fn(self, *args, **kwargs))
for f in hooks:
add_to_return_value(self, f(self, method, *args, **kwargs))
return self._return_value
return runner
def composer(self, *args, **kwargs):
hooks = []
method = f.__name__
doc_events = frappe.get_doc_hooks()
for handler in doc_events.get(self.doctype, {}).get(method, []) \
+ doc_events.get("*", {}).get(method, []):
hooks.append(frappe.get_attr(handler))
composed = compose(f, *hooks)
return composed(self, method, *args, **kwargs)
return composer
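# A sketch of how another app extends these hooked methods via its hooks.py
# (the app, doctype and handler names below are illustrative, not from this
# file; handlers receive the document and the method name):
#
#   # myapp/hooks.py
#   doc_events = {
#       "ToDo": {"on_update": "myapp.events.todo_updated"},
#       "*": {"validate": "myapp.events.validate_any"},
#   }
#
#   # myapp/events.py
#   def todo_updated(doc, method):
#       frappe.msgprint("ToDo {0} was saved".format(doc.name))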
def is_whitelisted(self, method):
fn = getattr(self, method, None)
if not fn:
raise NotFound("Method {0} not found".format(method))
elif not getattr(fn, "whitelisted", False):
raise Forbidden("Method {0} not whitelisted".format(method))
def validate_value(self, fieldname, condition, val2, doc=None, raise_exception=None):
"""Check that value of fieldname should be 'condition' val2
else throw Exception."""
error_condition_map = {
"in": _("one of"),
"not in": _("none of"),
"^": _("beginning with"),
}
if not doc:
doc = self
val1 = doc.get_value(fieldname)
df = doc.meta.get_field(fieldname)
val2 = doc.cast(val2, df)
if not frappe.compare(val1, condition, val2):
label = doc.meta.get_label(fieldname)
condition_str = error_condition_map.get(condition, condition)
if doc.parentfield:
msg = _("Incorrect value in row {0}: {1} must be {2} {3}".format(doc.idx, label, condition_str, val2))
else:
msg = _("Incorrect value: {0} must be {1} {2}".format(label, condition_str, val2))
# raise passed exception or True
msgprint(msg, raise_exception=raise_exception or True)
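# Usage sketch (fieldnames, conditions and values are illustrative):
#
#   self.validate_value("status", "in", ["Open", "Closed"])
#   self.validate_value("qty", ">", 0, raise_exception=frappe.ValidationError)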
def validate_table_has_rows(self, parentfield, raise_exception=None):
"""Raise exception if Table field is empty."""
if not (isinstance(self.get(parentfield), list) and len(self.get(parentfield)) > 0):
label = self.meta.get_label(parentfield)
frappe.throw(_("Table {0} cannot be empty").format(label), raise_exception or frappe.EmptyTableError)
def round_floats_in(self, doc, fieldnames=None):
"""Round floats for all `Currency`, `Float`, `Percent` fields for the given doc.
:param doc: Document whose numeric properties are to be rounded.
:param fieldnames: [Optional] List of fields to be rounded."""
if not fieldnames:
fieldnames = (df.fieldname for df in
doc.meta.get("fields", {"fieldtype": ["in", ["Currency", "Float", "Percent"]]}))
for fieldname in fieldnames:
doc.set(fieldname, flt(doc.get(fieldname), self.precision(fieldname, doc.parentfield)))
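# Usage sketch (fieldnames are illustrative):
#   self.round_floats_in(self, ["total", "grand_total"])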
def get_url(self):
"""Returns Desk URL for this document. `/desk#Form/{doctype}/{name}`"""
return "/desk#Form/{doctype}/{name}".format(doctype=self.doctype, name=self.name)
def add_comment(self, comment_type, text=None, comment_by=None, link_doctype=None, link_name=None):
"""Add a comment to this document.
:param comment_type: e.g. `Comment`. See Communication for more info."""
comment = frappe.get_doc({
"doctype":"Communication",
"communication_type": "Comment",
"sender": comment_by or frappe.session.user,
"comment_type": comment_type,
"reference_doctype": self.doctype,
"reference_name": self.name,
"content": text or comment_type,
"link_doctype": link_doctype,
"link_name": link_name
}).insert(ignore_permissions=True)
return comment
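# Usage sketch (the comment text is illustrative):
#   doc.add_comment("Comment", text="Approved by reviewer")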
def add_seen(self, user=None):
'''add the given/current user to list of users who have seen this document (_seen)'''
if not user:
user = frappe.session.user
if self.meta.track_seen:
if self._seen:
_seen = json.loads(self._seen)
else:
_seen = []
if user not in _seen:
_seen.append(user)
self.db_set('_seen', json.dumps(_seen), update_modified=False)
frappe.local.flags.commit = True
def get_signature(self):
"""Returns signature (hash) for private URL."""
return hashlib.sha224(get_datetime_str(self.creation)).hexdigest()
def get_liked_by(self):
liked_by = getattr(self, "_liked_by", None)
if liked_by:
return json.loads(liked_by)
else:
return []
def set_onload(self, key, value):
if not self.get("__onload"):
self.set("__onload", frappe._dict())
self.get("__onload")[key] = value
def update_timeline_doc(self):
if frappe.flags.in_install or not self.meta.get("timeline_field"):
return
timeline_doctype = self.meta.get_link_doctype(self.meta.timeline_field)
timeline_name = self.get(self.meta.timeline_field)
if not (timeline_doctype and timeline_name):
return
# update timeline doc in communication if it is different than current timeline doc
frappe.db.sql("""update `tabCommunication`
set timeline_doctype=%(timeline_doctype)s, timeline_name=%(timeline_name)s
where
reference_doctype=%(doctype)s and reference_name=%(name)s
and (timeline_doctype is null or timeline_doctype != %(timeline_doctype)s
or timeline_name is null or timeline_name != %(timeline_name)s)""",
{
"doctype": self.doctype,
"name": self.name,
"timeline_doctype": timeline_doctype,
"timeline_name": timeline_name
})
def queue_action(self, action, **kwargs):
'''Run an action in background. If the action has an inner function,
like _submit for submit, it will call that instead'''
# call _submit instead of submit, so you can override submit to call
# run_delayed based on some action
# See: Stock Reconciliation
if hasattr(self, '_' + action):
action = '_' + action
if file_lock.lock_exists(self.get_signature()):
frappe.throw(_('This document is currently queued for execution. Please try again'),
title=_('Document Queued'), indicator='red')
self.lock()
enqueue('frappe.model.document.execute_action', doctype=self.doctype, name=self.name,
action=action, **kwargs)
def lock(self, timeout=None):
'''Creates a lock file for the given document. If timeout is set,
it will retry once a second until the lock is acquired or the timeout expires.
:param timeout: Timeout in seconds, default 0'''
signature = self.get_signature()
if file_lock.lock_exists(signature):
lock_exists = True
if timeout:
for i in range(timeout):
time.sleep(1)
if not file_lock.lock_exists(signature):
lock_exists = False
break
if lock_exists:
raise frappe.DocumentLockedError
file_lock.create_lock(signature)
def unlock(self):
'''Delete the lock file for this document'''
file_lock.delete_lock(self.get_signature())
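# Usage sketch for the locking/queueing API above (doctype, name and
# action are illustrative):
#
#   doc = frappe.get_doc("Task", "TASK-0001")
#   doc.queue_action('submit')  # enqueues execute_action, which runs _submit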
def execute_action(doctype, name, action, **kwargs):
'''Execute an action on a document (called by background worker)'''
doc = frappe.get_doc(doctype, name)
doc.unlock()
try:
getattr(doc, action)(**kwargs)
except Exception:
frappe.db.rollback()
# add a comment (?)
if frappe.local.message_log:
msg = json.loads(frappe.local.message_log[-1]).get('message')
else:
msg = '<pre><code>' + frappe.get_traceback() + '</code></pre>'
doc.add_comment('Comment', _('Action Failed') + '<br><br>' + msg)
doc.notify_update()
| {
"content_hash": "26f333a68d41b16daa51c95cdafe1e7f",
"timestamp": "",
"source": "github",
"line_count": 1047,
"max_line_length": 125,
"avg_line_length": 31.12320916905444,
"alnum_prop": 0.6848953538329344,
"repo_name": "elba7r/frameworking",
"id": "0f385626818f22a58859f713e91b1bfd0c14aa94",
"size": "32687",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "frappe/model/document.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "336604"
},
{
"name": "HTML",
"bytes": "198854"
},
{
"name": "JavaScript",
"bytes": "1262443"
},
{
"name": "Python",
"bytes": "1614773"
},
{
"name": "Shell",
"bytes": "517"
}
],
"symlink_target": ""
} |
import sys
import hashlib
import struct
import base64
# In Python 3.6+, SHA3 is available in hashlib natively. Otherwise this
# requires the pysha3 package (pip install pysha3).
if sys.version_info < (3, 6):
import sha3
# Test vector to make sure the right sha3 version will be used. pysha3 < 1.0
# used the old Keccak implementation. During the finalization of SHA3, NIST
# changed the delimiter suffix from 0x01 to 0x06. The Keccak sponge function
# stayed the same. pysha3 1.0 provides the previous Keccak hash, too.
TEST_VALUE = "e167f68d6563d75bb25f3aa49c29ef612d41352dc00606de7cbd630bb2665f51"
if TEST_VALUE != sha3.sha3_256(b"Hello World").hexdigest():
print("pysha3 version is < 1.0. Please install from:")
print("https://github.com/tiran/pysha3https://github.com/tiran/pysha3")
sys.exit(1)
# The first index we'll build is the position index in the hashring that is
# constructed by the hs_build_hsdir_index() function. Construction is:
# SHA3-256("node-idx" | node_identity |
# shared_random_value | INT_8(period_length) | INT_8(period_num) )
PREFIX = "node-idx".encode()
# 32-byte ed25519 pubkey.
IDENTITY = ("\x42" * 32).encode()
# SRV is 32 bytes.
SRV = ("\x43" * 32).encode()
# Time period length is an 8-byte value.
PERIOD_LEN = 1440
# Period number is an 8-byte value.
PERIOD_NUM = 42
data = struct.pack('!8s32s32sQQ', PREFIX, IDENTITY, SRV, PERIOD_NUM,
PERIOD_LEN)
hsdir_index = hashlib.sha3_256(data).hexdigest()
print("[hs_build_hsdir_index] %s" % (hsdir_index))
# The second index we'll build is where the HS stores and the client fetches
# the descriptor on the hashring. It is constructed by the hs_build_hs_index()
# function and the construction is:
# SHA3-256("store-at-idx" | blinded_public_key |
# INT_8(replicanum) | INT_8(period_num) | INT_8(period_length) )
PREFIX = "store-at-idx".encode()
# 32-byte ed25519 pubkey.
PUBKEY = ("\x42" * 32).encode()
# Replica number is an 8-byte value.
REPLICA_NUM = 1
# Time period length is an 8-byte value.
PERIOD_LEN = 1440
# Period number is an 8-byte value.
PERIOD_NUM = 42
data = struct.pack('!12s32sQQQ', PREFIX, PUBKEY, REPLICA_NUM, PERIOD_LEN,
PERIOD_NUM)
hs_index = hashlib.sha3_256(data).hexdigest()
print("[hs_build_hs_index] %s" % (hs_index))
| {
"content_hash": "d7443c99765217a970af85081580aa95",
"timestamp": "",
"source": "github",
"line_count": 61,
"max_line_length": 83,
"avg_line_length": 38.950819672131146,
"alnum_prop": 0.6813973063973064,
"repo_name": "hexxcointakeover/hexxcoin",
"id": "af0b81f8ded0087779ce1da908f65a35ce71bc17",
"size": "2749",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/tor/src/test/hs_indexes.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "28456"
},
{
"name": "C",
"bytes": "12388542"
},
{
"name": "C++",
"bytes": "6242603"
},
{
"name": "CMake",
"bytes": "2300"
},
{
"name": "CSS",
"bytes": "18572"
},
{
"name": "Gnuplot",
"bytes": "940"
},
{
"name": "HTML",
"bytes": "44315"
},
{
"name": "Java",
"bytes": "30290"
},
{
"name": "Lua",
"bytes": "3321"
},
{
"name": "M4",
"bytes": "298710"
},
{
"name": "Makefile",
"bytes": "132265"
},
{
"name": "Objective-C",
"bytes": "2993642"
},
{
"name": "Objective-C++",
"bytes": "7240"
},
{
"name": "PHP",
"bytes": "3641"
},
{
"name": "Perl",
"bytes": "17978"
},
{
"name": "Python",
"bytes": "1267779"
},
{
"name": "QMake",
"bytes": "2093"
},
{
"name": "Roff",
"bytes": "30558"
},
{
"name": "Ruby",
"bytes": "3216"
},
{
"name": "Rust",
"bytes": "5757"
},
{
"name": "Shell",
"bytes": "91885"
},
{
"name": "q",
"bytes": "5584"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.core.urlresolvers import reverse
from oscar.core.loading import get_model, get_class
from oscar.test import factories
from oscar.test.testcases import WebTestCase
from decimal import Decimal as D
Order = get_model('order', 'Order')
UserAddress = get_model('address', 'UserAddress')
GatewayForm = get_class('checkout.forms', 'GatewayForm')
Country = get_model('address', 'Country')
class CheckoutMixin(object):
def create_digital_product(self):
product_class = factories.ProductClassFactory(
requires_shipping=False, track_stock=False)
product = factories.ProductFactory(product_class=product_class)
factories.StockRecordFactory(
num_in_stock=None, price_excl_tax=D('12.00'), product=product)
return product
def add_product_to_basket(self, product=None):
if product is None:
product = factories.ProductFactory()
factories.StockRecordFactory(
num_in_stock=10, price_excl_tax=D('12.00'), product=product)
detail_page = self.get(product.get_absolute_url())
form = detail_page.forms['add_to_basket_form']
form.submit()
def add_voucher_to_basket(self, voucher=None):
if voucher is None:
voucher = factories.create_voucher()
basket_page = self.get(reverse('basket:summary'))
form = basket_page.forms['voucher_form']
form['code'] = voucher.code
form.submit()
def enter_guest_details(self, email='guest@example.com'):
index_page = self.get(reverse('checkout:index'))
index_page.form['username'] = email
index_page.form.select('options', GatewayForm.GUEST)
return index_page.form.submit()
def create_shipping_country(self):
return factories.CountryFactory(
iso_3166_1_a2='GB', is_shipping_country=True)
def enter_shipping_address(self):
self.create_shipping_country()
address_page = self.get(reverse('checkout:shipping-address'))
form = address_page.forms['new_shipping_address']
form['first_name'] = 'John'
form['last_name'] = 'Doe'
form['line1'] = '1 Egg Road'
form['line4'] = 'Shell City'
form['postcode'] = 'N12 9RT'
form.submit()
def enter_shipping_method(self):
self.get(reverse('checkout:shipping-method'))
def place_order(self):
payment_details = self.get(
reverse('checkout:shipping-method')).follow().follow()
preview = payment_details.click(linkid="view_preview")
return preview.forms['place_order_form'].submit().follow()
def reach_payment_details_page(self, is_guest=False):
self.add_product_to_basket()
if is_guest:
self.enter_guest_details('hello@egg.com')
self.enter_shipping_address()
return self.get(
reverse('checkout:shipping-method')).follow().follow()
def ready_to_place_an_order(self, is_guest=False):
payment_details = self.reach_payment_details_page(is_guest)
return payment_details.click(linkid="view_preview")
class TestShippingAddressView(CheckoutMixin, WebTestCase):
def setUp(self):
super(TestShippingAddressView, self).setUp()
self.user_address = factories.UserAddressFactory(
user=self.user, country=self.create_shipping_country())
def test_requires_login(self):
response = self.get(reverse('checkout:shipping-address'), user=None)
self.assertIsRedirect(response)
def test_submitting_valid_form_adds_data_to_session(self):
self.add_product_to_basket()
page = self.get(reverse('checkout:shipping-address'))
form = page.forms['new_shipping_address']
form['first_name'] = 'Barry'
form['last_name'] = 'Chuckle'
form['line1'] = '1 King Street'
form['line4'] = 'Gotham City'
form['postcode'] = 'N1 7RR'
form['phone_number'] = '+44 1273 555 999'
response = form.submit()
self.assertRedirectsTo(response, 'checkout:shipping-method')
session_data = self.app.session['checkout_data']
session_fields = session_data['shipping']['new_address_fields']
self.assertEqual('Barry', session_fields['first_name'])
self.assertEqual('Chuckle', session_fields['last_name'])
self.assertEqual('1 King Street', session_fields['line1'])
self.assertEqual('Gotham City', session_fields['line4'])
self.assertEqual('N1 7RR', session_fields['postcode'])
| {
"content_hash": "a9451e12d51c364afc31abdc70848d44",
"timestamp": "",
"source": "github",
"line_count": 114,
"max_line_length": 76,
"avg_line_length": 40.13157894736842,
"alnum_prop": 0.6487431693989071,
"repo_name": "arbrandes/django-oscar-vat_moss",
"id": "bdf51135c8efbfbfdd84ad14021f12e46087aed9",
"size": "4575",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "tests/test_checkout.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "98886"
},
{
"name": "Shell",
"bytes": "104"
}
],
"symlink_target": ""
} |
import pandoc
pandoc.core.PANDOC_PATH = '/usr/local/bin/pandoc'
doc = pandoc.Document()
doc.markdown = open('README.md').read()
f = open('README.rst', 'w+')
f.write(doc.rst)
f.close()
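# An equivalent direct invocation without the pyandoc wrapper (a sketch;
# assumes the same pandoc binary and an existing README.md):
#
#   import subprocess
#   subprocess.check_call(['/usr/local/bin/pandoc', '-f', 'markdown',
#                          '-t', 'rst', '-o', 'README.rst', 'README.md'])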
| {
"content_hash": "b7a7f192327e01018095539e05e6adaf",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 49,
"avg_line_length": 20.666666666666668,
"alnum_prop": 0.6774193548387096,
"repo_name": "tsantor/python-lorem-pixel",
"id": "46cd9643f78415f23137ccedb81eb463f24fedeb",
"size": "186",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "md2rst.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "8129"
}
],
"symlink_target": ""
} |
from django.core.management import call_command
from django.core.management.commands.migrate import Command as MigrateCommand
from django_cassandra_engine.utils import get_engine_from_db_alias
class Command(MigrateCommand):
def handle(self, *args, **options):
engine = get_engine_from_db_alias(options['database'])
# Call regular migrate if engine is different from ours
if engine != 'django_cassandra_engine':
return super(Command, self).handle(**options)
else:
self.stdout.write("Migrations are not supported in this engine. "
"Calling syncdb instead..")
call_command('syncdb', **options)
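# Usage sketch (the database alias is illustrative):
#   python manage.py migrate --database=cassandra
# An alias backed by django_cassandra_engine falls through to syncdb here;
# any other alias gets Django's regular migrate.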
| {
"content_hash": "a5959025c88c9488675d48109bea6632",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 77,
"avg_line_length": 38.77777777777778,
"alnum_prop": 0.666189111747851,
"repo_name": "TargetHolding/django-cassandra-engine",
"id": "ba525d41430dc2f09e822faa9ac65256c17765ab",
"size": "698",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "django_cassandra_engine/management/commands/migrate.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "24562"
},
{
"name": "HTML",
"bytes": "6685"
},
{
"name": "JavaScript",
"bytes": "1413"
},
{
"name": "Python",
"bytes": "51117"
}
],
"symlink_target": ""
} |
"""Generated client library for cloudresourcemanager version v1beta1."""
# NOTE: This file is autogenerated and should not be edited by hand.
from apitools.base.py import base_api
from googlecloudsdk.third_party.apis.cloudresourcemanager.v1beta1 import cloudresourcemanager_v1beta1_messages as messages
class CloudresourcemanagerV1beta1(base_api.BaseApiClient):
"""Generated client library for service cloudresourcemanager version v1beta1."""
MESSAGES_MODULE = messages
BASE_URL = u'https://cloudresourcemanager.googleapis.com/'
_PACKAGE = u'cloudresourcemanager'
_SCOPES = [u'https://www.googleapis.com/auth/cloud-platform', u'https://www.googleapis.com/auth/cloud-platform.read-only']
_VERSION = u'v1beta1'
_CLIENT_ID = '1042881264118.apps.googleusercontent.com'
_CLIENT_SECRET = 'x_Tw5K8nnjoRAqULM9PFAC2b'
_USER_AGENT = 'x_Tw5K8nnjoRAqULM9PFAC2b'
_CLIENT_CLASS_NAME = u'CloudresourcemanagerV1beta1'
_URL_VERSION = u'v1beta1'
_API_KEY = None
def __init__(self, url='', credentials=None,
get_credentials=True, http=None, model=None,
log_request=False, log_response=False,
credentials_args=None, default_global_params=None,
additional_http_headers=None):
"""Create a new cloudresourcemanager handle."""
url = url or self.BASE_URL
super(CloudresourcemanagerV1beta1, self).__init__(
url, credentials=credentials,
get_credentials=get_credentials, http=http, model=model,
log_request=log_request, log_response=log_response,
credentials_args=credentials_args,
default_global_params=default_global_params,
additional_http_headers=additional_http_headers)
self.organizations = self.OrganizationsService(self)
self.projects = self.ProjectsService(self)
class OrganizationsService(base_api.BaseApiService):
"""Service class for the organizations resource."""
_NAME = u'organizations'
def __init__(self, client):
super(CloudresourcemanagerV1beta1.OrganizationsService, self).__init__(client)
self._upload_configs = {
}
def Get(self, request, global_params=None):
"""Fetches an Organization resource identified by the specified resource name.
Args:
request: (CloudresourcemanagerOrganizationsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Organization) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'cloudresourcemanager.organizations.get',
ordered_params=[u'organizationsId'],
path_params=[u'organizationsId'],
query_params=[u'organizationId'],
relative_path=u'v1beta1/organizations/{organizationsId}',
request_field='',
request_type_name=u'CloudresourcemanagerOrganizationsGetRequest',
response_type_name=u'Organization',
supports_download=False,
)
def GetIamPolicy(self, request, global_params=None):
"""Gets the access control policy for an Organization resource. May be empty.
if no such policy or resource exists. The `resource` field should be the
organization's resource name, e.g. "organizations/123".
Args:
request: (CloudresourcemanagerOrganizationsGetIamPolicyRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Policy) The response message.
"""
config = self.GetMethodConfig('GetIamPolicy')
return self._RunMethod(
config, request, global_params=global_params)
GetIamPolicy.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'cloudresourcemanager.organizations.getIamPolicy',
ordered_params=[u'organizationsId'],
path_params=[u'organizationsId'],
query_params=[],
relative_path=u'v1beta1/organizations/{organizationsId}:getIamPolicy',
request_field=u'getIamPolicyRequest',
request_type_name=u'CloudresourcemanagerOrganizationsGetIamPolicyRequest',
response_type_name=u'Policy',
supports_download=False,
)
def List(self, request, global_params=None):
"""Lists Organization resources that are visible to the user and satisfy.
the specified filter. This method returns Organizations in an unspecified
order. New Organizations do not necessarily appear at the end of the list.
Args:
request: (CloudresourcemanagerOrganizationsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(ListOrganizationsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'cloudresourcemanager.organizations.list',
ordered_params=[],
path_params=[],
query_params=[u'filter', u'pageSize', u'pageToken'],
relative_path=u'v1beta1/organizations',
request_field='',
request_type_name=u'CloudresourcemanagerOrganizationsListRequest',
response_type_name=u'ListOrganizationsResponse',
supports_download=False,
)
def SetIamPolicy(self, request, global_params=None):
"""Sets the access control policy on an Organization resource. Replaces any.
existing policy. The `resource` field should be the organization's resource
name, e.g. "organizations/123".
Args:
request: (CloudresourcemanagerOrganizationsSetIamPolicyRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Policy) The response message.
"""
config = self.GetMethodConfig('SetIamPolicy')
return self._RunMethod(
config, request, global_params=global_params)
SetIamPolicy.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'cloudresourcemanager.organizations.setIamPolicy',
ordered_params=[u'organizationsId'],
path_params=[u'organizationsId'],
query_params=[],
relative_path=u'v1beta1/organizations/{organizationsId}:setIamPolicy',
request_field=u'setIamPolicyRequest',
request_type_name=u'CloudresourcemanagerOrganizationsSetIamPolicyRequest',
response_type_name=u'Policy',
supports_download=False,
)
def TestIamPermissions(self, request, global_params=None):
"""Returns permissions that a caller has on the specified Organization.
The `resource` field should be the organization's resource name,
e.g. "organizations/123".
Args:
request: (CloudresourcemanagerOrganizationsTestIamPermissionsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TestIamPermissionsResponse) The response message.
"""
config = self.GetMethodConfig('TestIamPermissions')
return self._RunMethod(
config, request, global_params=global_params)
TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'cloudresourcemanager.organizations.testIamPermissions',
ordered_params=[u'organizationsId'],
path_params=[u'organizationsId'],
query_params=[],
relative_path=u'v1beta1/organizations/{organizationsId}:testIamPermissions',
request_field=u'testIamPermissionsRequest',
request_type_name=u'CloudresourcemanagerOrganizationsTestIamPermissionsRequest',
response_type_name=u'TestIamPermissionsResponse',
supports_download=False,
)
def Update(self, request, global_params=None):
"""Updates an Organization resource identified by the specified resource name.
Args:
request: (CloudresourcemanagerOrganizationsUpdateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Organization) The response message.
"""
config = self.GetMethodConfig('Update')
return self._RunMethod(
config, request, global_params=global_params)
Update.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'PUT',
method_id=u'cloudresourcemanager.organizations.update',
ordered_params=[u'organizationsId'],
path_params=[u'organizationsId'],
query_params=[],
relative_path=u'v1beta1/organizations/{organizationsId}',
request_field=u'organization',
request_type_name=u'CloudresourcemanagerOrganizationsUpdateRequest',
response_type_name=u'Organization',
supports_download=False,
)
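# A minimal call sketch against the organizations service above (credential
# setup is elided; the filter value is illustrative):
#
#   client = CloudresourcemanagerV1beta1()
#   request = messages.CloudresourcemanagerOrganizationsListRequest(
#       filter='domain:example.com')
#   response = client.organizations.List(request)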
class ProjectsService(base_api.BaseApiService):
"""Service class for the projects resource."""
_NAME = u'projects'
def __init__(self, client):
super(CloudresourcemanagerV1beta1.ProjectsService, self).__init__(client)
self._upload_configs = {
}
def Create(self, request, global_params=None):
"""Creates a Project resource.
Initially, the Project resource is owned by its creator exclusively.
The creator can later grant permission to others to read or update the
Project.
Several APIs are activated automatically for the Project, including
Google Cloud Storage.
Args:
request: (CloudresourcemanagerProjectsCreateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Project) The response message.
"""
config = self.GetMethodConfig('Create')
return self._RunMethod(
config, request, global_params=global_params)
Create.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'cloudresourcemanager.projects.create',
ordered_params=[],
path_params=[],
query_params=[u'useLegacyStack'],
relative_path=u'v1beta1/projects',
request_field=u'project',
request_type_name=u'CloudresourcemanagerProjectsCreateRequest',
response_type_name=u'Project',
supports_download=False,
)
def Delete(self, request, global_params=None):
"""Marks the Project identified by the specified.
`project_id` (for example, `my-project-123`) for deletion.
This method will only affect the Project if the following criteria are met:
+ The Project does not have a billing account associated with it.
+ The Project has a lifecycle state of
ACTIVE.
This method changes the Project's lifecycle state from
ACTIVE
to DELETE_REQUESTED.
The deletion starts at an unspecified time, at which point the project is
no longer accessible.
Until the deletion completes, you can check the lifecycle state
by retrieving the Project with GetProject,
and the Project remains visible to ListProjects.
However, you cannot update the project.
After the deletion completes, the Project is not retrievable by
the GetProject and
ListProjects methods.
The caller must have modify permissions for this Project.
Args:
request: (CloudresourcemanagerProjectsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Empty) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'DELETE',
method_id=u'cloudresourcemanager.projects.delete',
ordered_params=[u'projectId'],
path_params=[u'projectId'],
query_params=[],
relative_path=u'v1beta1/projects/{projectId}',
request_field='',
request_type_name=u'CloudresourcemanagerProjectsDeleteRequest',
response_type_name=u'Empty',
supports_download=False,
)
def Get(self, request, global_params=None):
"""Retrieves the Project identified by the specified.
`project_id` (for example, `my-project-123`).
The caller must have read permissions for this Project.
Args:
request: (CloudresourcemanagerProjectsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Project) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'cloudresourcemanager.projects.get',
ordered_params=[u'projectId'],
path_params=[u'projectId'],
query_params=[],
relative_path=u'v1beta1/projects/{projectId}',
request_field='',
request_type_name=u'CloudresourcemanagerProjectsGetRequest',
response_type_name=u'Project',
supports_download=False,
)
def GetAncestry(self, request, global_params=None):
"""Gets a list of ancestors in the resource hierarchy for the Project.
identified by the specified `project_id` (for example, `my-project-123`).
The caller must have read permissions for this Project.
Args:
request: (CloudresourcemanagerProjectsGetAncestryRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GetAncestryResponse) The response message.
"""
config = self.GetMethodConfig('GetAncestry')
return self._RunMethod(
config, request, global_params=global_params)
GetAncestry.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'cloudresourcemanager.projects.getAncestry',
ordered_params=[u'projectId'],
path_params=[u'projectId'],
query_params=[],
relative_path=u'v1beta1/projects/{projectId}:getAncestry',
request_field=u'getAncestryRequest',
request_type_name=u'CloudresourcemanagerProjectsGetAncestryRequest',
response_type_name=u'GetAncestryResponse',
supports_download=False,
)
def GetIamPolicy(self, request, global_params=None):
"""Returns the IAM access control policy for the specified Project.
Permission is denied if the policy or the resource does not exist.
Args:
request: (CloudresourcemanagerProjectsGetIamPolicyRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Policy) The response message.
"""
config = self.GetMethodConfig('GetIamPolicy')
return self._RunMethod(
config, request, global_params=global_params)
GetIamPolicy.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'cloudresourcemanager.projects.getIamPolicy',
ordered_params=[u'resource'],
path_params=[u'resource'],
query_params=[],
relative_path=u'v1beta1/projects/{resource}:getIamPolicy',
request_field=u'getIamPolicyRequest',
request_type_name=u'CloudresourcemanagerProjectsGetIamPolicyRequest',
response_type_name=u'Policy',
supports_download=False,
)
def List(self, request, global_params=None):
"""Lists Projects that are visible to the user and satisfy the.
specified filter. This method returns Projects in an unspecified order.
New Projects do not necessarily appear at the end of the list.
Args:
request: (CloudresourcemanagerProjectsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(ListProjectsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'cloudresourcemanager.projects.list',
ordered_params=[],
path_params=[],
query_params=[u'filter', u'pageSize', u'pageToken'],
relative_path=u'v1beta1/projects',
request_field='',
request_type_name=u'CloudresourcemanagerProjectsListRequest',
response_type_name=u'ListProjectsResponse',
supports_download=False,
)
def SetIamPolicy(self, request, global_params=None):
"""Sets the IAM access control policy for the specified Project. Replaces.
any existing policy.
The following constraints apply when using `setIamPolicy()`:
+ Project does not support `allUsers` and `allAuthenticatedUsers` as
`members` in a `Binding` of a `Policy`.
+ The owner role can be granted only to `user` and `serviceAccount`.
+ Service accounts can be made owners of a project directly
without any restrictions. However, to be added as an owner, a user must be
invited via Cloud Platform console and must accept the invitation.
+ A user cannot be granted the owner role using `setIamPolicy()`. The user
must be granted the owner role using the Cloud Platform Console and must
explicitly accept the invitation.
+ Invitations to grant the owner role cannot be sent using `setIamPolicy()`;
they must be sent only using the Cloud Platform Console.
+ Membership changes that leave the project without any owners that have
accepted the Terms of Service (ToS) will be rejected.
+ There must be at least one owner who has accepted the Terms of
Service (ToS) agreement in the policy. Calling `setIamPolicy()` to
remove the last ToS-accepted owner from the policy will fail. This
restriction also applies to legacy projects that no longer have owners
who have accepted the ToS. Edits to IAM policies will be rejected until
the lack of a ToS-accepting owner is rectified.
+ Calling this method requires enabling the App Engine Admin API.
Note: Removing service accounts from policies or changing their roles
can render services completely inoperable. It is important to understand
how the service account is being used before removing or updating its roles.
Args:
request: (CloudresourcemanagerProjectsSetIamPolicyRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Policy) The response message.
"""
config = self.GetMethodConfig('SetIamPolicy')
return self._RunMethod(
config, request, global_params=global_params)
SetIamPolicy.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'cloudresourcemanager.projects.setIamPolicy',
ordered_params=[u'resource'],
path_params=[u'resource'],
query_params=[],
relative_path=u'v1beta1/projects/{resource}:setIamPolicy',
request_field=u'setIamPolicyRequest',
request_type_name=u'CloudresourcemanagerProjectsSetIamPolicyRequest',
response_type_name=u'Policy',
supports_download=False,
)
def TestIamPermissions(self, request, global_params=None):
"""Returns permissions that a caller has on the specified Project.
Args:
request: (CloudresourcemanagerProjectsTestIamPermissionsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TestIamPermissionsResponse) The response message.
"""
config = self.GetMethodConfig('TestIamPermissions')
return self._RunMethod(
config, request, global_params=global_params)
TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'cloudresourcemanager.projects.testIamPermissions',
ordered_params=[u'resource'],
path_params=[u'resource'],
query_params=[],
relative_path=u'v1beta1/projects/{resource}:testIamPermissions',
request_field=u'testIamPermissionsRequest',
request_type_name=u'CloudresourcemanagerProjectsTestIamPermissionsRequest',
response_type_name=u'TestIamPermissionsResponse',
supports_download=False,
)
def Undelete(self, request, global_params=None):
"""Restores the Project identified by the specified.
`project_id` (for example, `my-project-123`).
You can only use this method for a Project that has a lifecycle state of
DELETE_REQUESTED.
After deletion starts, the Project cannot be restored.
The caller must have modify permissions for this Project.
Args:
request: (CloudresourcemanagerProjectsUndeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Empty) The response message.
"""
config = self.GetMethodConfig('Undelete')
return self._RunMethod(
config, request, global_params=global_params)
Undelete.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'cloudresourcemanager.projects.undelete',
ordered_params=[u'projectId'],
path_params=[u'projectId'],
query_params=[],
relative_path=u'v1beta1/projects/{projectId}:undelete',
request_field=u'undeleteProjectRequest',
request_type_name=u'CloudresourcemanagerProjectsUndeleteRequest',
response_type_name=u'Empty',
supports_download=False,
)
def Update(self, request, global_params=None):
"""Updates the attributes of the Project identified by the specified.
`project_id` (for example, `my-project-123`).
The caller must have modify permissions for this Project.
Args:
request: (Project) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Project) The response message.
"""
config = self.GetMethodConfig('Update')
return self._RunMethod(
config, request, global_params=global_params)
Update.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'PUT',
method_id=u'cloudresourcemanager.projects.update',
ordered_params=[u'projectId'],
path_params=[u'projectId'],
query_params=[],
relative_path=u'v1beta1/projects/{projectId}',
request_field='<request>',
request_type_name=u'Project',
response_type_name=u'Project',
supports_download=False,
)
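# --- Hedged usage sketch (not part of the generated client) ---
# A read-modify-write IAM flow through this service, assuming the companion
# apitools message module follows the usual naming convention
# (cloudresourcemanager_v1beta1_messages) and that default credentials are
# configured; 'my-project-123' is a hypothetical project ID.
#
#   from googlecloudsdk.third_party.apis.cloudresourcemanager.v1beta1 import (
#       cloudresourcemanager_v1beta1_messages as messages)
#
#   client = CloudresourcemanagerV1beta1()
#   policy = client.projects.GetIamPolicy(
#       messages.CloudresourcemanagerProjectsGetIamPolicyRequest(
#           resource='my-project-123'))
#   # ...mutate policy.bindings here, respecting the setIamPolicy()
#   # constraints documented above, then write the policy back...
#   client.projects.SetIamPolicy(
#       messages.CloudresourcemanagerProjectsSetIamPolicyRequest(
#           resource='my-project-123',
#           setIamPolicyRequest=messages.SetIamPolicyRequest(policy=policy)))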
| {
"content_hash": "6e040ec56cfb808be2cb4d9793381cca",
"timestamp": "",
"source": "github",
"line_count": 565,
"max_line_length": 124,
"avg_line_length": 40.14513274336283,
"alnum_prop": 0.701040472621462,
"repo_name": "KaranToor/MA450",
"id": "092962d382d9a83f254a394eff1682029b7e39b9",
"size": "22682",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "google-cloud-sdk/.install/.backup/lib/googlecloudsdk/third_party/apis/cloudresourcemanager/v1beta1/cloudresourcemanager_v1beta1_client.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "3162"
},
{
"name": "CSS",
"bytes": "1930"
},
{
"name": "HTML",
"bytes": "13381"
},
{
"name": "Java",
"bytes": "151442"
},
{
"name": "JavaScript",
"bytes": "4906"
},
{
"name": "Makefile",
"bytes": "1636"
},
{
"name": "Objective-C",
"bytes": "13335"
},
{
"name": "PHP",
"bytes": "9086"
},
{
"name": "Pascal",
"bytes": "62"
},
{
"name": "Python",
"bytes": "19710731"
},
{
"name": "Roff",
"bytes": "2069494"
},
{
"name": "Ruby",
"bytes": "690"
},
{
"name": "Shell",
"bytes": "32272"
},
{
"name": "Smarty",
"bytes": "4968"
},
{
"name": "SourcePawn",
"bytes": "616"
},
{
"name": "Swift",
"bytes": "14225"
}
],
"symlink_target": ""
} |
"""
Interface with the command-line GULP program.
http://projects.ivec.org
WARNING: you need to have GULP installed on your system.
"""
__author__ = "Bharat Medasani, Wenhao Sun"
__copyright__ = "Copyright 2013, The Materials Project"
__version__ = "1.0"
__maintainer__ = "Bharat Medasani"
__email__ = "bkmedasani@lbl.gov,wenhao@mit.edu"
__status__ = "Production"
__date__ = "$Jun 22, 2013M$"
import os
import re
import subprocess
from monty.tempfile import ScratchDir
from pymatgen.analysis.bond_valence import BVAnalyzer
from pymatgen.core.lattice import Lattice
from pymatgen.core.periodic_table import Element
from pymatgen.core.structure import Structure
from pymatgen.symmetry.analyzer import SpacegroupAnalyzer
_anions = set(map(Element, ["O", "S", "F", "Cl", "Br", "N", "P"]))
_cations = set(
map(
Element,
[
"Li",
"Na",
"K", # alkali metals
"Be",
"Mg",
"Ca", # alkaline metals
"Al",
"Sc",
"Ti",
"V",
"Cr",
"Mn",
"Fe",
"Co",
"Ni",
"Cu",
"Zn",
"Ge",
"As",
"Y",
"Zr",
"Nb",
"Mo",
"Tc",
"Ru",
"Rh",
"Pd",
"Ag",
"Cd",
"In",
"Sn",
"Sb",
"Hf",
"Ta",
"W",
"Re",
"Os",
"Ir",
"Pt",
"Au",
"Hg",
"Tl",
"Pb",
"Bi",
"La",
"Ce",
"Pr",
"Nd",
"Pm",
"Sm",
"Eu",
"Gd",
"Tb",
"Dy",
"Ho",
"Er",
"Tm",
"Yb",
"Lu",
],
)
)
_gulp_kw = {
# Control of calculation type
"angle",
"bond",
"cosmo",
"cosmic",
"cost",
"defect",
"distance",
"eem",
"efg",
"fit",
"free_energy",
"gasteiger",
"genetic",
"gradients",
"md",
"montecarlo",
"noautobond",
"noenergy",
"optimise",
"pot",
"predict",
"preserve_Q",
"property",
"phonon",
"qeq",
"qbond",
"single",
"sm",
"static_first",
"torsion",
"transition_state",
# Geometric variable specification
"breathe",
"bulk_noopt",
"cellonly",
"conp",
"conv",
"isotropic",
"orthorhombic",
"nobreathe",
"noflgs",
"shell",
"unfix",
# Algorithm
"c6",
"dipole",
"fbfgs",
"fix_molecule",
"full",
"hill",
"kfull",
"marvinSE",
"madelung",
"minimum_image",
"molecule",
"molmec",
"molq",
"newda",
"noanisotropic_2b",
"nod2sym",
"nodsymmetry",
"noelectrostatics",
"noexclude",
"nofcentral",
"nofirst_point",
"noksymmetry",
"nolist_md",
"nomcediff",
"nonanal",
"noquicksearch",
"noreal",
"norecip",
"norepulsive",
"nosasinitevery",
"nosderv",
"nozeropt",
"numerical",
"qiter",
"qok",
"spatial",
"storevectors",
"nomolecularinternalke",
"voight",
"zsisa",
# Optimisation method
"conjugate",
"dfp",
"lbfgs",
"numdiag",
"positive",
"rfo",
"unit",
# Output control
"average",
"broaden_dos",
"cartesian",
"compare",
"conserved",
"dcharge",
"dynamical_matrix",
"eigenvectors",
"global",
"hessian",
"hexagonal",
"intensity",
"linmin",
"meanke",
"nodensity_out",
"nodpsym",
"nofirst_point",
"nofrequency",
"nokpoints",
"operators",
"outcon",
"prt_eam",
"prt_two",
"prt_regi_before",
"qsas",
"restore",
"save",
"terse",
# Structure control
"full",
"hexagonal",
"lower_symmetry",
"nosymmetry",
# PDF control
"PDF",
"PDFcut",
"PDFbelow",
"PDFkeep",
"coreinfo",
"nowidth",
"nopartial",
# Miscellaneous
"nomodcoord",
"oldunits",
"zero_potential",
}
class GulpIO:
"""
To generate GULP input and process output
"""
@staticmethod
def keyword_line(*args):
"""
Checks if the input args are proper gulp keywords and
generates the 1st line of gulp input. Full keywords are expected.
Args:
args: 1st line keywords
"""
# if len(list(filter(lambda x: x in _gulp_kw, args))) != len(args):
# raise GulpError("Wrong keywords given")
gin = " ".join(args)
gin += "\n"
return gin
@staticmethod
def structure_lines(
structure,
cell_flg=True,
frac_flg=True,
anion_shell_flg=True,
cation_shell_flg=False,
symm_flg=True,
):
"""
Generates GULP input string corresponding to pymatgen structure.
Args:
structure: pymatgen Structure object
cell_flg (default = True): Option to use lattice parameters.
frac_flg (default = True): If True, fractional coordinates
are used. Else, Cartesian coordinates in Angstroms are used.
******
GULP convention is to use fractional coordinates for periodic
structures and Cartesian coordinates for non-periodic
structures.
******
anion_shell_flg (default = True): If True, anions are considered
polarizable.
cation_shell_flg (default = False): If True, cations are
considered polarizable.
symm_flg (default = True): If True, symmetry information is also
written.
Returns:
string containing structure for GULP input
"""
gin = ""
if cell_flg:
gin += "cell\n"
l = structure.lattice
lat_str = f"{l.a:6f} {l.b:6f} {l.c:6f} {l.alpha:6f} {l.beta:6f} {l.gamma:6f}"
gin += lat_str + "\n"
if frac_flg:
gin += "frac\n"
coord_attr = "frac_coords"
else:
gin += "cart\n"
coord_attr = "coords"
for site in structure.sites:
coord = [str(i) for i in getattr(site, coord_attr)]
specie = site.specie
core_site_desc = specie.symbol + " core " + " ".join(coord) + "\n"
gin += core_site_desc
if (specie in _anions and anion_shell_flg) or (specie in _cations and cation_shell_flg):
shel_site_desc = specie.symbol + " shel " + " ".join(coord) + "\n"
gin += shel_site_desc
if symm_flg:
gin += "space\n"
gin += str(SpacegroupAnalyzer(structure).get_space_group_number()) + "\n"
return gin
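# Schematic of the block this method emits (placeholders, not real values):
#
#   cell
#   <a> <b> <c> <alpha> <beta> <gamma>
#   frac
#   <El> core <x> <y> <z>
#   <El> shel <x> <y> <z>   # shell line only for polarizable species
#   space
#   <space group number>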
@staticmethod
def specie_potential_lines(structure, potential, **kwargs):
"""
Generates GULP input specie and potential string for pymatgen
structure.
Args:
structure: pymatgen.core.structure.Structure object
potential: String specifying the type of potential used
kwargs: Additional parameters related to potential. For
potential == "buckingham",
anion_shell_flg (default = False):
If True, anions are considered polarizable.
anion_core_chrg=float
anion_shell_chrg=float
cation_shell_flg (default = False):
If True, cations are considered polarizable.
cation_core_chrg=float
cation_shell_chrg=float
Returns:
string containing specie and potential specification for gulp
input.
"""
raise NotImplementedError("gulp_specie_potential not yet implemented.\nUse library_line instead")
@staticmethod
def library_line(file_name):
"""
Specifies GULP library file to read species and potential parameters.
If a library is used, don't specify species and potentials
in the input file, and vice versa. Make sure the elements of the
structure are present in the library file.
Args:
file_name: Name of GULP library file
Returns:
GULP input string specifying library option
"""
gulplib_set = "GULP_LIB" in os.environ.keys()
def readable(f):
return os.path.isfile(f) and os.access(f, os.R_OK)
gin = ""
dirpath, fname = os.path.split(file_name)
if dirpath and readable(file_name): # Full path specified
gin = "library " + file_name
else:
fpath = os.path.join(os.getcwd(), file_name) # Check current dir
if readable(fpath):
gin = "library " + fpath
elif gulplib_set: # Check the GULP_LIB path
fpath = os.path.join(os.environ["GULP_LIB"], file_name)
if readable(fpath):
gin = "library " + file_name
if gin:
return gin + "\n"
raise GulpError("GULP Library not found")
def buckingham_input(self, structure, keywords, library=None, uc=True, valence_dict=None):
"""
Gets a GULP input for an oxide structure and buckingham potential
from library.
Args:
structure: pymatgen.core.structure.Structure
keywords: GULP first line keywords.
library (Default=None): File containing the species and potential.
uc (Default=True): Unit Cell Flag.
valence_dict: {El: valence}
"""
gin = self.keyword_line(*keywords)
gin += self.structure_lines(structure, symm_flg=not uc)
if not library:
gin += self.buckingham_potential(structure, valence_dict)
else:
gin += self.library_line(library)
return gin
@staticmethod
def buckingham_potential(structure, val_dict=None):
"""
Generate species, buckingham, and spring options for an oxide structure
using the parameters in default libraries.
Ref:
1. G.V. Lewis and C.R.A. Catlow, J. Phys. C: Solid State Phys.,
18, 1149-1161 (1985)
2. T.S.Bush, J.D.Gale, C.R.A.Catlow and P.D. Battle,
J. Mater Chem., 4, 831-837 (1994)
Args:
structure: pymatgen.core.structure.Structure
val_dict (Needed if structure is not charge neutral): {El:valence}
dict, where El is element.
"""
if not val_dict:
try:
# If structure is oxidation state decorated, use that first.
el = [site.specie.symbol for site in structure]
valences = [site.specie.oxi_state for site in structure]
val_dict = dict(zip(el, valences))
except AttributeError:
bv = BVAnalyzer()
el = [site.specie.symbol for site in structure]
valences = bv.get_valences(structure)
val_dict = dict(zip(el, valences))
# Try bush library first
bpb = BuckinghamPotential("bush")
bpl = BuckinghamPotential("lewis")
gin = ""
for key in val_dict.keys():
use_bush = True
el = re.sub(r"[1-9,+,\-]", "", key)
if el not in bpb.species_dict.keys():
use_bush = False
elif val_dict[key] != bpb.species_dict[el]["oxi"]:
use_bush = False
if use_bush:
gin += "species \n"
gin += bpb.species_dict[el]["inp_str"]
gin += "buckingham \n"
gin += bpb.pot_dict[el]
gin += "spring \n"
gin += bpb.spring_dict[el]
continue
# Try lewis library next if element is not in bush
# use_lewis = True
if el != "O": # For metals the key is "Metal_OxiState+"
k = el + "_" + str(int(val_dict[key])) + "+"
if k not in bpl.species_dict.keys():
# use_lewis = False
raise GulpError(f"Element {k} not in library")
gin += "species\n"
gin += bpl.species_dict[k]
gin += "buckingham\n"
gin += bpl.pot_dict[k]
else:
gin += "species\n"
k = "O_core"
gin += bpl.species_dict[k]
k = "O_shel"
gin += bpl.species_dict[k]
gin += "buckingham\n"
gin += bpl.pot_dict[key]
gin += "spring\n"
gin += bpl.spring_dict[key]
return gin
def tersoff_input(self, structure, periodic=False, uc=True, *keywords):
"""
Gets a GULP input with Tersoff potential for an oxide structure
Args:
structure: pymatgen.core.structure.Structure
periodic (Default=False): Flag denoting whether periodic
boundary conditions are used
uc (Default=True): Unit Cell Flag.
keywords: GULP first line keywords.
"""
# gin="static noelectrostatics \n "
gin = self.keyword_line(*keywords)
gin += self.structure_lines(
structure,
cell_flg=periodic,
frac_flg=periodic,
anion_shell_flg=False,
cation_shell_flg=False,
symm_flg=not uc,
)
gin += self.tersoff_potential(structure)
return gin
@staticmethod
def tersoff_potential(structure):
"""
Generate the species, tersoff potential lines for an oxide structure
Args:
structure: pymatgen.core.structure.Structure
"""
bv = BVAnalyzer()
el = [site.specie.symbol for site in structure]
valences = bv.get_valences(structure)
el_val_dict = dict(zip(el, valences))
gin = "species \n"
qerfstring = "qerfc\n"
for key, value in el_val_dict.items():
if key != "O" and value % 1 != 0:
raise SystemError("Oxide has mixed valence on metal")
specie_string = key + " core " + str(value) + "\n"
gin += specie_string
qerfstring += key + " " + key + " 0.6000 10.0000 \n"
gin += "# noelectrostatics \n Morse \n"
met_oxi_ters = TersoffPotential().data
for key, value in el_val_dict.items():
if key != "O":
metal = key + "(" + str(int(value)) + ")"
ters_pot_str = met_oxi_ters[metal]
gin += ters_pot_str
gin += qerfstring
return gin
@staticmethod
def get_energy(gout):
"""
Args:
gout (str): GULP output string.
Returns:
(float) Total lattice energy in eV.
"""
energy = None
for line in gout.split("\n"):
if "Total lattice energy" in line and "eV" in line:
energy = line.split()
elif "Non-primitive unit cell" in line and "eV" in line:
energy = line.split()
if energy:
return float(energy[4])
raise GulpError("Energy not found in Gulp output")
@staticmethod
def get_relaxed_structure(gout):
"""
Args:
gout (str): GULP output string.
Returns:
(Structure) relaxed structure.
"""
# Find the structure lines
structure_lines = []
cell_param_lines = []
output_lines = gout.split("\n")
no_lines = len(output_lines)
i = 0
# Compute the input lattice parameters
while i < no_lines:
line = output_lines[i]
if "Full cell parameters" in line:
i += 2
line = output_lines[i]
a = float(line.split()[8])
alpha = float(line.split()[11])
line = output_lines[i + 1]
b = float(line.split()[8])
beta = float(line.split()[11])
line = output_lines[i + 2]
c = float(line.split()[8])
gamma = float(line.split()[11])
i += 3
break
if "Cell parameters" in line:
i += 2
line = output_lines[i]
a = float(line.split()[2])
alpha = float(line.split()[5])
line = output_lines[i + 1]
b = float(line.split()[2])
beta = float(line.split()[5])
line = output_lines[i + 2]
c = float(line.split()[2])
gamma = float(line.split()[5])
i += 3
break
i += 1
while i < no_lines:
line = output_lines[i]
if "Final fractional coordinates of atoms" in line:
# read the site coordinates in the following lines
i += 6
line = output_lines[i]
while line[0:2] != "--":
structure_lines.append(line)
i += 1
line = output_lines[i]
# read the cell parameters
i += 9
line = output_lines[i]
if "Final cell parameters" in line:
i += 3
for del_i in range(6):
line = output_lines[i + del_i]
cell_param_lines.append(line)
break
i += 1
# Process the structure lines
if structure_lines:
sp = []
coords = []
for line in structure_lines:
fields = line.split()
if fields[2] == "c":
sp.append(fields[1])
coords.append(list(float(x) for x in fields[3:6]))
else:
raise OSError("No structure found")
if cell_param_lines:
a = float(cell_param_lines[0].split()[1])
b = float(cell_param_lines[1].split()[1])
c = float(cell_param_lines[2].split()[1])
alpha = float(cell_param_lines[3].split()[1])
beta = float(cell_param_lines[4].split()[1])
gamma = float(cell_param_lines[5].split()[1])
latt = Lattice.from_parameters(a, b, c, alpha, beta, gamma)
return Structure(latt, sp, coords)
class GulpCaller:
"""
Class to run GULP from the command line
"""
def __init__(self, cmd="gulp"):
"""
Initialize with the executable if not in the standard path
Args:
cmd: Command. Defaults to gulp.
"""
def is_exe(f):
return os.path.isfile(f) and os.access(f, os.X_OK)
fpath, fname = os.path.split(cmd)
if fpath:
if is_exe(cmd):
self._gulp_cmd = cmd
return
else:
for path in os.environ["PATH"].split(os.pathsep):
path = path.strip('"')
file = os.path.join(path, cmd)
if is_exe(file):
self._gulp_cmd = file
return
raise GulpError("Executable not found")
def run(self, gin):
"""
Run GULP using the gin as input
Args:
gin: GULP input string
Returns:
gout: GULP output string
"""
with ScratchDir("."):
with subprocess.Popen(
self._gulp_cmd,
stdout=subprocess.PIPE,
stdin=subprocess.PIPE,
stderr=subprocess.PIPE,
) as p:
out, err = p.communicate(bytearray(gin, "utf-8"))
out = out.decode("utf-8")
err = err.decode("utf-8")
if "Error" in err or "error" in err:
print(gin)
print("----output_0---------")
print(out)
print("----End of output_0------\n\n\n")
print("----output_1--------")
print(out)
print("----End of output_1------")
raise GulpError(err)
# We may not need this
if "ERROR" in out:
raise GulpError(out)
# Sometimes optimisation may fail to reach convergence
conv_err_string = "Conditions for a minimum have not been satisfied"
if conv_err_string in out:
raise GulpConvergenceError()
gout = ""
for line in out.split("\n"):
gout = gout + line + "\n"
return gout
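# Hedged sketch (not part of pymatgen's public API): a one-shot single-point
# energy evaluation through the low-level GulpIO/GulpCaller pipeline. Assumes
# a working ``gulp`` binary on the PATH and a readable GULP library file;
# "bush.lib" is an illustrative choice.
def _example_single_point_energy(structure, library="bush.lib"):
    gio = GulpIO()
    # "single" requests a single-point run; "conp" is the constant-pressure flag
    gin = gio.keyword_line("single", "conp")
    gin += gio.structure_lines(structure, anion_shell_flg=False, symm_flg=False)
    gin += gio.library_line(library)
    gout = GulpCaller().run(gin)
    return gio.get_energy(gout)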
def get_energy_tersoff(structure, gulp_cmd="gulp"):
"""
Compute the energy of a structure using Tersoff potential.
Args:
structure: pymatgen.core.structure.Structure
gulp_cmd: GULP command if not in standard place
"""
gio = GulpIO()
gc = GulpCaller(gulp_cmd)
gin = gio.tersoff_input(structure)
gout = gc.run(gin)
return gio.get_energy(gout)
def get_energy_buckingham(structure, gulp_cmd="gulp", keywords=("optimise", "conp", "qok"), valence_dict=None):
"""
Compute the energy of a structure using Buckingham potential.
Args:
structure: pymatgen.core.structure.Structure
gulp_cmd: GULP command if not in standard place
keywords: GULP first line keywords
valence_dict: {El: valence}. Needed if the structure is not charge
neutral.
"""
gio = GulpIO()
gc = GulpCaller(gulp_cmd)
gin = gio.buckingham_input(structure, keywords, valence_dict=valence_dict)
gout = gc.run(gin)
return gio.get_energy(gout)
def get_energy_relax_structure_buckingham(structure, gulp_cmd="gulp", keywords=("optimise", "conp"), valence_dict=None):
"""
Relax a structure and compute the energy using Buckingham potential.
Args:
structure: pymatgen.core.structure.Structure
gulp_cmd: GULP command if not in standard place
keywords: GULP first line keywords
valence_dict: {El: valence}. Needed if the structure is not charge
neutral.
"""
gio = GulpIO()
gc = GulpCaller(gulp_cmd)
gin = gio.buckingham_input(structure, keywords, valence_dict=valence_dict)
gout = gc.run(gin)
energy = gio.get_energy(gout)
relax_structure = gio.get_relaxed_structure(gout)
return energy, relax_structure
class GulpError(Exception):
"""
Exception class for GULP.
Raised when the GULP gives an error
"""
def __init__(self, msg):
"""
Args:
msg (str): Message
"""
self.msg = msg
def __str__(self):
return "GulpError : " + self.msg
class GulpConvergenceError(Exception):
"""
Exception class for GULP.
Raised when proper convergence is not reached in Mott-Littleton
defect energy optimisation procedure in GULP
"""
def __init__(self, msg=""):
"""
Args:
msg (str): Message
"""
self.msg = msg
def __str__(self):
return self.msg
class BuckinghamPotential:
"""
Generate the Buckingham potential table from the bush.lib and lewis.lib files.
Ref:
T.S.Bush, J.D.Gale, C.R.A.Catlow and P.D. Battle, J. Mater Chem.,
4, 831-837 (1994).
G.V. Lewis and C.R.A. Catlow, J. Phys. C: Solid State Phys., 18,
1149-1161 (1985)
"""
def __init__(self, bush_lewis_flag):
"""
Args:
bush_lewis_flag (str): Flag for using Bush or Lewis potential.
"""
assert bush_lewis_flag in {"bush", "lewis"}
pot_file = "bush.lib" if bush_lewis_flag == "bush" else "lewis.lib"
with open(os.path.join(os.environ["GULP_LIB"], pot_file)) as f:
# In lewis.lib there is no shell for cation
species_dict, pot_dict, spring_dict = {}, {}, {}
sp_flg, pot_flg, spring_flg = False, False, False
for row in f:
if row[0] == "#":
continue
if row.split()[0] == "species":
sp_flg, pot_flg, spring_flg = True, False, False
continue
if row.split()[0] == "buckingham":
sp_flg, pot_flg, spring_flg = False, True, False
continue
if row.split()[0] == "spring":
sp_flg, pot_flg, spring_flg = False, False, True
continue
elmnt = row.split()[0]
if sp_flg:
if bush_lewis_flag == "bush":
if elmnt not in species_dict.keys():
species_dict[elmnt] = {"inp_str": "", "oxi": 0}
species_dict[elmnt]["inp_str"] += row
species_dict[elmnt]["oxi"] += float(row.split()[2])
elif bush_lewis_flag == "lewis":
if elmnt == "O":
if row.split()[1] == "core":
species_dict["O_core"] = row
if row.split()[1] == "shel":
species_dict["O_shel"] = row
else:
metal = elmnt.split("_")[0]
# oxi_state = metaloxi.split('_')[1][0]
species_dict[elmnt] = metal + " core " + row.split()[2] + "\n"
continue
if pot_flg:
if bush_lewis_flag == "bush":
pot_dict[elmnt] = row
elif bush_lewis_flag == "lewis":
if elmnt == "O":
pot_dict["O"] = row
else:
metal = elmnt.split("_")[0]
# oxi_state = metaloxi.split('_')[1][0]
pot_dict[elmnt] = metal + " " + " ".join(row.split()[1:]) + "\n"
continue
if spring_flg:
spring_dict[elmnt] = row
if bush_lewis_flag == "bush":
# Fill the null keys in spring dict with empty strings
for key in pot_dict.keys():
if key not in spring_dict.keys():
spring_dict[key] = ""
self.species_dict = species_dict
self.pot_dict = pot_dict
self.spring_dict = spring_dict
class TersoffPotential:
"""
Generate the Tersoff potential table from the "OxideTersoffPotentials" file
"""
def __init__(self):
"""
Init TersoffPotential
"""
module_dir = os.path.dirname(os.path.abspath(__file__))
with open(os.path.join(module_dir, "OxideTersoffPotentials")) as f:
data = {}
for row in f:
metaloxi = row.split()[0]
line = row.split(")")
data[metaloxi] = line[1]
self.data = data
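# Hedged demo (not part of pymatgen's API): requires a working ``gulp``
# binary on the PATH and GULP_LIB pointing at the directory holding
# bush.lib/lewis.lib; "MgO.cif" is a hypothetical structure file.
if __name__ == "__main__":
    example_structure = Structure.from_file("MgO.cif")
    energy, relaxed = get_energy_relax_structure_buckingham(example_structure)
    print(f"Relaxed lattice energy: {energy:.4f} eV")
    print(relaxed)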
| {
"content_hash": "4e9430715122f555dc986d0c70f447f8",
"timestamp": "",
"source": "github",
"line_count": 902,
"max_line_length": 120,
"avg_line_length": 30.284922394678492,
"alnum_prop": 0.49247721199253214,
"repo_name": "gVallverdu/pymatgen",
"id": "58c3d42ad20b399f4a8b4319dce4b516153bc97b",
"size": "27411",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "pymatgen/command_line/gulp_caller.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "87"
},
{
"name": "CSS",
"bytes": "7572"
},
{
"name": "Cython",
"bytes": "39738"
},
{
"name": "HTML",
"bytes": "12642493"
},
{
"name": "OpenEdge ABL",
"bytes": "312"
},
{
"name": "Python",
"bytes": "9200904"
},
{
"name": "Roff",
"bytes": "1407429"
},
{
"name": "Shell",
"bytes": "12027"
}
],
"symlink_target": ""
} |
"""Large tests for metric_ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from six.moves import xrange # pylint: disable=redefined-builtin
from tensorflow.contrib.metrics.python.ops import metric_ops
from tensorflow.python.framework import dtypes as dtypes_lib
from tensorflow.python.framework import ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
class StreamingPrecisionRecallAtEqualThresholdsLargeTest(test.TestCase):
def setUp(self):
np.random.seed(1)
ops.reset_default_graph()
def testLargeCase(self):
shape = [32, 512, 256, 1]
predictions = random_ops.random_uniform(
shape, 0.0, 1.0, dtype=dtypes_lib.float32)
labels = math_ops.greater(random_ops.random_uniform(shape, 0.0, 1.0), 0.5)
result, update_op = metric_ops.precision_recall_at_equal_thresholds(
labels=labels, predictions=predictions, num_thresholds=201)
# Run many updates, enough to cause highly inaccurate values if the
# code used float32 for accumulation.
num_updates = 71
with self.test_session() as sess:
sess.run(variables.local_variables_initializer())
for _ in xrange(num_updates):
sess.run(update_op)
prdata = sess.run(result)
# Since we use random values, we won't know the tp/fp/tn/fn values, but
# tp and fp at threshold 0 should be the total number of positive and
# negative labels, hence their sum should be total number of pixels.
expected_value = 1.0 * np.product(shape) * num_updates
got_value = prdata.tp[0] + prdata.fp[0]
# They should be at least within 1.
self.assertNear(got_value, expected_value, 1.0)
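# For intuition, a small NumPy sketch (an illustration, not the contrib op)
# of why the check above holds: at threshold 0 every prediction counts as
# positive, so tp + fp equals the total number of elements.
def _tp_fp_at_threshold(predictions, labels, threshold):
    positive = predictions >= threshold
    tp = np.sum(positive & labels)
    fp = np.sum(positive & np.logical_not(labels))
    return tp, fp  # at threshold 0.0: tp + fp == predictions.size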
if __name__ == '__main__':
test.main()
| {
"content_hash": "fe0a6d3bc163788aa35eda2baf9716ff",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 78,
"avg_line_length": 36.46153846153846,
"alnum_prop": 0.7146624472573839,
"repo_name": "jart/tensorflow",
"id": "7acfc383eb9a659a600752cf57b4978daa8a07bc",
"size": "2585",
"binary": false,
"copies": "11",
"ref": "refs/heads/master",
"path": "tensorflow/contrib/metrics/python/ops/metric_ops_large_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "9258"
},
{
"name": "C",
"bytes": "310149"
},
{
"name": "C++",
"bytes": "44871792"
},
{
"name": "CMake",
"bytes": "206735"
},
{
"name": "Go",
"bytes": "1163781"
},
{
"name": "HTML",
"bytes": "4680032"
},
{
"name": "Java",
"bytes": "799574"
},
{
"name": "Jupyter Notebook",
"bytes": "2455980"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "Makefile",
"bytes": "52050"
},
{
"name": "Objective-C",
"bytes": "15650"
},
{
"name": "Objective-C++",
"bytes": "99265"
},
{
"name": "PHP",
"bytes": "2140"
},
{
"name": "Perl",
"bytes": "7536"
},
{
"name": "PureBasic",
"bytes": "25356"
},
{
"name": "Python",
"bytes": "38792793"
},
{
"name": "Ruby",
"bytes": "533"
},
{
"name": "Shell",
"bytes": "447966"
},
{
"name": "Smarty",
"bytes": "6870"
}
],
"symlink_target": ""
} |
from mock import Mock
import numpy as np
from sklearn.cross_validation import train_test_split
from sklearn.linear_model import LogisticRegression
def test_multiclass_logloss():
from ..metrics import multiclass_logloss
act = np.array([[0, 1, 0], [1, 0, 0], [0, 0, 1]])
pred = np.array([[0.2, 0.7, 0.1], [0.6, 0.2, 0.2], [0.6, 0.1, 0.3]])
result = multiclass_logloss(act, pred)
assert result == 0.69049112401021973
def test_multiclass_logloss_actual_conversion():
from ..metrics import multiclass_logloss
act = np.array([1, 0, 2])
pred = np.array([[0.2, 0.7, 0.1], [0.6, 0.2, 0.2], [0.6, 0.1, 0.3]])
result = multiclass_logloss(act, pred)
assert result == 0.69049112401021973
def _learning_curve(learning_curve):
X = np.array([[0, 1], [1, 0], [1, 1]] * 100, dtype=float)
X[:, 1] += (np.random.random((300)) - 0.5)
y = np.array([0, 0, 1] * 100)
dataset = Mock()
dataset.train_test_split.return_value = train_test_split(X, y)
dataset.data = X
dataset.target = y
return learning_curve(dataset, LogisticRegression(), steps=5, verbose=1)
def test_learning_curve():
from ..metrics import learning_curve
scores_train, scores_test, sizes = _learning_curve(learning_curve)
assert len(scores_train) == 5
assert len(scores_test) == 5
assert sizes[0] == 22
assert sizes[-1] == 225.0
def test_learning_curve_logloss():
from ..metrics import learning_curve_logloss
scores_train, scores_test, sizes = _learning_curve(learning_curve_logloss)
assert len(scores_train) == 5
assert len(scores_test) == 5
assert sizes[0] == 22
assert sizes[-1] == 225.0
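def test_multiclass_logloss_matches_sklearn():
    # Hedged cross-check (an added sanity test, not from the original
    # suite), assuming sklearn's log_loss computes the same multiclass
    # logloss: -(ln 0.7 + ln 0.6 + ln 0.3) / 3 ~= 0.690491.
    from sklearn.metrics import log_loss
    from ..metrics import multiclass_logloss
    act = np.array([[0, 1, 0], [1, 0, 0], [0, 0, 1]])
    pred = np.array([[0.2, 0.7, 0.1], [0.6, 0.2, 0.2], [0.6, 0.1, 0.3]])
    assert abs(multiclass_logloss(act, pred) - log_loss(act, pred)) < 1e-6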
| {
"content_hash": "ea84d4784a76b036ef3a90c66cba9ffa",
"timestamp": "",
"source": "github",
"line_count": 57,
"max_line_length": 78,
"avg_line_length": 29.29824561403509,
"alnum_prop": 0.6395209580838324,
"repo_name": "dnouri/nolearn",
"id": "e78f9531e85bd553553cf2b29895983f03ae067d",
"size": "1670",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "nolearn/tests/test_metrics.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "174681"
}
],
"symlink_target": ""
} |